diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index f38af2d393..531a1ff88a 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -12,7 +12,7 @@ and session management. ### Related issues/PRs -Related issues: #590 +Related issues: close #590 close #591 Related pr:#591 diff --git a/.github/actions/chart-testing-action b/.github/actions/chart-testing-action index 2fffad3153..e6669bcd63 160000 --- a/.github/actions/chart-testing-action +++ b/.github/actions/chart-testing-action @@ -1 +1 @@ -Subproject commit 2fffad315319f278a64ba2e94505768bd9e2a633 +Subproject commit e6669bcd63d7cb57cb4380c33043eebe5d111992 diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala index 8fcb4af737..f9e4718472 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/ServiceInstance.scala @@ -20,11 +20,17 @@ package org.apache.linkis.common class ServiceInstance { private var applicationName: String = _ private var instance: String = _ + private var registryTimestamp: Long = _ def setApplicationName(applicationName: String): Unit = this.applicationName = applicationName def getApplicationName: String = applicationName def setInstance(instance: String): Unit = this.instance = instance def getInstance: String = instance + def setRegistryTimestamp(registryTimestamp: Long): Unit = this.registryTimestamp = + registryTimestamp + + def getRegistryTimestamp: Long = registryTimestamp + override def equals(other: Any): Boolean = other match { case that: ServiceInstance => applicationName == that.applicationName && @@ -42,7 +48,9 @@ class ServiceInstance { .foldLeft(0)((a, b) => 31 * a + b) } - override def toString: String = s"ServiceInstance($applicationName, $instance)" + override def toString: String = + s"ServiceInstance($applicationName, $instance, $registryTimestamp)" + } object ServiceInstance { @@ -54,6 +62,14 @@ object ServiceInstance { serviceInstance } + def apply(applicationName: String, instance: String, registryTimestamp: Long): ServiceInstance = { + val serviceInstance = new ServiceInstance + serviceInstance.setApplicationName(applicationName) + serviceInstance.setInstance(instance) + serviceInstance.setRegistryTimestamp(registryTimestamp) + serviceInstance + } + def unapply(serviceInstance: ServiceInstance): Option[(String, String)] = if (serviceInstance != null) { Some(serviceInstance.applicationName, serviceInstance.instance) diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala index 14febab63a..9bfa053b77 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala @@ -232,19 +232,20 @@ private[conf] object BDPConfiguration extends Logging { private[common] def formatValue[T](defaultValue: T, value: Option[String]): Option[T] = { if (value.isEmpty || value.exists(StringUtils.isEmpty)) return Option(defaultValue) + val trimValue = value.map(_.trim) val formattedValue = defaultValue match { - case _: String => value - case _: Byte => value.map(_.toByte) - case _: Short => 
value.map(_.toShort) - case _: Char => value.map(_.toCharArray.apply(0)) - case _: Int => value.map(_.toInt) - case _: Long => value.map(_.toLong) - case _: Float => value.map(_.toFloat) - case _: Double => value.map(_.toDouble) - case _: Boolean => value.map(_.toBoolean) - case _: TimeType => value.map(new TimeType(_)) - case _: ByteType => value.map(new ByteType(_)) - case null => value + case _: String => trimValue + case _: Byte => trimValue.map(_.toByte) + case _: Short => trimValue.map(_.toShort) + case _: Char => trimValue.map(_.toCharArray.apply(0)) + case _: Int => trimValue.map(_.toInt) + case _: Long => trimValue.map(_.toLong) + case _: Float => trimValue.map(_.toFloat) + case _: Double => trimValue.map(_.toDouble) + case _: Boolean => trimValue.map(_.toBoolean) + case _: TimeType => trimValue.map(new TimeType(_)) + case _: ByteType => trimValue.map(new ByteType(_)) + case null => trimValue } formattedValue.asInstanceOf[Option[T]] } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala index 77c82f3883..e558e765be 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala @@ -33,7 +33,7 @@ object LogUtils { } def generateERROR(rawLog: String): String = { - getTimeFormat + " " + "ERROR" + " " + rawLog + getTimeFormat + " " + ERROR_STR + " " + rawLog } def generateWarn(rawLog: String): String = { @@ -52,4 +52,6 @@ object LogUtils { getTimeFormat + " " + "SYSTEM-WARN" + " " + rawLog } + val ERROR_STR = "ERROR" + } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala index 3870fe6e58..917ac53261 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/CodeAndRunTypeUtils.scala @@ -21,6 +21,8 @@ import org.apache.linkis.common.conf.CommonVars import org.apache.commons.lang3.StringUtils +import scala.collection.mutable + object CodeAndRunTypeUtils { private val CONF_LOCK = new Object() @@ -101,7 +103,14 @@ object CodeAndRunTypeUtils { def getLanguageTypeAndCodeTypeRelationMap: Map[String, String] = { val codeTypeAndRunTypeRelationMap = getCodeTypeAndLanguageTypeRelationMap if (codeTypeAndRunTypeRelationMap.isEmpty) Map() - else codeTypeAndRunTypeRelationMap.flatMap(x => x._2.map(y => (y, x._1))) + else { +// codeTypeAndRunTypeRelationMap.flatMap(x => x._2.map(y => (y, x._1))) + val map = mutable.Map[String, String]() + codeTypeAndRunTypeRelationMap.foreach(kv => { + kv._2.foreach(v => map.put(v, kv._1)) + }) + map.toMap + } } def getLanguageTypeByCodeType(codeType: String, defaultLanguageType: String = ""): String = { diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/exception/ExceptionManagerTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/exception/ExceptionManagerTest.java index d45bebc125..839a34f859 100644 --- a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/exception/ExceptionManagerTest.java +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/exception/ExceptionManagerTest.java @@ -21,6 +21,7 @@ import 
java.util.HashMap; import java.util.Map; +import java.util.TreeMap; import org.junit.jupiter.api.Test; @@ -40,7 +41,7 @@ void testGenerateException() { + "null"); assertEquals(errorException.getClass(), ExceptionManager.generateException(null).getClass()); assertEquals(errorException.toString(), ExceptionManager.generateException(null).toString()); - Map map = new HashMap<>(); + Map map = new TreeMap<>(); map.put("level", null); map.put("errCode", 1); map.put("desc", "test"); diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml index d4ffc38e2c..f92edb8216 100644 --- a/linkis-commons/linkis-module/pom.xml +++ b/linkis-commons/linkis-module/pom.xml @@ -64,10 +64,6 @@ - - org.springframework.cloud - spring-cloud-starter-netflix-eureka-client - org.springframework.boot @@ -277,4 +273,80 @@ + + + eureka + + true + + discovery + eureka + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + nacos + + + discovery + nacos + + + + + com.alibaba.cloud + spring-cloud-starter-alibaba-nacos-discovery + + + org.springframework.boot + * + + + org.springframework.cloud + spring-cloud-commons + + + org.springframework.cloud + spring-cloud-context + + + org.springframework.boot + spring-boot-starter + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.google.code.findbugs + jsr305 + + + org.yaml + snakeyaml + + + io.prometheus + simpleclient + + + com.google.guava + guava + + + + + + + diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala index 582568e626..3c6a25a343 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala @@ -207,4 +207,7 @@ object ServerConfiguration extends Logging { val LINKIS_SERVER_SESSION_PROXY_TICKETID_KEY = CommonVars("wds.linkis.session.proxy.user.ticket.key", "linkis_user_session_proxy_ticket_id_v1") + val LINKIS_SERVER_ENTRANCE_HEADER_KEY = + CommonVars("linkis.server.entrance.header.key", "jobInstanceKey") + } diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala index 8f09139e0e..73699f38ef 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/ticket/RedisClient.scala @@ -70,7 +70,7 @@ object RedisClient { SessionHAConfiguration.RedisHost, SessionHAConfiguration.RedisPort, redisTimeout, - SessionHAConfiguration.RedisSentinalServer + SessionHAConfiguration.RedisPassword ) } diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java index 6eb97c84d9..a90a1eb3b7 100644 --- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java +++ b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java @@ -69,6 +69,7 @@ public interface TaskConstant { String TICKET_ID = "ticketId"; String ENGINE_CONN_TASK_ID = "engineConnTaskId"; String 
ENGINE_CONN_SUBMIT_TIME = "engineConnSubmitTime"; + String FAILOVER_FLAG = "failoverFlag"; String DEBUG_ENBALE = "debug.enable"; String PARAMS_DATA_SOURCE = "dataSources"; diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala new file mode 100644 index 0000000000..5e2eb10a59 --- /dev/null +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/engine/JobInstance.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.protocol.engine + +case class JobInstance( + status: String, + instances: String, + jobReqId: String, + createTimestamp: Long, + instanceRegistryTimestamp: Long +) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala index 9b2be16ef7..3affc351d9 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala @@ -45,6 +45,14 @@ object TaskUtils { } } else params.put(key, waitToAdd) + private def clearMap(params: util.Map[String, AnyRef], key: String): Unit = + if (params != null && params.containsKey(key)) { + params.get(key) match { + case map: util.Map[String, AnyRef] => map.clear() + case _ => params.put(key, new util.HashMap[String, AnyRef]()) + } + } + private def getConfigurationMap( params: util.Map[String, AnyRef], key: String @@ -84,13 +92,20 @@ object TaskUtils { def addStartupMap(params: util.Map[String, AnyRef], startupMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, startupMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + def clearStartupMap(params: util.Map[String, AnyRef]): Unit = { + val configurationMap = getMap(params, TaskConstant.PARAMS_CONFIGURATION) + if (!configurationMap.isEmpty) { + clearMap(configurationMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + } + } + def addRuntimeMap(params: util.Map[String, AnyRef], runtimeMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, runtimeMap, TaskConstant.PARAMS_CONFIGURATION_RUNTIME) def addSpecialMap(params: util.Map[String, AnyRef], specialMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, specialMap, TaskConstant.PARAMS_CONFIGURATION_SPECIAL) - // tdoo + // todo def getLabelsMap(params: util.Map[String, AnyRef]): util.Map[String, AnyRef] = getMap(params, TaskConstant.LABELS) diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala 
b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala index 95c7a81873..ad30484c46 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/ZuulEntranceUtils.scala @@ -23,7 +23,7 @@ object ZuulEntranceUtils { private val INSTANCE_SPLIT_TOKEN = "_" - private val EXEC_ID = "exec_id" + val EXEC_ID = "exec_id" private val SPLIT_LEN = 3 diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala index 6e9ecbd26f..b123682b56 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/AbstractGroup.scala @@ -23,11 +23,18 @@ abstract class AbstractGroup extends Group { private var _status: GroupStatus = _ private var maxRunningJobs: Int = _ + private var maxAllowRunningJobs: Int = 0 private var maxAskExecutorTimes: Long = 0L def setMaxRunningJobs(maxRunningJobs: Int): Unit = this.maxRunningJobs = maxRunningJobs def getMaxRunningJobs: Int = maxRunningJobs + def setMaxAllowRunningJobs(maxAllowRunningJobs: Int): Unit = this.maxAllowRunningJobs = + maxAllowRunningJobs + + def getMaxAllowRunningJobs: Int = + if (maxAllowRunningJobs <= 0) maxRunningJobs else Math.min(maxAllowRunningJobs, maxRunningJobs) + def setMaxAskExecutorTimes(maxAskExecutorTimes: Long): Unit = this.maxAskExecutorTimes = maxAskExecutorTimes diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala index 4edc1d5d17..26087d99f0 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/SchedulerEventState.scala @@ -38,4 +38,8 @@ object SchedulerEventState extends Enumeration { SchedulerEventState.withName(jobState) ) + def isInitedByStr(jobState: String): Boolean = SchedulerEventState.withName(jobState) == Inited + + def isRunningByStr(jobState: String): Boolean = isRunning(SchedulerEventState.withName(jobState)) + } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala index d541d8a2eb..fcab44a731 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala @@ -27,6 +27,7 @@ import org.apache.linkis.scheduler.executer.Executor import org.apache.linkis.scheduler.future.{BDPFuture, BDPFutureTask} import org.apache.linkis.scheduler.queue._ +import java.util import java.util.concurrent.{ExecutorService, Future} import scala.beans.BeanProperty @@ -122,9 +123,10 @@ class FIFOUserConsumer( } var event: Option[SchedulerEvent] = getWaitForRetryEvent if (event.isEmpty) { - val completedNums = runningJobs.filter(job => job == null || job.isCompleted) - if (completedNums.length < 
1) { - Utils.tryQuietly(Thread.sleep(1000)) + val maxAllowRunningJobs = fifoGroup.getMaxAllowRunningJobs + val currentRunningJobs = runningJobs.count(e => e != null && !e.isCompleted) + if (maxAllowRunningJobs <= currentRunningJobs) { + Utils.tryQuietly(Thread.sleep(1000)) // TODO This can be further optimized by implementing a JobListener return } while (event.isEmpty) { @@ -207,6 +209,19 @@ class FIFOUserConsumer( runningJobs(index) = job } + protected def scanAllRetryJobsAndRemove(): util.List[Job] = { + val jobs = new util.ArrayList[Job]() + for (index <- runningJobs.indices) { + val job = runningJobs(index) + if (job != null && job.isJobCanRetry) { + jobs.add(job) + runningJobs(index) = null + logger.info(s"Job $job can retry, remove from runningJobs") + } + } + jobs + } + override def shutdown(): Unit = { future.cancel(true) val waitEvents = queue.getWaitingEvents diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java index fad0d83a12..308e548f27 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java @@ -23,7 +23,9 @@ public enum StorageErrorCode { FS_NOT_INIT(53001, "please init first"), INCONSISTENT_DATA(53001, "Inconsistent row data read,read %s,need rowLen %s"), - FS_OOM(53002, "OOM occurred while reading the file"); + FS_OOM(53002, "OOM occurred while reading the file"), + + FS_ERROR(53003, "Failed to operate fs"); StorageErrorCode(int errorCode, String message) { this.code = errorCode; diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java index 3047b715a0..5e56b099d7 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetReaderFactory.java @@ -49,7 +49,7 @@ public static ResultSetReader getResultSe return new StorageResultSetReader<>(resultSet, value); } - public static ResultSetReader getResultSetReader(String res) { + public static ResultSetReader getResultSetReader(String res) throws IOException { ResultSetFactory rsFactory = ResultSetFactory.getInstance(); if (rsFactory.isResultSet(res)) { ResultSet resultSet = rsFactory.getResultSet(res); @@ -58,21 +58,12 @@ public static ResultSetReader getResultSetReader(String res) { FsPath resPath = new FsPath(res); ResultSet resultSet = rsFactory.getResultSetByPath(resPath); - try { - FSFactory.getFs(resPath).init(null); - } catch (IOException e) { - logger.warn("ResultSetReaderFactory fs init failed", e); - } - ResultSetReader reader = null; - try { - reader = - ResultSetReaderFactory.getResultSetReader( - resultSet, FSFactory.getFs(resPath).read(resPath)); - } catch (IOException e) { - logger.warn("ResultSetReaderFactory fs read failed", e); - } + Fs fs = FSFactory.getFs(resPath); + fs.init(null); + ResultSetReader reader = + ResultSetReaderFactory.getResultSetReader(resultSet, fs.read(resPath)); if (reader instanceof StorageResultSetReader) { - ((StorageResultSetReader) reader).setFs(FSFactory.getFs(resPath)); + ((StorageResultSetReader) reader).setFs(fs); } return (StorageResultSetReader) reader; }
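Editor's note: with the hunk above, `getResultSetReader(String res)` no longer swallows fs init/read failures inside the factory; the `IOException` now propagates to callers. A minimal sketch of how a call site might adapt (the result path and the handling are illustrative, not part of this patch):

```java
import java.io.IOException;

import org.apache.linkis.common.io.resultset.ResultSetReader;
import org.apache.linkis.storage.resultset.ResultSetReaderFactory;

public class ResultSetReadExample {
  public static void main(String[] args) {
    // hypothetical result file path; real paths come from a job's result location
    String res = "hdfs:///tmp/linkis/hadoop/result/_0.dolphin";
    try {
      ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res);
      reader.getMetaData();
      while (reader.hasNext()) {
        System.out.println(reader.getRecord());
      }
    } catch (IOException e) {
      // before this patch the factory only logged here and handed back a
      // possibly broken reader; now the caller decides how to fail
      System.err.println("failed to open result set: " + e.getMessage());
    }
  }
}
```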
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java index 1abeaf0937..d70319c9bd 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/ResultSetWriterFactory.java @@ -51,28 +51,25 @@ org.apache.linkis.common.io.resultset.ResultSetWriter getResultSetWriter( public static Record[] getRecordByWriter( org.apache.linkis.common.io.resultset.ResultSetWriter<? extends MetaData, ? extends Record> writer, - long limit) { + long limit) + throws IOException { String res = writer.toString(); return getRecordByRes(res, limit); } - public static Record[] getRecordByRes(String res, long limit) { + public static Record[] getRecordByRes(String res, long limit) throws IOException { ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res); int count = 0; List<Record> records = new ArrayList<>(); - try { - reader.getMetaData(); - while (reader.hasNext() && count < limit) { - records.add(reader.getRecord()); - count++; - } - } catch (IOException e) { - logger.warn("ResultSetWriter getRecordByRes failed", e); + reader.getMetaData(); + while (reader.hasNext() && count < limit) { + records.add(reader.getRecord()); + count++; } return records.toArray(new Record[0]); } - public static Record getLastRecordByRes(String res) { + public static Record getLastRecordByRes(String res) throws IOException { ResultSetReader reader = ResultSetReaderFactory.getResultSetReader(res); Record record = null; try { diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java index 5109ed44df..ea513664bd 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/resultset/StorageResultSetWriter.java @@ -24,6 +24,7 @@ import org.apache.linkis.storage.*; import org.apache.linkis.storage.conf.*; import org.apache.linkis.storage.domain.*; +import org.apache.linkis.storage.exception.StorageErrorException; import org.apache.linkis.storage.utils.*; import org.apache.commons.io.IOUtils; @@ -37,6 +38,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.linkis.storage.exception.StorageErrorCode.FS_ERROR; + public class StorageResultSetWriter<K extends MetaData, V extends Record> extends ResultSetWriter<K, V> { private static final Logger logger = LoggerFactory.getLogger(StorageResultSetWriter.class); @@ -98,8 +101,9 @@ public void createNewFile() { fs.init(null); FileSystemUtils.createNewFile(storePath, proxyUser, true); outputStream = fs.write(storePath, true); - } catch (IOException e) { - logger.warn("StorageResultSetWriter createNewFile failed", e); + } catch (Exception e) { + throw new StorageErrorException( + FS_ERROR.getCode(), "StorageResultSetWriter createNewFile failed", e); } logger.info("Succeed to create a new file:{}", storePath); fileCreated = true;
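Editor's note: `createNewFile` now fails fast with a coded `StorageErrorException` (the new `FS_ERROR`, 53003) instead of logging and leaving a half-initialized writer behind. A small sketch of the wrap-and-rethrow pattern this patch introduces, using only types that appear above (the failing call is a stand-in):

```java
import org.apache.linkis.storage.exception.StorageErrorException;

import static org.apache.linkis.storage.exception.StorageErrorCode.FS_ERROR;

public class FailFastExample {

  // stand-in for an fs call that may fail; illustrative only
  static void writeResultFile(String path) throws Exception {
    throw new java.io.IOException("disk full");
  }

  public static void main(String[] args) throws Exception {
    try {
      writeResultFile("/tmp/_0.dolphin");
    } catch (Exception e) {
      // same shape as StorageResultSetWriter.createNewFile after this patch:
      // wrap any failure into a coded storage exception instead of swallowing it
      throw new StorageErrorException(FS_ERROR.getCode(), "create result file failed", e);
    }
  }
}
```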
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java index fb064a8f4f..54fd64daad 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/source/ResultsetFileSource.java @@ -47,7 +47,7 @@ record -> { if (emptyValue.equals(Dolphin.LINKIS_NULL)) { return ""; } else { - return nullValue; + return emptyValue; } } else if (r instanceof Double) { return StorageUtils.doubleToString((Double) r); diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java index 0f93cdb6ab..4c50479637 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java @@ -61,16 +61,12 @@ public static void createNewFile(FsPath filePath, boolean createParentWhenNotExi createNewFile(filePath, StorageUtils.getJvmUser(), createParentWhenNotExists); } - public static void createNewFile( - FsPath filePath, String user, boolean createParentWhenNotExists) { + public static void createNewFile(FsPath filePath, String user, boolean createParentWhenNotExists) + throws Exception { FileSystem fileSystem = (FileSystem) FSFactory.getFsByProxyUser(filePath, user); try { fileSystem.init(null); createNewFileWithFileSystem(fileSystem, filePath, user, createParentWhenNotExists); - } catch (IOException e) { - logger.warn("FileSystemUtils createNewFile failed", e); - } catch (Exception e) { - logger.warn("FileSystemUtils createNewFile failed", e); } finally { IOUtils.closeQuietly(fileSystem); } @@ -96,6 +92,30 @@ public static void createNewFileWithFileSystem( } } + /** + * create new file and set file owner by FileSystem + * + * @param fileSystem + * @param filePath + * @param user + * @param createParentWhenNotExists + */ + public static void createNewFileAndSetOwnerWithFileSystem( + FileSystem fileSystem, FsPath filePath, String user, boolean createParentWhenNotExists) + throws Exception { + if (!fileSystem.exists(filePath)) { + if (!fileSystem.exists(filePath.getParent())) { + if (!createParentWhenNotExists) { + throw new IOException( + "parent dir " + filePath.getParent().getPath() + " does not exist."); + } + mkdirs(fileSystem, filePath.getParent(), user); + } + fileSystem.createNewFile(filePath); + fileSystem.setOwner(filePath, user); + } + } + /** * Recursively create a directory * @@ -133,4 +153,39 @@ public static boolean mkdirs(FileSystem fileSystem, FsPath dest, String user) th } return true; } + + /** + * Recursively create a directory (递归创建目录) and set owner info + * + * @param fileSystem + * @param dest + * @param user + * @throws IOException + * @return + */ + public static boolean mkdirsAndSetOwner(FileSystem fileSystem, FsPath dest, String user) + throws IOException { + FsPath parentPath = dest.getParent(); + Stack<FsPath> dirsToMake = new Stack<>(); + dirsToMake.push(dest); + while (!fileSystem.exists(parentPath)) { + dirsToMake.push(parentPath); + + if (Objects.isNull(parentPath.getParent())) { + // parent path of root is null + break; + } + + parentPath = parentPath.getParent(); + } + if (!fileSystem.canExecute(parentPath)) { + throw new IOException("You have no permission to access path " + dest.getPath()); + } + while (!dirsToMake.empty()) { + FsPath path = dirsToMake.pop(); + fileSystem.mkdir(path); + fileSystem.setOwner(path, user); + } + return true; + } } diff --git 
a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java index 692ce619b2..07bc0510bc 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/StorageUtils.java @@ -272,4 +272,8 @@ public static byte[] mergeByteArrays(byte[] arr1, byte[] arr2) { System.arraycopy(arr2, 0, mergedArray, arr1.length, arr2.length); return mergedArray; } + + public static boolean isHDFSPath(FsPath fsPath) { + return HDFS.equalsIgnoreCase(fsPath.getFsType()); + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java index 0c8a3db539..2b0b20188a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java @@ -26,6 +26,7 @@ import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; import org.apache.commons.lang3.StringUtils; @@ -135,7 +136,12 @@ public static InteractiveJobDesc build(CliCtx ctx) { } if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = CliUtils.readFile(codePath); + try { + code = CliUtils.readFile(codePath); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to read file", e); + throw e; + } } executionMap.put(LinkisKeys.KEY_CODE, code); @@ -143,6 +149,9 @@ public static InteractiveJobDesc build(CliCtx ctx) { labelMap.put(LinkisKeys.KEY_CODETYPE, runType); labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); + if (ctx.getExtraMap().containsKey(CliKeys.VERSION)) { + sourceMap.put(LinkisKeys.CLI_VERSION, ctx.getExtraMap().get(CliKeys.VERSION)); + } runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); desc.setCreator(creator); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java index 9a54699165..c2d47e2b7a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java @@ -32,7 +32,6 @@ public static void writeToFile( String pathName, String fileName, String content, Boolean overWrite) { File dir = new File(pathName); - File file = new File(fileName); if (!dir.exists()) { try { @@ -47,6 +46,8 @@ public static void writeToFile( } } + File file = new File(dir.getAbsolutePath() + 
File.separator + fileName); + + if (overWrite || !file.exists()) { try { file.createNewFile(); diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java new file mode 100644 index 0000000000..4c914bc3f4 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/constant/CodeConstants.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.constant; + +public class CodeConstants { + // will be auto-appended at the end of scala code; make sure the last line is not a comment + public static String SCALA_CODE_AUTO_APPEND_CODE = "val linkisVar=123"; +}
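Editor's note: `SCALA_CODE_AUTO_APPEND_CODE` replaces the removed `linkis.scala.parse.append.code*` settings (see the GovernanceCommonConf and ScalaCodeParser hunks further down): if the submitted snippet ends with a comment, the appended assignment guarantees the last line the Scala engine sees is real code. A toy illustration of the append step (the input string is made up):

```java
import org.apache.linkis.governance.common.constant.CodeConstants;

public class AppendCodeExample {
  public static void main(String[] args) {
    String userCode = "val a = 1 + 1\n// trailing comment";
    // mirrors what ScalaCodeParser does after this patch: force the last
    // line to be a harmless statement rather than the user's comment
    String submitted = userCode + "\n" + CodeConstants.SCALA_CODE_AUTO_APPEND_CODE;
    System.out.println(submitted);
  }
}
```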
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java new file mode 100644 index 0000000000..13cbac5577 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.entity; + +public class TemplateConfKey { + + private String templateUuid; + + private String key; + + private String templateName; + + private String configValue; + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + @Override + public String toString() { + return "TemplateConfKey{" + + "templateUuid='" + + templateUuid + + '\'' + + ", key='" + + key + + '\'' + + ", templateName='" + + templateName + + '\'' + + ", configValue='" + + configValue + + '\'' + + '}'; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java index d5d97aa364..46fa8a69ef 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/job/JobRequest.java @@ -49,6 +49,9 @@ public class JobRequest { /** result location */ private String resultLocation; + /** Task status updates are ordered; if false, ordering is not checked */ + private Boolean updateOrderFlag = true; + private String observeInfo; private Map<String, Object> metrics = new HashMap<>(); @@ -205,6 +208,14 @@ public void setObserveInfo(String observeInfo) { this.observeInfo = observeInfo; } + public Boolean getUpdateOrderFlag() { + return updateOrderFlag; + } + + public void setUpdateOrderFlag(Boolean updateOrderFlag) { + this.updateOrderFlag = updateOrderFlag; + } + @Override public String toString() { return "JobRequest{" diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java new file mode 100644 index 0000000000..e8b566cda1 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.protocol.conf; + +import org.apache.linkis.protocol.message.RequestProtocol; + +public class TemplateConfRequest implements RequestProtocol { + + private String templateUuid; + + private String templateName; + + public TemplateConfRequest(String templateUuid, String templateName) { + this.templateUuid = templateUuid; + this.templateName = templateName; + } + + public TemplateConfRequest(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java new file mode 100644 index 0000000000..8822fe988d --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.governance.common.protocol.conf; + +import org.apache.linkis.governance.common.entity.TemplateConfKey; + +import java.util.ArrayList; +import java.util.List; + +public class TemplateConfResponse { + + private List<TemplateConfKey> list = new ArrayList<>(); + + public List<TemplateConfKey> getList() { + return list; + } + + public void setList(List<TemplateConfKey> list) { + this.list = list; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala index a4671eaa17..b8b156173b 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala @@ -90,10 +90,4 @@ object GovernanceCommonConf { val EC_APP_MANAGE_MODE = CommonVars("linkis.ec.app.manage.mode", "attach") - val SCALA_PARSE_APPEND_CODE_ENABLED = - CommonVars("linkis.scala.parse.append.code.enable", true).getValue - - val SCALA_PARSE_APPEND_CODE = - CommonVars("linkis.scala.parse.append.code", "val linkisVar=1").getValue - } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala index 544dfcdab6..ec7bb9e80a 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala @@ -17,7 +17,7 @@ package org.apache.linkis.governance.common.exception -import org.apache.linkis.common.exception.{ErrorException, ExceptionLevel, LinkisRuntimeException} +import org.apache.linkis.common.exception.{ExceptionLevel, LinkisRuntimeException} class GovernanceErrorException(errorCode: Int, errorMsg: String) extends LinkisRuntimeException(errorCode, errorMsg) {
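Editor's note: `TemplateConfRequest` and `TemplateConfResponse` (added above) form the request/response pair for fetching template configuration keys over RPC. A minimal local sketch of building both sides; the uuid, template name, and values are placeholders, and the actual RPC transport is not shown in this patch:

```java
import org.apache.linkis.governance.common.entity.TemplateConfKey;
import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest;
import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse;

public class TemplateConfExample {
  public static void main(String[] args) {
    // both constructors exist in this patch: by uuid, or by uuid + name
    TemplateConfRequest byUuid = new TemplateConfRequest("template-uuid-001");
    TemplateConfRequest byName = new TemplateConfRequest("template-uuid-001", "spark-template");

    // a response as the configuration service might fill it (values illustrative)
    TemplateConfResponse resp = new TemplateConfResponse();
    TemplateConfKey key = new TemplateConfKey();
    key.setTemplateUuid(byUuid.getTemplateUuid());
    key.setTemplateName(byName.getTemplateName());
    key.setKey("spark.executor.memory");
    key.setConfigValue("4g");
    resp.getList().add(key);

    resp.getList().forEach(k -> System.out.println(k.getKey() + "=" + k.getConfigValue()));
  }
}
```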
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala index 87576d5e48..d5669ad428 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala @@ -19,6 +19,7 @@ package org.apache.linkis.governance.common.paser import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.governance.common.constant.CodeConstants import org.apache.linkis.governance.common.paser.CodeType.CodeType import org.apache.commons.lang3.StringUtils @@ -86,6 +87,11 @@ abstract class CombinedEngineCodeParser extends CodeParser { } +/** + * Scala no longer uses this parser; EmptyParser is used instead. If the code ends with a comment, + * the task can get stuck + */ +@deprecated class ScalaCodeParser extends SingleCodeParser with Logging { override val codeType: CodeType = CodeType.Scala @@ -109,11 +115,9 @@ class ScalaCodeParser extends SingleCodeParser with Logging { case _ => } if (statementBuffer.nonEmpty) codeBuffer.append(statementBuffer.mkString("\n")) - - // Append code `val linkisVar=1` in ends to prevent bugs that do not exit tasks for a long time - if (GovernanceCommonConf.SCALA_PARSE_APPEND_CODE_ENABLED) { - codeBuffer.append(GovernanceCommonConf.SCALA_PARSE_APPEND_CODE) - } + // Make sure the last line is not a comment + codeBuffer.append("\n") + codeBuffer.append(CodeConstants.SCALA_CODE_AUTO_APPEND_CODE) codeBuffer.toArray } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala index 2e44739787..df197ddb2c 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/protocol/job/JobReqProcotol.scala @@ -51,3 +51,10 @@ class RequestOneJob extends JobReq { } case class RequestAllJob(instance: String) extends JobReq + +case class RequestFailoverJob( + reqMap: util.Map[String, java.lang.Long], + statusList: util.List[String], + startTimestamp: Long, + limit: Int = 10 +) extends JobReq diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala index db7045baec..04adf3446c 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala @@ -29,7 +29,8 @@ class ScalaCodeParserTest { "val codeBuffer = new ArrayBuffer[String]()\n val statementBuffer = new ArrayBuffer[String]()" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(scalaCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.size == 3) + } @Test @@ -40,7 +41,7 @@ " def addInt( a:Int, b:Int )\n var sum:Int = 0\n sum = a + b\n return sum\n }" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(abnormalCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.length == 3) } @@ -53,7 +54,7 @@ val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(importCode) - Assertions.assertTrue(array.length == 3) + Assertions.assertTrue(array.length == 4) } @@ -67,7 +68,7 @@ val scalaCodeParser = new ScalaCodeParser val arrayResult1 = scalaCodeParser.parse(specialCodeExp1) - Assertions.assertTrue(arrayResult1.length == 3) + 
Assertions.assertTrue(arrayResult1.length == 4) val specialCodeExp2 = " @BeanProperty\n var id: Long = _\n @BeanProperty\n var status: Int = 0\n " + ".append(data, that.data)\n .isEquals\n }" val arrayResult2 = scalaCodeParser.parse(specialCodeExp2) - Assertions.assertTrue(arrayResult2.length == 2) + Assertions.assertTrue(arrayResult2.length == 3) } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala index 672c0e8acb..5b23d01fc3 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala @@ -176,8 +176,15 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { .findAvailPortByRange(GovernanceCommonConf.ENGINE_CONN_PORT_RANGE.getValue) .toString - var springConf = Map("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") - + var springConf = + Map[String, String]("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") + val properties = + PortUtils.readFromProperties(Configuration.getLinkisHome + "/conf/version.properties") + if (StringUtils.isNotBlank(properties.getProperty("version"))) { + springConf += ("eureka.instance.metadata-map.linkis.app.version" -> properties.getProperty( + "version" + )) + } request.creationDesc.properties.asScala.filter(_._1.startsWith("spring.")).foreach { case (k, v) => springConf = springConf + (k -> v) diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala index b3715a8910..21dd6cd706 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala @@ -17,15 +17,16 @@ package org.apache.linkis.ecm.core.utils -import org.apache.linkis.common.utils.Utils +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils -import java.io.IOException +import java.io.{BufferedReader, FileReader, IOException} import java.net.ServerSocket +import java.util.Properties -object PortUtils { +object PortUtils extends Logging { /** * portRange: '-' is the separator @@ -62,4 +63,23 @@ object PortUtils { Utils.tryFinally(socket.getLocalPort)(IOUtils.closeQuietly(socket)) } + def readFromProperties(propertiesFile: String): Properties = { + val properties: Properties = new Properties + var reader: BufferedReader = null; + try { + reader = new BufferedReader(new FileReader(propertiesFile)) + properties.load(reader) + } catch { + case e: Exception => + logger.warn(s"loading version failed with path $propertiesFile error:$e") 
+    } finally {
+      try if (reader != null) reader.close()
+      catch {
+        case e: Exception =>
+          logger.warn(s"Failed to close buffered reader: ${e.getMessage}")
+      }
+    }
+    properties
+  }
+}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml
index 41022d30da..99458c8afc 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml
@@ -51,6 +51,12 @@
       <artifactId>linkis-rpc</artifactId>
       <version>${project.version}</version>
       <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java
index ca4412824d..0c745ef64d 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/errorcode/EngineconnServerErrorCodeSummary.java
@@ -33,7 +33,15 @@
       11110,
       "the parameters of engineConnInstance and ticketId are both not exists.(engineConnInstance 和ticketId 的参数都不存在.)"),
   LOG_IS_NOT_EXISTS(11110, "Log directory {0} does not exists.(日志目录 {0} 不存在.)"),
-  FAILED_TO_DOWNLOAD(911115, "failed to downLoad(下载失败)");
+  FAILED_TO_DOWNLOAD(911115, "failed to downLoad(下载失败)"),
+  FILE_IS_OVERSIZE(911116, "Download file has exceeded 100MB(下载文件已超过100M)"),
+  PARAMETER_NOT_NULL(911117, "Parameter {0} cannot be empty (参数 {0} 不能为空)"),
+  LOGTYPE_ERROR(
+      911118,
+      "logType only supports stdout, stderr, gc, yarnApp(logType仅支持stdout,stderr,gc,yarnApp)"),
+  NOT_PERMISSION(
+      911119, "User {0} has no permission to download logs on ECM {1}(用户 {0} 无权限下载 ECM {1} 日志)"),
+  ;
 
   /** (errorCode)错误码 */
   private final int errorCode;
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java
new file mode 100644
index 0000000000..d4fc0a49e1
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/restful/ECMRestfulApi.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.ecm.restful;
+
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.utils.ModuleUserUtils;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.http.Consts;
+
+import org.springframework.web.bind.annotation.*;
+
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.attribute.FileOwnerAttributeView;
+import java.nio.file.attribute.UserPrincipal;
+import java.text.MessageFormat;
+
+import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport;
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary.*;
+
+@Api(tags = "ECM")
+@RequestMapping(path = "/engineconnManager")
+@RestController
+public class ECMRestfulApi {
+
+  private final Logger logger = LoggerFactory.getLogger(ECMRestfulApi.class);
+
+  /**
+   * Reason for using the GET method: gateway forwarding rules were added for this path, and they
+   * only support GET requests.
+   *
+   * @param req
+   * @param response
+   * @param emInstance
+   * @param instance
+   * @param logDirSuffix
+   * @param logType
+   * @throws IOException
+   */
+  @ApiOperation(
+      value = "downloadEngineLog",
+      notes = "download engine log",
+      response = Message.class)
+  @ApiImplicitParams({
+    @ApiImplicitParam(
+        name = "emInstance",
+        required = true,
+        dataType = "String",
+        example = "xxx0002:9102"),
+    @ApiImplicitParam(
+        name = "instance",
+        required = true,
+        dataType = "String",
+        example = "xxx0002:35873"),
+    @ApiImplicitParam(name = "logDirSuffix", required = true, dataType = "String"),
+    @ApiImplicitParam(name = "logType", required = true, dataType = "String")
+  })
+  @ApiOperationSupport(ignoreParameters = {"json"})
+  @RequestMapping(path = "/downloadEngineLog", method = RequestMethod.GET)
+  public Message downloadEngineLog(
+      HttpServletRequest req,
+      HttpServletResponse response,
+      @RequestParam(value = "emInstance") String emInstance,
+      @RequestParam(value = "instance") String instance,
+      @RequestParam(value = "logDirSuffix") String logDirSuffix,
+      @RequestParam(value = "logType") String logType)
+      throws IOException {
+    String userName = ModuleUserUtils.getOperationUser(req, "downloadEngineLog");
+    if (StringUtils.isBlank(instance)) {
+      return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "instance"));
+    }
+    if (StringUtils.isBlank(logDirSuffix)) {
+      return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "logDirSuffix"));
+    }
+    if (StringUtils.isBlank(logType)) {
+      return Message.error(MessageFormat.format(PARAMETER_NOT_NULL.getErrorDesc(), "logType"));
+    } else if (!logType.equals("stdout")
+        && !logType.equals("stderr")
+        && !logType.equals("gc")
+        && !logType.equals("yarnApp")) {
+      return Message.error(LOGTYPE_ERROR.getErrorDesc());
+    }
+    // Get the owner of the requested log file
+    FileOwnerAttributeView ownerView =
+        Files.getFileAttributeView(
+            Paths.get(logDirSuffix + "/" + logType), FileOwnerAttributeView.class);
+    UserPrincipal owner = ownerView.getOwner();
+    if (!owner.getName().equals(userName)
+        && Configuration.isNotAdmin(userName)
+        && Configuration.isNotJobHistoryAdmin(userName)) {
+      return Message.error(
+          MessageFormat.format(NOT_PERMISSION.getErrorDesc(), userName, emInstance));
+    }
+    File inputFile = new File(logDirSuffix, logType);
+    if (!inputFile.exists()) {
+      return Message.error(MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc(), logDirSuffix));
+    } else {
+      long fileSizeInBytes = inputFile.length();
+      long fileSizeInMegabytes = fileSizeInBytes / (1024 * 1024);
+      if (fileSizeInMegabytes > 100) {
+        return Message.error(MessageFormat.format(FILE_IS_OVERSIZE.getErrorDesc(), logDirSuffix));
+      }
+      ServletOutputStream outputStream = null;
+      FileInputStream inputStream = null;
+      BufferedInputStream fis = null;
+      PrintWriter writer = null;
+      try {
+        inputStream = new FileInputStream(inputFile);
+        fis = new BufferedInputStream(inputStream);
+        byte[] buffer = new byte[1024];
+        int bytesRead = 0;
+        response.setCharacterEncoding(Consts.UTF_8.toString());
+        java.nio.file.Path source = Paths.get(inputFile.getPath());
+        response.addHeader("Content-Type", Files.probeContentType(source));
+        // filename eg:xxx002_11529_stdout.txt
+        response.addHeader(
+            "Content-Disposition",
+            "attachment;filename=" + instance.replace(":", "_") + "_" + logType + ".txt");
+        response.addHeader("Content-Length", fileSizeInBytes + "");
+        outputStream = response.getOutputStream();
+        while ((bytesRead = fis.read(buffer, 0, 1024)) != -1) {
+          outputStream.write(buffer, 0, bytesRead);
+        }
+      } catch (IOException e) {
+        logger.warn("Failed to download engine log:", e);
+        response.reset();
+        response.setCharacterEncoding(Consts.UTF_8.toString());
+        response.setContentType("text/plain; charset=utf-8");
+        writer = response.getWriter();
+        writer.append("error(错误):" + e.getMessage());
+        writer.flush();
+      } finally {
+        if (outputStream != null) {
+          outputStream.flush();
+        }
+        IOUtils.closeQuietly(outputStream);
+        IOUtils.closeQuietly(fis);
+        IOUtils.closeQuietly(inputStream);
+      }
+      return Message.ok();
+    }
+  }
+}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java
index a6a932a578..5a0ade21dc 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java
@@ -189,6 +189,7 @@ private String getYarnAppRegexByEngineType(String engineType) {
       case "sqoop":
         regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue();
         break;
+      case "flink":
       case "hive":
         regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue();
         break;
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala
index 3959eb942b..cb04f364fb 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala
@@ -62,7 +62,6 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging {
   ): String = {
     val props = engineExecutionContext.getProperties
     if (null != props && props.containsKey(GovernanceConstant.TASK_RESOURCES_STR)) {
-      val workDir = ComputationEngineUtils.getCurrentWorkDir
       val jobId = engineExecutionContext.getJobId
       props.get(GovernanceConstant.TASK_RESOURCES_STR) match {
         case resources: util.List[Object] =>
@@ -71,9 +70,7 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging {
             val fileName = resource.get(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR).toString
             val resourceId = resource.get(GovernanceConstant.TASK_RESOURCE_ID_STR).toString
             val version = resource.get(GovernanceConstant.TASK_RESOURCE_VERSION_STR).toString
-            val fullPath =
-              if (workDir.endsWith(seperator)) pathType + workDir + fileName
-              else pathType + workDir + seperator + fileName
+            val fullPath = fileName
             val response = Utils.tryCatch {
               bmlClient.downloadShareResource(processUser, resourceId, version, fullPath, true)
             } {
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
index c072c32794..bcd423fd21 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
@@ -33,6 +33,12 @@ object ComputationExecutorConf {
     "Maximum number of tasks executed by the synchronization EC"
   )
 
+  val PRINT_TASK_PARAMS_SKIP_KEYS = CommonVars(
+    "linkis.engineconn.print.task.params.skip.keys",
+    "jobId",
+    "Param keys that will not be printed in the job log"
+  )
+
   val ENGINE_PROGRESS_FETCH_INTERVAL = CommonVars(
     "wds.linkis.engineconn.progresss.fetch.interval-in-seconds",
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala
index f59adaadef..fe98e3328e 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala
@@ -17,7 +17,10 @@
 
 package org.apache.linkis.engineconn.computation.executor.cs
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.cs.client.service.CSResourceService
+import org.apache.linkis.engineconn.common.conf.EngineConnConf
+import org.apache.linkis.governance.common.utils.GovernanceConstant
 
 import org.apache.commons.lang3.StringUtils
 
@@ -27,7 +30,7 @@ import java.util.regex.Pattern
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
-class CSResourceParser {
+class CSResourceParser extends Logging {
 
   private val pb = Pattern.compile("cs://[^\\s\"]+[$\\s]{0,1}", Pattern.CASE_INSENSITIVE)
 
@@ -47,7 +50,6 @@ class CSResourceParser {
       nodeNameStr: String
   ): String = {
 
-    // TODO getBMLResource
     val bmlResourceList =
       CSResourceService.getInstance().getUpstreamBMLResource(contextIDValueStr, nodeNameStr)
 
@@ -56,23 +58,25 @@ class CSResourceParser {
     val preFixNames = new ArrayBuffer[String]()
     val parsedNames = new ArrayBuffer[String]()
 
+    val prefixName = System.currentTimeMillis().toString + "_"
     preFixResourceNames.foreach { preFixResourceName =>
       val resourceName = preFixResourceName.replace(PREFIX, "").trim
       val bmlResourceOption =
         bmlResourceList.asScala.find(_.getDownloadedFileName.equals(resourceName))
       if (bmlResourceOption.isDefined) {
+        val replacementName = EngineConnConf.getEngineTmpDir + prefixName + resourceName
        val bmlResource = bmlResourceOption.get
         val map = new util.HashMap[String, Object]()
-        map.put("resourceId", bmlResource.getResourceId)
-        map.put("version", bmlResource.getVersion)
-        map.put("fileName", resourceName)
+        map.put(GovernanceConstant.TASK_RESOURCE_ID_STR, bmlResource.getResourceId)
+        map.put(GovernanceConstant.TASK_RESOURCE_VERSION_STR, bmlResource.getVersion)
+        map.put(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR, replacementName)
         parsedResources.add(map)
         preFixNames.append(preFixResourceName)
-        parsedNames.append(resourceName)
+        parsedNames.append(replacementName)
+        logger.warn(s"Replace cs file from {$preFixResourceName} to {$replacementName}")
       }
-    }
 
-    props.put("resources", parsedResources)
+    props.put(GovernanceConstant.TASK_RESOURCES_STR, parsedResources)
     StringUtils.replaceEach(code, preFixNames.toArray, parsedNames.toArray)
   }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
index 940973be61..bd6d44e4a6 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
@@ -22,6 +22,7 @@ import org.apache.linkis.common.log.LogUtils
 import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor
 import org.apache.linkis.engineconn.acessible.executor.listener.event.{
+  TaskLogUpdateEvent,
   TaskResponseErrorEvent,
   TaskStatusChangedEvent
 }
@@ -40,7 +41,14 @@
 import org.apache.linkis.governance.common.paser.CodeParser
 import org.apache.linkis.governance.common.protocol.task.{EngineConcurrentInfo, RequestTask}
 import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils}
 import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
-import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel
+import org.apache.linkis.manager.label.entity.engine.{
+  CodeLanguageLabel,
+  EngineType,
+  EngineTypeLabel,
+  RunType,
+  UserCreatorLabel
+}
+import org.apache.linkis.manager.label.utils.LabelUtil
 import org.apache.linkis.protocol.engine.JobProgressInfo
 import org.apache.linkis.scheduler.executer._
@@ -50,6 +58,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicInteger
 
+import scala.collection.JavaConverters._
+
 import com.google.common.cache.{Cache, CacheBuilder}
 
 abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
@@ -132,6 +142,12 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
 
   override def close(): Unit = {
     if (null != lastTask) CLOSE_LOCKER.synchronized {
+      listenerBusContext.getEngineConnSyncListenerBus.postToAll(
+        TaskLogUpdateEvent(
+          lastTask.getTaskId,
+          LogUtils.generateERROR("EC exits unexpectedly and actively kills the task")
+        )
+      )
       killTask(lastTask.getTaskId)
     } else {
@@ -169,9 +185,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
     Utils.tryFinally {
       transformTaskStatus(engineConnTask, ExecutionNodeStatus.Running)
       val engineExecutionContext = createEngineExecutionContext(engineConnTask)
+
+      val engineCreationContext = EngineConnObject.getEngineCreationContext
+
       var hookedCode = engineConnTask.getCode
       Utils.tryCatch {
-        val engineCreationContext = EngineConnObject.getEngineCreationContext
         ComputationExecutorHook.getComputationExecutorHooks.foreach(hook => {
           hookedCode =
             hook.beforeExecutorExecute(engineExecutionContext, engineCreationContext, hookedCode)
@@ -182,12 +200,24 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
       } else {
         logger.info(s"hooked after code: $hookedCode ")
       }
+
+      // Print the task params to the job log.
+      // Note: the Spark engine logs its special conf itself, in
+      // org.apache.linkis.engineplugin.spark.executor.SparkEngineConnExecutor.executeLine.
+      Utils.tryAndWarn {
+        val engineType = LabelUtil.getEngineType(engineCreationContext.getLabels())
+        EngineType.mapStringToEngineType(engineType) match {
+          case EngineType.HIVE | EngineType.TRINO => printTaskParamsLog(engineExecutionContext)
+          case _ =>
+        }
+      }
+
       val localPath = EngineConnConf.getLogDir
       engineExecutionContext.appendStdout(
         LogUtils.generateInfo(
           s"EngineConn local log path: ${DataWorkCloudApplication.getServiceInstance.toString} $localPath"
         )
       )
+
       var response: ExecuteResponse = null
       val incomplete = new StringBuilder
       val codes =
@@ -244,6 +274,11 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
           case s: SuccessExecuteResponse =>
             succeedTasks.increase()
             s
+          case incompleteExecuteResponse: IncompleteExecuteResponse =>
+            ErrorExecuteResponse(
+              s"The task cannot return an incomplete response: ${incompleteExecuteResponse.message}",
+              null
+            )
           case _ => response
         }
         response
@@ -271,6 +306,7 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
           TaskResponseErrorEvent(engineConnTask.getTaskId, errorExecuteResponse.message)
         )
         transformTaskStatus(engineConnTask, ExecutionNodeStatus.Failed)
+      case _ => logger.warn(s"task got response $executeResponse")
     }
     executeResponse
   }
@@ -335,6 +371,30 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000)
     }
   }
 
+  /**
+   * Print the task params info to the job log.
+   *
+   * @param engineExecutorContext
+   * @return
+   *   Unit
+   */
+
+  def printTaskParamsLog(engineExecutorContext: EngineExecutionContext): Unit = {
+    val sb = new StringBuilder
+
+    EngineConnObject.getEngineCreationContext.getOptions.asScala.foreach({ case (key, value) =>
+      // Skip the jobId key: it is the jobId recorded when the EC was created, not the current job's id
+      if (!ComputationExecutorConf.PRINT_TASK_PARAMS_SKIP_KEYS.getValue.contains(key)) {
+        sb.append(s"${key}=${value.toString}\n")
+      }
+    })
+
+    sb.append("\n")
+    engineExecutorContext.appendStdout(
+      LogUtils.generateInfo(s"Your job is executed with configs:\n${sb.toString()}\n")
+    )
+  }
+
   def transformTaskStatus(task: EngineConnTask, newStatus: ExecutionNodeStatus): Unit = {
     val oriStatus = task.getStatus
     logger.info(s"task ${task.getTaskId} from status $oriStatus to new status $newStatus")
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala
index f96896f557..4446bdc672 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala
@@ -91,4 +91,12 @@ object ComputationEngineConnMetrics {
     getTotalBusyTimeMills(nodeStatus) + getTotalIdleTimeMills(nodeStatus)
 
   def getUnlockToShutdownDurationMills(): Long = unlockToShutdownDurationMills.get()
+
+  def getLastUnlockTimestamp(nodeStatus: NodeStatus): Long = {
+    nodeStatus match {
+      case NodeStatus.Unlock => lastUnlockTimeMills
+      case _ => 0
+    }
+  }
+
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala
index 010ced97fd..e5d74282de 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala
@@ -66,6 +66,10 @@ class DefaultNodeHeartbeatMsgManager extends NodeHeartbeatMsgManager with Logging
           ECConstants.EC_TOTAL_LOCK_TIME_MILLS_KEY,
           ComputationEngineConnMetrics.getTotalLockTimeMills(status).asInstanceOf[Object]
         )
+        msgMap.put(
+          ECConstants.EC_LAST_UNLOCK_TIMESTAMP,
+          ComputationEngineConnMetrics.getLastUnlockTimestamp(status).asInstanceOf[Object]
+        )
       case _ =>
     }
     val engineParams = EngineConnObject.getEngineCreationContext.getOptions
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
index bc738d5498..23d6ff2586 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
@@ -187,7 +187,7 @@ class TaskExecutionServiceImpl
     if (!lockService.isLockExist(requestTask.getLock)) {
       logger.error(s"Lock ${requestTask.getLock} not exist, cannot execute.")
       return ErrorExecuteResponse(
-        "Lock not exixt",
+        "Lock not exist",
         new EngineConnExecutorErrorException(
           EngineConnExecutorErrorCode.INVALID_LOCK,
           "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)."
@@ -422,31 +422,51 @@ class TaskExecutionServiceImpl
   ): Future[_] = {
     val sleepInterval = ComputationExecutorConf.ENGINE_PROGRESS_FETCH_INTERVAL.getValue
     scheduler.submit(new Runnable {
-      override def run(): Unit = Utils.tryAndWarn {
+      override def run(): Unit = {
         Utils.tryQuietly(Thread.sleep(TimeUnit.MILLISECONDS.convert(1, TimeUnit.SECONDS)))
         while (null != taskFuture && !taskFuture.isDone) {
-          if (
-            ExecutionNodeStatus.isCompleted(task.getStatus) || ExecutionNodeStatus
-              .isRunning(task.getStatus)
-          ) {
-            val progressResponse = taskProgress(task.getTaskId)
-            val resourceResponse = buildResourceMap(task)
-            val extraInfoMap = buildExtraInfoMap(task)
-            // todo add other info
-            val resourceMap =
-              if (null != resourceResponse) resourceResponse.getResourceMap else null
-
-            val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo(
-              progressResponse.execId,
-              progressResponse.progress,
-              progressResponse.progressInfo,
-              resourceMap,
-              extraInfoMap
-            )
-
-            sendToEntrance(task, respRunningInfo)
-            Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS))
+          if (!ExecutionNodeStatus.isCompleted(task.getStatus)) {
+            Utils.tryAndWarn {
+              val progressResponse = Utils.tryCatch(taskProgress(task.getTaskId)) {
+                case e: Exception =>
+                  logger.info("Failed to get progress", e)
+                  null
+              }
+              val resourceResponse = Utils.tryCatch(buildResourceMap(task)) { case e: Exception =>
+                logger.info("Failed to get resource", e)
+                null
+              }
+              val extraInfoMap = Utils.tryCatch(buildExtraInfoMap(task)) { case e: Exception =>
+                logger.info("Failed to get extra info ", e)
+                null
+              }
+              val resourceMap =
+                if (null != resourceResponse) resourceResponse.getResourceMap else null
+
+              /**
+               * It is guaranteed that there is always a progress response, and that its progress
+               * is greater than or equal to 0.1.
+               */
+              val newProgressResponse = if (null == progressResponse) {
+                ResponseTaskProgress(task.getTaskId, 0.1f, null)
+              } else if (progressResponse.progress < 0.1f) {
+                ResponseTaskProgress(task.getTaskId, 0.1f, progressResponse.progressInfo)
+              } else {
+                progressResponse
+              }
+              val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo(
+                newProgressResponse.execId,
+                newProgressResponse.progress,
+                newProgressResponse.progressInfo,
+                resourceMap,
+                extraInfoMap
+              )
+              sendToEntrance(task, respRunningInfo)
+            }
           }
+          Utils.tryQuietly(
+            Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS))
+          )
         }
       }
     })
@@ -499,7 +519,7 @@ class TaskExecutionServiceImpl
   }
 
   override def taskProgress(taskID: String): ResponseTaskProgress = {
-    var response = ResponseTaskProgress(taskID, 0, null)
+    var response = ResponseTaskProgress(taskID, 0.01f, null)
     if (StringUtils.isBlank(taskID)) return response
     val executor = taskIdCache.getIfPresent(taskID)
     if (null != executor) {
@@ -514,11 +534,9 @@ class TaskExecutionServiceImpl
             ResponseTaskProgress(taskID, progress, executor.getProgressInfo(taskID))
           )
         }
-      } else {
-        response = ResponseTaskProgress(taskID, -1, null)
       }
     } else {
-      logger.error(s"Executor of taskId : $taskID is not cached.")
+      logger.info(s"Executor of taskId : $taskID is not cached.")
     }
     response
   }
@@ -606,7 +624,7 @@ class TaskExecutionServiceImpl
     logger.warn("Unknown event : " + BDPJettyServerHelper.gson.toJson(event))
   }
 
-  override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = {
+  override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = Utils.tryAndWarn {
     if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) {
       if (null != logUpdateEvent && StringUtils.isNotBlank(logUpdateEvent.taskId)) {
         val task = getTaskByTaskId(logUpdateEvent.taskId)
@@ -663,32 +681,33 @@ class TaskExecutionServiceImpl
     }
   }
 
-  override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = {
-    if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) {
-      val task = getTaskByTaskId(taskProgressUpdateEvent.taskId)
-      if (null != task) {
-        val resourceResponse = buildResourceMap(task)
-        val extraInfoMap = buildExtraInfoMap(task)
+  override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit =
+    Utils.tryAndWarn {
+      if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) {
+        val task = getTaskByTaskId(taskProgressUpdateEvent.taskId)
+        if (null != task) {
+          val resourceResponse = buildResourceMap(task)
+          val extraInfoMap = buildExtraInfoMap(task)
 
-        val resourceMap = if (null != resourceResponse) resourceResponse.getResourceMap else null
+          val resourceMap = if (null != resourceResponse) resourceResponse.getResourceMap else null
 
-        val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo(
-          taskProgressUpdateEvent.taskId,
-          taskProgressUpdateEvent.progress,
-          taskProgressUpdateEvent.progressInfo,
-          resourceMap,
-          extraInfoMap
-        )
+          val respRunningInfo: ResponseTaskRunningInfo = new ResponseTaskRunningInfo(
+            taskProgressUpdateEvent.taskId,
+            taskProgressUpdateEvent.progress,
+            taskProgressUpdateEvent.progressInfo,
+            resourceMap,
+            extraInfoMap
+          )
 
-        sendToEntrance(task, respRunningInfo)
-      } else {
-        logger.error(
-          "Task cannot null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON
-            .toJson(taskProgressUpdateEvent)
-        )
+          sendToEntrance(task, respRunningInfo)
+        } else {
+          logger.error(
+            "Task cannot be null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON
+              .toJson(taskProgressUpdateEvent)
+          )
+        }
       }
     }
-  }
 
   override def onResultSetCreated(taskResultCreateEvent: TaskResultCreateEvent): Unit = {
     logger.info(s"start to deal result event ${taskResultCreateEvent.taskId}")
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala
index d06e8ac077..15e70315e3 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala
@@ -39,10 +39,10 @@ class ECTaskKillHandler extends MonitorHandler with Logging {
     while (elements.hasNext) {
       val element = elements.next
       Utils.tryCatch {
-        doKill(element)
         logger.error(
           s"ERROR: entrance : ${element.getUpstreamConnection().getUpstreamServiceInstanceName()} lose connect, will kill job : ${element.getKey()}"
         )
+        doKill(element)
       } { t =>
         logger.error("Failed to kill job: " + element.getKey, t)
       }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala
index eefc69f5bb..43f7ab4446 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala
@@ -61,6 +61,8 @@ object EngineConnConf {
 
   val ENGINE_CONN_LOCAL_LOG_DIRS_KEY = CommonVars("wds.linkis.engine.logs.dir.key", "LOG_DIRS")
 
+  val ENGINE_CONN_LOCAL_TMP_DIR = CommonVars("wds.linkis.engine.tmp.dir", "TEMP_DIRS")
+
   val ENGINE_CONN_CREATION_WAIT_TIME =
     CommonVars("wds.linkis.engine.connector.init.time", new TimeType("8m"))
 
@@ -84,6 +86,8 @@ object EngineConnConf {
 
   def getWorkHome: String = System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue)
 
+  def getEngineTmpDir: String = System.getenv(ENGINE_CONN_LOCAL_TMP_DIR.getValue)
+
   def getLogDir: String = {
     val logDir = System.getenv(ENGINE_CONN_LOCAL_LOG_DIRS_KEY.getValue)
     if (StringUtils.isNotEmpty(logDir)) logDir else new File(getWorkHome, "logs").getPath
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
index 95a01202e8..40cf314853 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
@@ -37,7 +37,7 @@ object AccessibleExecutorConfiguration {
   val ENGINECONN_LOG_SEND_SIZE = CommonVars[Int]("wds.linkis.engineconn.log.send.cache.size", 300)
 
   val ENGINECONN_MAX_FREE_TIME =
-    CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("30m"))
+    CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("10m"))
 
   val ENGINECONN_LOCK_CHECK_INTERVAL =
     CommonVars("wds.linkis.engineconn.lock.free.interval", new TimeType("3m"))
diff --git a/linkis-computation-governance/linkis-entrance/pom.xml b/linkis-computation-governance/linkis-entrance/pom.xml
index dea4d1d4d7..bda458c356 100644
--- a/linkis-computation-governance/linkis-entrance/pom.xml
+++ b/linkis-computation-governance/linkis-entrance/pom.xml
@@ -90,6 +90,12 @@
       <version>${project.version}</version>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.linkis</groupId>
+      <artifactId>linkis-ps-common-lock</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
index 0bf27a68b3..86b1a91f7a 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
@@ -42,6 +42,7 @@
 import org.apache.linkis.entrance.persistence.QueryPersistenceManager;
 import org.apache.linkis.entrance.persistence.ResultSetEngine;
 import org.apache.linkis.entrance.scheduler.EntranceGroupFactory;
+import org.apache.linkis.entrance.scheduler.EntranceParallelConsumerManager;
 import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext;
 import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder;
 import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder$;
@@ -51,7 +52,6 @@
 import org.apache.linkis.scheduler.executer.ExecutorManager;
 import org.apache.linkis.scheduler.queue.ConsumerManager;
 import org.apache.linkis.scheduler.queue.GroupFactory;
-import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager;
 import org.apache.linkis.scheduler.queue.parallelqueue.ParallelScheduler;
 
 import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
@@ -150,7 +150,7 @@ public EntranceInterceptor[] entranceInterceptors() {
       new ScalaCodeInterceptor(),
       new SQLLimitEntranceInterceptor(),
       new CommentInterceptor(),
-      new SetTenantLabelInterceptor(),
+      // new SetTenantLabelInterceptor(),
       new UserCreatorIPCheckInterceptor()
     };
   }
@@ -190,7 +190,7 @@ public GroupFactory groupFactory() {
   @Bean
   @ConditionalOnMissingBean
   public ConsumerManager consumerManager() {
-    return new ParallelConsumerManager(
+    return new EntranceParallelConsumerManager(
         ENTRANCE_SCHEDULER_MAX_PARALLELISM_USERS().getValue(), "EntranceJobScheduler");
   }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java
index cb37279c11..bee17b8ed4 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/constant/ServiceNameConsts.java
@@ -26,4 +26,6 @@ private ServiceNameConsts() {}
 
   public static final String ENTRANCE_SERVER = "entranceServer";
 
   public static final String ENTRANCE_INTERCEPTOR = "entranceInterceptors";
+
+  public static final String ENTRANCE_FAILOVER_SERVER = "entranceFailoverServer";
 }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java
index 2f045a1760..51a522d3d2 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java
@@ -62,6 +62,11 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode {
   INVALID_RESULTSETS(20053, "Invalid resultsets, cannot use cache(结果集无效,无法使用 cache)"),
 
   SUBMITTING_QUERY_FAILED(30009, "Submitting the query failed(提交查询失败)!"),
+
+  SUBMIT_CODE_ISEMPTY(
+      30010,
+      "The execution code submitted is empty after preprocessing, please check whether the code is empty or contains only comments(提交的执行代码,经过预处理后为空,请检查执行的代码是否为空或则只有注解)!"),
+
   QUERY_STATUS_FAILED(50081, "Query from jobHistory status failed(从 jobHistory 状态查询失败)"),
   GET_QUERY_RESPONSE(50081, "Get query response incorrectly(获取查询响应结果不正确)"),
   QUERY_TASKID_ERROR(50081, "Query task of taskId:{0} error(查询任务id:{}的任务出错)"),
@@ -71,7 +76,11 @@
   SHELL_BLACKLISTED_CODE(50081, "Shell code contains blacklisted code(shell中包含黑名单代码)"),
   JOB_HISTORY_FAILED_ID(50081, ""),
 
-  LOGPATH_NOT_NULL(20301, "The logPath cannot be empty(日志路径不能为空)");
+  LOGPATH_NOT_NULL(20301, "The logPath cannot be empty(日志路径不能为空)"),
+
+  FAILOVER_RUNNING_TO_CANCELLED(
+      30001,
+      "Job {0} failover, status changed from Running to Cancelled (任务故障转移,状态从Running变更为Cancelled)");
 
   /** (errorCode)错误码 */
   private final int errorCode;
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java
new file mode 100644
index 0000000000..424e7ca170
--- /dev/null
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.entrance.restful;
+
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.entrance.EntranceServer;
+import org.apache.linkis.scheduler.queue.ConsumerManager;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.utils.ModuleUserUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.servlet.http.HttpServletRequest;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Api(tags = "entrance label manager")
+@RestController
+@RequestMapping(path = "/entrance/operation/consumer")
+public class EntranceConsumerRestfulApi {
+
+  private EntranceServer entranceServer;
+
+  private static final Logger logger = LoggerFactory.getLogger(EntranceConsumerRestfulApi.class);
+
+  @Autowired
+  public void setEntranceServer(EntranceServer entranceServer) {
+    this.entranceServer = entranceServer;
+  }
+
+  @ApiOperation(value = "kill-consumer", notes = "kill consumer", response = Message.class)
+  @RequestMapping(path = "/kill", method = RequestMethod.GET)
+  public Message killConsumer(
+      HttpServletRequest req, @RequestParam(value = "groupName") String groupName) {
+    String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer");
+    if (Configuration.isNotAdmin(operationUser)) {
+      return Message.error("only admin can do this");
+    }
+    logger.info("user {} starts to kill consumer {}", operationUser, groupName);
+    ConsumerManager consumerManager =
+        entranceServer
+            .getEntranceContext()
+            .getOrCreateScheduler()
+            .getSchedulerContext()
+            .getOrCreateConsumerManager();
+    consumerManager.destroyConsumer(groupName);
+    logger.info("user {} finished killing consumer {}", operationUser, groupName);
+    return Message.ok();
+  }
+
+  @ApiOperation(value = "consumer-info", notes = "list consumers info", response = Message.class)
+  @RequestMapping(path = "/info", method = RequestMethod.GET)
+  public Message countConsumer(HttpServletRequest req) {
+    String operationUser = ModuleUserUtils.getOperationUser(req, "consumer info");
+    if (Configuration.isNotAdmin(operationUser)) {
+      return Message.error("only admin can do this");
+    }
+    ConsumerManager consumerManager =
+        entranceServer
+            .getEntranceContext()
+            .getOrCreateScheduler()
+            .getSchedulerContext()
+            .getOrCreateConsumerManager();
+    return Message.ok().data("consumerNum", consumerManager.listConsumers().length);
+  }
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java
index 2ab457747c..0737e25ed8 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java
@@ -18,14 +18,19 @@
 package org.apache.linkis.entrance.restful;
 
 import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.entrance.EntranceServer;
+import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext;
 import org.apache.linkis.instance.label.client.InstanceLabelClient;
 import org.apache.linkis.manager.label.constant.LabelKeyConstant;
 import org.apache.linkis.manager.label.constant.LabelValueConstant;
 import org.apache.linkis.protocol.label.InsLabelRefreshRequest;
+import org.apache.linkis.protocol.label.InsLabelRemoveRequest;
 import org.apache.linkis.rpc.Sender;
+import org.apache.linkis.scheduler.SchedulerContext;
 import org.apache.linkis.server.Message;
 import org.apache.linkis.server.utils.ModuleUserUtils;
 
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.*;
 
 import javax.servlet.http.HttpServletRequest;
@@ -46,6 +51,14 @@ public class EntranceLabelRestfulApi {
 
   private static final Logger logger = LoggerFactory.getLogger(EntranceLabelRestfulApi.class);
 
+  private EntranceServer entranceServer;
+
+  @Autowired
+  public void setEntranceServer(EntranceServer entranceServer) {
+    this.entranceServer = entranceServer;
+  }
+
+  // volatile is sufficient here: the flag is only flipped by markoffline/backonline and read by isOnline
+  private static volatile boolean offlineFlag = false;
 
   @ApiOperation(value = "update", notes = "update route label", response = Message.class)
   @ApiOperationSupport(ignoreParameters = {"jsonNode"})
@@ -72,13 +85,51 @@ public Message updateRouteLabel(HttpServletRequest req, @RequestBody JsonNode js
   public Message updateRouteLabel(HttpServletRequest req) {
     ModuleUserUtils.getOperationUser(req, "markoffline");
     Map<String, Object> labels = new HashMap<>();
-    logger.info("Prepare to modify the routelabel of entry to offline");
+    logger.info("Prepare to modify the routelabel of entrance to offline");
     labels.put(LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE);
     InsLabelRefreshRequest insLabelRefreshRequest = new InsLabelRefreshRequest();
     insLabelRefreshRequest.setLabels(labels);
     insLabelRefreshRequest.setServiceInstance(Sender.getThisServiceInstance());
     InstanceLabelClient.getInstance().refreshLabelsToInstance(insLabelRefreshRequest);
+    offlineFlag = true;
     logger.info("Finished to modify the routelabel of entry to offline");
+
+    logger.info("Prepare to update all not-yet-executed task instances to empty string");
+    SchedulerContext schedulerContext =
+        entranceServer.getEntranceContext().getOrCreateScheduler().getSchedulerContext();
+    if (schedulerContext instanceof EntranceSchedulerContext) {
+      ((EntranceSchedulerContext) schedulerContext).setOfflineFlag(true);
+    }
+    entranceServer.updateAllNotExecutionTaskInstances(true);
+    logger.info("Finished updating all not-yet-executed task instances to empty string");
+
     return Message.ok();
   }
+
+  @ApiOperation(
+      value = "backonline",
+      notes = "recover from offline status",
+      response = Message.class)
+  @RequestMapping(path = "/backonline", method = RequestMethod.GET)
+  public Message backOnline(HttpServletRequest req) {
+    ModuleUserUtils.getOperationUser(req, "backonline");
+    logger.info("Prepare to modify the routelabel of entrance to remove offline");
+    InsLabelRemoveRequest insLabelRemoveRequest = new InsLabelRemoveRequest();
+    insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance());
+    InstanceLabelClient.getInstance().removeLabelsFromInstance(insLabelRemoveRequest);
+    offlineFlag = false;
+    logger.info("Finished backonline");
+    return Message.ok();
+  }
+
+  @ApiOperation(value = "isOnline", notes = "entrance isOnline", response = Message.class)
+  @RequestMapping(path = "/isOnline", method = RequestMethod.GET)
+  public Message isOnline(HttpServletRequest req) {
+    logger.info("Whether Entrance is online: {}", !offlineFlag);
+    return Message.ok().data("isOnline", !offlineFlag);
+  }
 }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java
index 7d36df8fec..7b487352d5 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java
@@ -20,8 +20,7 @@
 import org.apache.linkis.common.conf.Configuration;
 import org.apache.linkis.entrance.EntranceServer;
 import org.apache.linkis.entrance.execute.EntranceJob;
-import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel;
-import org.apache.linkis.manager.label.utils.LabelUtil;
+import org.apache.linkis.entrance.scheduler.CreatorECTypeDefaultConf;
 import org.apache.linkis.server.Message;
 import org.apache.linkis.server.utils.ModuleUserUtils;
@@ -67,7 +66,7 @@ public Message taskinfo(
       HttpServletRequest req,
       @RequestParam(value = "user", required = false) String user,
       @RequestParam(value = "creator", required = false) String creator,
-      @RequestParam(value = "engineTypeLabel", required = false) String engineTypeLabelValue) {
+      @RequestParam(value = "ecType", required = false) String ecType) {
     String userName = ModuleUserUtils.getOperationUser(req, "taskinfo");
     String queryUser = user;
     if (Configuration.isNotAdmin(userName)) {
@@ -83,23 +82,12 @@ public Message taskinfo(
     } else if (StringUtils.isBlank(creator)) {
       filterWords = queryUser;
     }
-    EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords);
-    int taskNumber = 0;
+    EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords, ecType);
     int runningNumber = 0;
     int queuedNumber = 0;
     if (null != undoneTasks) {
       for (EntranceJob task : undoneTasks) {
-        if (StringUtils.isNotBlank(engineTypeLabelValue)) {
-          EngineTypeLabel engineTypeLabel =
-              LabelUtil.getEngineTypeLabel(task.getJobRequest().getLabels());
-          // Task types do not match, do not count
-          if (null == engineTypeLabel
-              || !engineTypeLabelValue.equalsIgnoreCase(engineTypeLabel.getStringValue())) {
-            continue;
-          }
-        }
-        taskNumber++;
         if (task.isRunning()) {
           runningNumber++;
         } else {
@@ -107,17 +95,25 @@
         }
       }
     }
-    return Message.ok("success")
-        .data("taskNumber", taskNumber)
-        .data("runningNumber", runningNumber)
-        .data("queuedNumber", queuedNumber);
+    Message resp =
+        Message.ok("success")
+            .data("taskNumber", null != undoneTasks ? undoneTasks.length : 0)
+            .data("runningNumber", runningNumber)
+            .data("queuedNumber", queuedNumber);
+    if (StringUtils.isNoneBlank(creator, ecType)) {
+      int creatorECTypeMaxRunningJobs =
+          CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creator, ecType);
+      resp.data("creatorECTypeMaxRunningJobs", creatorECTypeMaxRunningJobs);
+      resp.data("limitExceeded", runningNumber > creatorECTypeMaxRunningJobs);
+    }
+    return resp;
   }
 
-  @ApiOperation(value = "Status", notes = "get running task number ", response = Message.class)
+  @ApiOperation(value = "runningtask", notes = "get running task number ", response = Message.class)
   @RequestMapping(path = "/runningtask", method = RequestMethod.GET)
-  public Message status(HttpServletRequest req) {
+  public Message runningtask(HttpServletRequest req) {
     ModuleUserUtils.getOperationUser(req, "runningtask");
-    EntranceJob[] undoneTasks = 
entranceServer.getAllUndoneTask(""); + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask("", null); Boolean isCompleted = false; if (null == undoneTasks || undoneTasks.length < 1) { isCompleted = true; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java index c1479efd8a..dfab300ab4 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java @@ -29,13 +29,16 @@ import org.apache.linkis.governance.common.entity.job.JobRequest; import org.apache.linkis.manager.common.protocol.resource.ResourceWithStatus; import org.apache.linkis.protocol.constants.TaskConstant; +import org.apache.linkis.protocol.engine.JobInstance; import org.apache.linkis.protocol.engine.JobProgressInfo; import org.apache.linkis.protocol.utils.ZuulEntranceUtils; import org.apache.linkis.rpc.Sender; import org.apache.linkis.scheduler.listener.LogListener; import org.apache.linkis.scheduler.queue.Job; import org.apache.linkis.scheduler.queue.SchedulerEventState; +import org.apache.linkis.server.BDPJettyServerHelper; import org.apache.linkis.server.Message; +import org.apache.linkis.server.conf.ServerConfiguration; import org.apache.linkis.server.security.SecurityFilter; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -61,6 +64,7 @@ import scala.Option; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; import io.swagger.annotations.Api; @@ -110,15 +114,6 @@ public Message execute(HttpServletRequest req, @RequestBody Map JobRequest jobReq = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobReq.getId(); ModuleUserUtils.getOperationUser(req, "execute task,id: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobReq.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); String execID = ZuulEntranceUtils.generateExecID( job.getId(), @@ -162,15 +157,6 @@ public Message submit(HttpServletRequest req, @RequestBody Map j JobRequest jobRequest = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobRequest.getId(); ModuleUserUtils.getOperationUser(req, "submit jobReqId: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobRequest.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); pushLog( LogUtils.generateInfo( "Your job is accepted, jobID is " @@ -198,6 +184,13 @@ private void pushLog(String log, Job job) { entranceServer.getEntranceContext().getOrCreateLogManager().onLogUpdate(job, log); } + private JobInstance parseHeaderToJobInstance(HttpServletRequest req) + throws JsonProcessingException { + String jobStr = + 
req.getHeader(ServerConfiguration.LINKIS_SERVER_ENTRANCE_HEADER_KEY().getValue()); + return BDPJettyServerHelper.gson().fromJson(jobStr, JobInstance.class); + } + @ApiOperation(value = "status", notes = "get task stats", response = Message.class) @ApiImplicitParams({ @ApiImplicitParam(name = "taskID", required = false, dataType = "String", value = " task id"), @@ -209,28 +202,74 @@ public Message status( HttpServletRequest req, @PathVariable("id") String id, @RequestParam(value = "taskID", required = false) String taskID) { + ModuleUserUtils.getOperationUser(req, "job status"); Message message = null; - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "status realId: " + realId); - Option job = Option.apply(null); + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/status"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/status"); + message.data("status", jobInstance.status()).data("execID", "").data("taskID", id); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return status is Inited", id); + String status = SchedulerEventState.Inited().toString(); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/status"); + message.data("status", status).data("execID", "").data("taskID", id); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } + + Option job = null; try { job = entranceServer.getJob(realId); } catch (Exception e) { - logger.warn("获取任务 {} 状态时出现错误", realId, e.getMessage()); + logger.warn("get {} status error", realId, e); + if (StringUtils.isEmpty(taskID)) { + message = + Message.error( + "Get job by ID error and cannot obtain the corresponding task status.(获取job时发生异常,不能获取相应的任务状态)"); + return message; + } long realTaskID = Long.parseLong(taskID); String status = JobHistoryHelper.getStatusByTaskID(realTaskID); message = Message.ok(); message.setMethod("/api/entrance/" + id + "/status"); - message.data("status", status).data("execID", id); + message.data("status", status).data("execID", execID); return message; } - if (job.isDefined()) { + if (job != null && job.isDefined()) { if (job.get() instanceof EntranceJob) { ((EntranceJob) job.get()).updateNewestAccessByClientTimestamp(); } message = Message.ok(); message.setMethod("/api/entrance/" + id + "/status"); - message.data("status", job.get().getState().toString()).data("execID", id); + message.data("status", job.get().getState().toString()).data("execID", execID); } else { message = Message.error( @@ -246,9 +285,56 @@ public Message status( @Override @RequestMapping(path = "/{id}/progress", method = RequestMethod.GET) public Message progress(HttpServletRequest req, @PathVariable("id") String id) { + ModuleUserUtils.getOperationUser(req, "job 
progress"); Message message = null; - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "progress realId: " + realId); + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/progress"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progress"); + message + .data("progress", "1.0") + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return progress is 0", id); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progress"); + message + .data("progress", 0) + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } + Option job = null; try { job = entranceServer.getJob(realId); @@ -275,7 +361,7 @@ public Message progress(HttpServletRequest req, @PathVariable("id") String id) { message .data("progress", Math.abs(job.get().getProgress())) - .data("execID", id) + .data("execID", execID) .data("progressInfo", list); } } else { @@ -296,9 +382,60 @@ public Message progress(HttpServletRequest req, @PathVariable("id") String id) { @Override @RequestMapping(path = "/{id}/progressWithResource", method = RequestMethod.GET) public Message progressWithResource(HttpServletRequest req, @PathVariable("id") String id) { + ModuleUserUtils.getOperationUser(req, "job progressWithResource"); Message message = null; - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "progressWithResource realId: " + realId); + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/progressWithResource"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + long realTaskID = Long.parseLong(id); + JobRequest jobRequest = JobHistoryHelper.getTaskByTaskID(realTaskID); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progressWithResource"); + Map metricsVo = new HashMap<>(); + buildYarnResource(jobRequest, metricsVo, message); + message + .data("progress", "1.0") + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > 
jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return progress is 0 and resource is null", id); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/progressWithResource"); + message + .data(TaskConstant.JOB_YARNRESOURCE, null) + .data("progress", 0) + .data("execID", "") + .data("taskID", id) + .data("progressInfo", new ArrayList<>()); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } Option job = null; try { job = entranceServer.getJob(realId); @@ -324,57 +461,12 @@ public Message progressWithResource(HttpServletRequest req, @PathVariable("id") message.setMethod("/api/entrance/" + id + "/progressWithResource"); JobRequest jobRequest = ((EntranceJob) job.get()).getJobRequest(); - Map metrics = jobRequest.getMetrics(); Map metricsVo = new HashMap<>(); - if (metrics.containsKey(TaskConstant.JOB_YARNRESOURCE)) { - HashMap resourceMap = - (HashMap) metrics.get(TaskConstant.JOB_YARNRESOURCE); - ArrayList resoureList = new ArrayList<>(12); - if (null != resourceMap && !resourceMap.isEmpty()) { - resourceMap.forEach( - (applicationId, resource) -> { - resoureList.add(new YarnResourceWithStatusVo(applicationId, resource)); - }); - metricsVo.put(TaskConstant.JOB_YARNRESOURCE, resoureList); - Optional cores = - resourceMap.values().stream() - .map(resource -> resource.getQueueCores()) - .reduce((x, y) -> x + y); - Optional memory = - resourceMap.values().stream() - .map(resource -> resource.queueMemory()) - .reduce((x, y) -> x + y); - float corePercent = 0.0f; - float memoryPercent = 0.0f; - if (cores.isPresent() && memory.isPresent()) { - corePercent = - cores.get().floatValue() - / EntranceConfiguration.YARN_QUEUE_CORES_MAX().getHotValue(); - memoryPercent = - memory.get().floatValue() - / (EntranceConfiguration.YARN_QUEUE_MEMORY_MAX().getHotValue().longValue() - * 1024 - * 1024 - * 1024); - } - String coreRGB = RGBUtils.getRGB(corePercent); - String memoryRGB = RGBUtils.getRGB(memoryPercent); - metricsVo.put(TaskConstant.JOB_CORE_PERCENT, corePercent); - metricsVo.put(TaskConstant.JOB_MEMORY_PERCENT, memoryPercent); - metricsVo.put(TaskConstant.JOB_CORE_RGB, coreRGB); - metricsVo.put(TaskConstant.JOB_MEMORY_RGB, memoryRGB); - - message.data(TaskConstant.JOB_YARN_METRICS, metricsVo); - } else { - message.data(TaskConstant.JOB_YARNRESOURCE, null); - } - } else { - message.data(TaskConstant.JOB_YARNRESOURCE, null); - } + buildYarnResource(jobRequest, metricsVo, message); message .data("progress", Math.abs(job.get().getProgress())) - .data("execID", id) + .data("execID", execID) .data("progressInfo", list); } } else { @@ -385,6 +477,60 @@ public Message progressWithResource(HttpServletRequest req, @PathVariable("id") return message; } + private void buildYarnResource( + JobRequest jobRequest, Map metricsVo, Message message) { + try { + Map metrics = jobRequest.getMetrics(); + if (metrics.containsKey(TaskConstant.JOB_YARNRESOURCE)) { + + HashMap resourceMap = + (HashMap) metrics.get(TaskConstant.JOB_YARNRESOURCE); + ArrayList resoureList = new ArrayList<>(12); + if (null != resourceMap && !resourceMap.isEmpty()) { + resourceMap.forEach( + (applicationId, resource) -> { + resoureList.add(new YarnResourceWithStatusVo(applicationId, resource)); + }); + metricsVo.put(TaskConstant.JOB_YARNRESOURCE, resoureList); + Optional cores = + resourceMap.values().stream() + .map(resource -> 
resource.getQueueCores()) + .reduce((x, y) -> x + y); + Optional memory = + resourceMap.values().stream() + .map(resource -> resource.queueMemory()) + .reduce((x, y) -> x + y); + float corePercent = 0.0f; + float memoryPercent = 0.0f; + if (cores.isPresent() && memory.isPresent()) { + corePercent = + cores.get().floatValue() / EntranceConfiguration.YARN_QUEUE_CORES_MAX().getValue(); + memoryPercent = + memory.get().floatValue() + / (EntranceConfiguration.YARN_QUEUE_MEMORY_MAX().getValue().longValue() + * 1024 + * 1024 + * 1024); + } + String coreRGB = RGBUtils.getRGB(corePercent); + String memoryRGB = RGBUtils.getRGB(memoryPercent); + metricsVo.put(TaskConstant.JOB_CORE_PERCENT, corePercent); + metricsVo.put(TaskConstant.JOB_MEMORY_PERCENT, memoryPercent); + metricsVo.put(TaskConstant.JOB_CORE_RGB, coreRGB); + metricsVo.put(TaskConstant.JOB_MEMORY_RGB, memoryRGB); + + message.data(TaskConstant.JOB_YARN_METRICS, metricsVo); + } else { + message.data(TaskConstant.JOB_YARNRESOURCE, null); + } + } else { + message.data(TaskConstant.JOB_YARNRESOURCE, null); + } + } catch (Exception e) { + logger.error("build yarnResource error", e); + } + } + private void setJobProgressInfos( List> list, JobProgressInfo jobProgressInfo) { Map map = new HashMap<>(); @@ -403,10 +549,78 @@ private void setJobProgressInfos( @Override @RequestMapping(path = "/{id}/log", method = RequestMethod.GET) public Message log(HttpServletRequest req, @PathVariable("id") String id) { - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - ModuleUserUtils.getOperationUser(req, "log realId: " + realId); - Option job = Option.apply(null); + ModuleUserUtils.getOperationUser(req, "get job log"); Message message = null; + int fromLine = 0; + int size = 100; + boolean distinctLevel = true; + String fromLineStr = req.getParameter("fromLine"); + String sizeStr = req.getParameter("size"); + if (StringUtils.isNotBlank(fromLineStr)) { + fromLine = Math.max(Integer.parseInt(fromLineStr), 0); + } + if (StringUtils.isNotBlank(sizeStr)) { + size = Integer.parseInt(sizeStr) >= 0 ? Integer.parseInt(sizeStr) : 10000; + } + String distinctLevelStr = req.getParameter("distinctLevel"); + if ("false".equals(distinctLevelStr)) { + distinctLevel = false; + } + + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/log"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = + Message.error( + "The job you just executed has ended. This interface no longer provides a query. 
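buildYarnResource sums per-application queue cores and memory and normalizes them against the configured maxima; memory is configured in GB but reported in bytes, hence the three 1024 factors. Worked numbers, with maxima of 1000 cores and 1000 GB chosen purely for illustration:

    public class YarnPercentSketch {
      public static void main(String[] args) {
        long cores = 120;
        long memoryBytes = 256L * 1024 * 1024 * 1024; // 256 GB in bytes
        float corePercent = cores / 1000.0f;                                     // vs YARN_QUEUE_CORES_MAX
        float memoryPercent = memoryBytes / (1000L * 1024 * 1024 * 1024 * 1.0f); // vs YARN_QUEUE_MEMORY_MAX in GB
        System.out.printf("corePercent=%.3f memoryPercent=%.3f%n", corePercent, memoryPercent);
        // corePercent=0.120 memoryPercent=0.256
      }
    }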
It is recommended that you download the log file for viewing.(您刚刚执行的job已经结束,本接口不再提供查询,建议您下载日志文件进行查看)"); + message.setMethod("/api/entrance/" + id + "/log"); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, return customer log", id); + message = Message.ok(); + message.setMethod("/api/entrance/" + id + "/log"); + String log = + LogUtils.generateInfo( + "The job will failover soon, please try again later.(job很快就会failover,请稍后再试)"); + Object retLog; + if (distinctLevel) { + String[] array = new String[4]; + array[2] = log; + array[3] = log; + retLog = new ArrayList(Arrays.asList(array)); + } else { + retLog = log; + } + message.data("log", retLog).data("execID", "").data("taskID", id).data("fromLine", 0); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } + + Option job = null; try { job = entranceServer.getJob(realId); } catch (final Throwable t) { @@ -416,27 +630,10 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) { message.setMethod("/api/entrance/" + id + "/log"); return message; } - if (job.isDefined()) { + if (job != null && job.isDefined()) { logger.debug("begin to get log for {}(开始获取 {} 的日志)", job.get().getId(), job.get().getId()); LogReader logReader = entranceServer.getEntranceContext().getOrCreateLogManager().getLogReader(realId); - int fromLine = 0; - int size = 100; - boolean distinctLevel = true; - if (req != null) { - String fromLineStr = req.getParameter("fromLine"); - String sizeStr = req.getParameter("size"); - if (StringUtils.isNotBlank(fromLineStr)) { - fromLine = Math.max(Integer.parseInt(fromLineStr), 0); - } - if (StringUtils.isNotBlank(sizeStr)) { - size = Integer.parseInt(sizeStr) >= 0 ? 
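While a job waits for failover, the log endpoint fabricates a one-line notice instead of reading log files. With distinctLevel=true the client expects a four-slot array, of which the patch fills slots 2 and 3; the slot semantics are an assumption from the shape, not stated in the patch:

    import java.util.ArrayList;
    import java.util.Arrays;

    public class PlaceholderLogSketch {
      static Object placeholder(String log, boolean distinctLevel) {
        if (!distinctLevel) return log; // plain string when levels are not split
        String[] slots = new String[4];
        slots[2] = log;
        slots[3] = log;
        return new ArrayList<>(Arrays.asList(slots));
      }

      public static void main(String[] args) {
        System.out.println(placeholder("The job will failover soon, please try again later.", true));
      }
    }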
Integer.parseInt(sizeStr) : 10000;
-        }
-        String distinctLevelStr = req.getParameter("distinctLevel");
-        if ("false".equals(distinctLevelStr)) {
-          distinctLevel = false;
-        }
-      }
       Object retLog = null;
       int retFromLine = 0;
@@ -458,7 +655,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) {
             e);
         message = Message.ok();
         message.setMethod("/api/entrance/" + id + "/log");
-        message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine);
+        message.data("log", "").data("execID", execID).data("fromLine", retFromLine + fromLine);
       } catch (final IllegalArgumentException e) {
         logger.debug(
             "Failed to get log information for :{}(为 {} 获取日志失败)",
@@ -467,7 +664,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) {
             e);
         message = Message.ok();
         message.setMethod("/api/entrance/" + id + "/log");
-        message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine);
+        message.data("log", "").data("execID", execID).data("fromLine", retFromLine + fromLine);
         return message;
       } catch (final Exception e1) {
         logger.debug(
@@ -477,7 +674,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) {
             e1);
         message = Message.error("Failed to get log information(获取日志信息失败)");
         message.setMethod("/api/entrance/" + id + "/log");
-        message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine);
+        message.data("log", "").data("execID", execID).data("fromLine", retFromLine + fromLine);
         return message;
       } finally {
         if (null != logReader && job.get().isCompleted()) {
@@ -486,7 +683,7 @@ public Message log(HttpServletRequest req, @PathVariable("id") String id) {
       }
       message = Message.ok();
       message.setMethod("/api/entrance/" + id + "/log");
-      message.data("log", retLog).data("execID", id).data("fromLine", retFromLine + fromLine);
+      message.data("log", retLog).data("execID", execID).data("fromLine", retFromLine + fromLine);
       logger.debug("success to get log for {} (获取 {} 日志成功)", job.get().getId(), job.get().getId());
     } else {
       message =
@@ -514,7 +711,6 @@ public Message killJobs(
     JsonNode taskIDNode = jsonNode.get("taskIDList");
     ArrayList waitToForceKill = new ArrayList<>();
     String userName = ModuleUserUtils.getOperationUser(req, "killJobs");
-
     if (idNode.size() != taskIDNode.size()) {
       return Message.error(
           "The length of the ID list does not match the length of the TASKID list(id列表的长度与taskId列表的长度不一致)");
     }
@@ -527,7 +723,7 @@ public Message killJobs(
       String id = idNode.get(i).asText();
       Long taskID = taskIDNode.get(i).asLong();
       String realId = ZuulEntranceUtils.parseExecID(id)[3];
-      Option job = Option.apply(null);
+      Option job = null;
       try {
         job = entranceServer.getJob(realId);
       } catch (Exception e) {
@@ -541,7 +737,7 @@ public Message killJobs(
         continue;
       }
       Message message = null;
-      if (job.isEmpty()) {
+      if (job == null || job.isEmpty()) {
         logger.warn("can not find a job in entranceServer, will force to kill it");
         waitToForceKill.add(taskID);
         message = Message.ok("Forced Kill task (强制杀死任务)");
@@ -577,11 +773,12 @@ public Message killJobs(
         if (null != logListener) {
           logListener.onLogUpdate(
               entranceJob,
-              "Job "
-                  + jobReq.getId()
-                  + " was kill by user successfully(任务"
-                  + jobReq.getId()
-                  + "已成功取消)");
+              LogUtils.generateInfo(
+                  "Job "
+                      + jobReq.getId()
+                      + " was killed by user successfully(任务"
+                      + jobReq.getId()
+                      + "已成功取消)"));
         }
         this.entranceServer
             .getEntranceContext()
@@ -594,9 +791,9 @@ public Message killJobs(
         logger.error("kill job {} failed ", job.get().getId(), t);
         message =
             Message.error(
-                "An exception 
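killJobs rejects mismatched id/taskID lists up front and collects every job it cannot find in this entrance's memory for a forced kill through job history. The pairing rule in isolation, with the in-memory lookup reduced to a set:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Set;

    public class KillJobsSketch {
      static List<Long> forceKillCandidates(String[] ids, Long[] taskIds, Set<String> inMemory) {
        if (ids.length != taskIds.length) {
          throw new IllegalArgumentException("id list and taskID list differ in length");
        }
        List<Long> waitToForceKill = new ArrayList<>();
        for (int i = 0; i < ids.length; i++) {
          if (!inMemory.contains(ids[i])) {
            waitToForceKill.add(taskIds[i]); // not held here: force kill via JobHistoryHelper
          }
        }
        return waitToForceKill;
      }
    }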
occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)"); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)", + t); message.setMethod("/api/entrance/" + id + "/kill"); - message.setStatus(1); } } messages.add(message); @@ -609,7 +806,7 @@ public Message killJobs( @ApiOperation(value = "kill", notes = "kill", response = Message.class) @ApiImplicitParams({ - @ApiImplicitParam(name = "id", required = true, dataType = "String", value = "excute id"), + @ApiImplicitParam(name = "id", required = true, dataType = "String", value = "exec id"), @ApiImplicitParam(name = "taskID", required = false, dataType = "String", value = "task id") }) @Override @@ -618,23 +815,68 @@ public Message kill( HttpServletRequest req, @PathVariable("id") String id, @RequestParam(value = "taskID", required = false) Long taskID) { - String realId = ZuulEntranceUtils.parseExecID(id)[3]; - String userName = ModuleUserUtils.getOperationUser(req, "kill task realId:" + realId); + String userName = ModuleUserUtils.getOperationUser(req, "kill job"); + Message message = null; + String realId; + String execID; + if (id.startsWith(ZuulEntranceUtils.EXEC_ID())) { + // execID + realId = ZuulEntranceUtils.parseExecID(id)[3]; + execID = id; + } else { + // taskID + JobInstance jobInstance; + try { + jobInstance = parseHeaderToJobInstance(req); + } catch (JsonProcessingException e) { + logger.error("parse JobInstance json error, id: {}", id); + message = Message.error("parse JobInstance json error"); + message.setMethod("/api/entrance/" + id + "/kill"); + return message; + } + + // return ok when job complete + if (SchedulerEventState.isCompletedByStr(jobInstance.status())) { + message = Message.error("The job already completed. Do not support kill.(任务已经结束,不支持kill)"); + message.setMethod("/api/entrance/" + id + "/kill"); + return message; + } else if (jobInstance.instanceRegistryTimestamp() > jobInstance.createTimestamp()) { + logger.warn("The job {} wait failover, but now force kill", id); + // TODO If failover during force kill, the job status may change from Cancelled to Running + long taskId = Long.parseLong(id); + JobHistoryHelper.forceKill(taskId); + message = Message.ok("Forced Kill task (强制杀死任务)"); + message.setMethod("/api/entrance/" + id + "/kill"); + message.data("execID", "").data("taskID", id); + return message; + } else { + realId = jobInstance.jobReqId(); + execID = + ZuulEntranceUtils.generateExecID( + realId, + Sender.getThisServiceInstance().getApplicationName(), + new String[] {Sender.getThisInstance()}); + } + } - Option job = Option.apply(null); + Option job = null; try { job = entranceServer.getJob(realId); } catch (Exception e) { logger.warn("can not find a job in entranceServer, will force to kill it", e); // 如果在内存中找不到该任务,那么该任务可能已经完成了,或者就是重启导致的 + if (taskID == null || taskID <= 0) { + message = Message.error("Get job by ID error, kill failed.(获取job时发生异常,kill失败)"); + return message; + } JobHistoryHelper.forceKill(taskID); - Message message = Message.ok("Forced Kill task (强制杀死任务)"); + message = Message.ok("Forced Kill task (强制杀死任务)"); message.setMethod("/api/entrance/" + id + "/kill"); message.setStatus(0); return message; } - Message message = null; - if (job.isEmpty()) { + + if (job == null || job.isEmpty()) { logger.warn("can not find a job in entranceServer, will force to kill it"); // 如果在内存中找不到该任务,那么该任务可能已经完成了,或者就是重启导致的 JobHistoryHelper.forceKill(taskID); @@ -660,8 +902,7 @@ public Message kill( job.get().kill(); message = Message.ok("Successfully killed the 
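kill() gains a guard before its force-kill fallback: without a usable taskID there is no way to address the job in history, so the request now fails cleanly instead of passing a null id on. The fallback in isolation:

    public class ForceKillSketch {
      interface History { void forceKill(long taskId); } // stand-in for JobHistoryHelper

      static String killFallback(Long taskID, History history) {
        if (taskID == null || taskID <= 0) {
          return "Get job by ID error, kill failed."; // new guard from the patch
        }
        history.forceKill(taskID);
        return "Forced Kill task";
      }

      public static void main(String[] args) {
        System.out.println(killFallback(null, id -> {})); // Get job by ID error, kill failed.
        System.out.println(killFallback(42L, id -> {}));  // Forced Kill task
      }
    }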
job(成功kill了job)"); message.setMethod("/api/entrance/" + id + "/kill"); - message.setStatus(0); - message.data("execID", id); + message.data("execID", execID); // ensure the job's state is cancelled in database if (job.get() instanceof EntranceJob) { EntranceJob entranceJob = (EntranceJob) job.get(); @@ -678,10 +919,11 @@ public Message kill( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)" - + "message: " - + t.getMessage()); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败) with error:" + + t.getMessage(), + t); message.setMethod("/api/entrance/" + id + "/kill"); + message.setStatus(1); } } return message; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java new file mode 100644 index 0000000000..5a91c71a11 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.conf.EntranceConfiguration; +import org.apache.linkis.entrance.utils.EntranceUtils; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfig; +import org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.apache.linkis.rpc.Sender; + +import org.apache.commons.lang3.StringUtils; + +import java.util.concurrent.TimeUnit; + +import scala.Tuple2; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CreatorECTypeDefaultConf { + + private static final Logger logger = LoggerFactory.getLogger(CreatorECTypeDefaultConf.class); + + public static Sender confSender = + Sender.getSender( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()); + + private static LoadingCache confCache = + CacheBuilder.newBuilder() + .maximumSize(1000) + .expireAfterWrite( + (long) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE().getValue(), + TimeUnit.MINUTES) + .build( + new CacheLoader() { + @Override + public Integer load(String key) throws Exception { + Tuple2 tuple2 = + EntranceUtils.fromKeyGetLabels(key); + RequestQueryEngineConfig requestQueryEngineConfig = + new RequestQueryEngineConfig(tuple2._1, tuple2._2(), null); + int jobLimit = + (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + try { + Object response = confSender.ask(requestQueryEngineConfig); + if (response instanceof ResponseQueryConfig) { + jobLimit = + (int) + EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT() + .getValue(((ResponseQueryConfig) response).getKeyAndValue()); + } + } catch (Exception e) { + logger.warn("Failed to get key {} from conf", key, e); + } + return jobLimit; + } + }); + + public static int getCreatorECTypeMaxRunningJobs(String creator, String ecType) { + int jobLimit = (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + if (StringUtils.isNoneBlank(creator, ecType)) { + try { + String key = EntranceUtils.getDefaultCreatorECTypeKey(creator, ecType); + jobLimit = confCache.get(key); + } catch (Exception e) { + logger.warn("Failed to get key creator {} ecType {} from cache", creator, ecType, e); + } + } + int entranceNumber = EntranceUtils.getRunningEntranceNumber(); + return jobLimit / entranceNumber; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java index 7558ab6dc2..7c38d27947 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java @@ -20,13 +20,17 @@ import org.apache.linkis.common.ServiceInstance; import org.apache.linkis.entrance.EntranceContext; import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.entrance.conf.EntranceConfiguration; import org.apache.linkis.entrance.conf.EntranceConfiguration$; import 
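CreatorECTypeDefaultConf above caches the per creator/EC-type concurrency limit in a Guava LoadingCache and divides it by the number of live entrances, so each instance enforces only its share. The caching pattern reduced to its essentials (the default limit and expiry are hard-coded here; the patch reads both from configuration):

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import java.util.concurrent.TimeUnit;

    public class JobLimitCacheSketch {
      private static final LoadingCache<String, Integer> LIMITS =
          CacheBuilder.newBuilder()
              .maximumSize(1000)
              .expireAfterWrite(30, TimeUnit.MINUTES)
              .build(new CacheLoader<String, Integer>() {
                @Override
                public Integer load(String key) {
                  return 10000; // the patch asks the configuration service and falls back to this default
                }
              });

      public static void main(String[] args) throws Exception {
        int entrances = 2; // number of running entrance instances
        System.out.println(LIMITS.get("IDE_spark") / entrances); // 5000: this instance's share
      }
    }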
org.apache.linkis.entrance.constant.ServiceNameConsts; import org.apache.linkis.entrance.execute.EntranceJob; +import org.apache.linkis.entrance.job.EntranceExecutionJob; import org.apache.linkis.entrance.log.LogReader; import org.apache.linkis.governance.common.protocol.conf.EntranceInstanceConfRequest; import org.apache.linkis.rpc.Sender; +import org.apache.commons.io.IOUtils; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.event.ContextClosedEvent; import org.springframework.context.event.EventListener; @@ -94,13 +98,19 @@ private void shutdownEntrance(ContextClosedEvent event) { if (shutdownFlag) { logger.warn("event has been handled"); } else { + if (EntranceConfiguration.ENTRANCE_SHUTDOWN_FAILOVER_CONSUME_QUEUE_ENABLED()) { + logger.warn("Entrance exit to update and clean all ConsumeQueue task instances"); + updateAllNotExecutionTaskInstances(false); + } + logger.warn("Entrance exit to stop all job"); - EntranceJob[] allUndoneJobs = getAllUndoneTask(null); - if (null != allUndoneJobs) { - for (EntranceJob job : allUndoneJobs) { + EntranceJob[] allUndoneTask = getAllUndoneTask(null, null); + if (null != allUndoneTask) { + for (EntranceJob job : allUndoneTask) { job.onFailure( "Your job will be marked as canceled because the Entrance service restarted(因为Entrance服务重启,您的任务将被标记为取消)", null); + IOUtils.closeQuietly(((EntranceExecutionJob) job).getLogWriter().get()); } } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java new file mode 100644 index 0000000000..4e66da5cc3 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/EntranceFailoverJobServer.java @@ -0,0 +1,180 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
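The shutdown hook above now also closes each undone job's log writer so buffered lines reach storage before the JVM exits, and optionally flushes the consume queue for failover first. The shape of that loop, as a sketch:

    import java.io.Closeable;
    import java.io.IOException;

    public class ShutdownSketch {
      interface UndoneJob {
        void onFailure(String reason);
        Closeable logWriter();
      }

      static void shutdown(UndoneJob[] undone) {
        for (UndoneJob job : undone) {
          job.onFailure("Your job will be marked as canceled because the Entrance service restarted");
          try {
            job.logWriter().close(); // the patch uses IOUtils.closeQuietly(...)
          } catch (IOException ignored) {
            // closeQuietly swallows close failures
          }
        }
      }
    }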
+ */ + +package org.apache.linkis.entrance.server; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.entrance.conf.EntranceConfiguration; +import org.apache.linkis.entrance.constant.ServiceNameConsts; +import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext; +import org.apache.linkis.entrance.utils.JobHistoryHelper; +import org.apache.linkis.governance.common.entity.job.JobRequest; +import org.apache.linkis.publicservice.common.lock.entity.CommonLock; +import org.apache.linkis.publicservice.common.lock.service.CommonLockService; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.scheduler.queue.SchedulerEventState; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.event.ContextClosedEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.*; +import java.util.stream.Collectors; + +import scala.Enumeration; +import scala.collection.Iterator; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Component(ServiceNameConsts.ENTRANCE_FAILOVER_SERVER) +public class EntranceFailoverJobServer { + + private static final Logger logger = LoggerFactory.getLogger(EntranceFailoverJobServer.class); + + @Autowired private EntranceServer entranceServer; + + @Autowired private CommonLockService commonLockService; + + private static String ENTRANCE_FAILOVER_LOCK = "ENTRANCE_FAILOVER_LOCK"; + + private ScheduledExecutorService scheduledExecutor; + + private Future future; + + @PostConstruct + public void init() { + if (EntranceConfiguration.ENTRANCE_FAILOVER_ENABLED()) { + this.scheduledExecutor = + Executors.newSingleThreadScheduledExecutor( + Utils.threadFactory("Linkis-Failover-Scheduler-Thread-", true)); + failoverTask(); + } + } + + @EventListener + private void shutdownFailover(ContextClosedEvent event) { + if (future != null && !future.isDone()) { + future.cancel(true); + } + if (scheduledExecutor != null) { + scheduledExecutor.shutdown(); + logger.info("Entrance Failover Server exit!"); + } + } + + public void failoverTask() { + future = + scheduledExecutor.scheduleWithFixedDelay( + () -> { + EntranceSchedulerContext schedulerContext = + (EntranceSchedulerContext) + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext(); + + // entrance do not failover job when it is offline + if (schedulerContext.getOfflineFlag()) return; + + CommonLock commonLock = new CommonLock(); + commonLock.setLockObject(ENTRANCE_FAILOVER_LOCK); + Boolean locked = false; + try { + locked = commonLockService.lock(commonLock, 30 * 1000L); + if (!locked) return; + logger.info("success locked {}", ENTRANCE_FAILOVER_LOCK); + + // get all entrance server from eureka + ServiceInstance[] serviceInstances = + Sender.getInstances(Sender.getThisServiceInstance().getApplicationName()); + if (serviceInstances == null || serviceInstances.length <= 0) return; + + // serverInstance to map + Map serverInstanceMap = + Arrays.stream(serviceInstances) + .collect( + Collectors.toMap( + ServiceInstance::getInstance, + ServiceInstance::getRegistryTimestamp, + (k1, k2) -> k2)); + + // It is very important to avoid repeated execute job + // when failover self job, if self 
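Every failover tick runs under a cluster-wide lock so only one entrance scans at a time, and the lock is always released in finally. The skeleton of that loop, with CommonLockService reduced to an interface:

    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class FailoverLoopSketch {
      interface LockService {
        boolean lock(String key, long timeoutMs);
        void unlock(String key);
      }

      static void schedule(ScheduledExecutorService pool, LockService locks, Runnable scan) {
        pool.scheduleWithFixedDelay(() -> {
          boolean locked = false;
          try {
            locked = locks.lock("ENTRANCE_FAILOVER_LOCK", 30_000L);
            if (!locked) return; // another entrance holds the lock this round
            scan.run();          // query expired jobs and fail them over locally
          } catch (Exception e) {
            e.printStackTrace(); // the patch logs "failover failed" instead
          } finally {
            if (locked) locks.unlock("ENTRANCE_FAILOVER_LOCK");
          }
        }, 3_000L, 30_000L, TimeUnit.MILLISECONDS); // init delay and interval are configurable
      }
    }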
instance is empty, the job can be repeated + // execute + if (!serverInstanceMap.containsKey(Sender.getThisInstance())) { + logger.warn( + "server has just started and has not get self info, it does not failover"); + return; + } + + // get failover job expired time (获取任务故障转移过期时间,配置为0表示不过期, 过期则不处理) + long expiredTimestamp = 0L; + if (EntranceConfiguration.ENTRANCE_FAILOVER_DATA_INTERVAL_TIME() > 0) { + expiredTimestamp = + System.currentTimeMillis() + - EntranceConfiguration.ENTRANCE_FAILOVER_DATA_INTERVAL_TIME(); + } + + List jobRequests = + JobHistoryHelper.queryWaitForFailoverTask( + serverInstanceMap, + getUnCompleteStatus(), + expiredTimestamp, + EntranceConfiguration.ENTRANCE_FAILOVER_DATA_NUM_LIMIT()); + if (jobRequests.isEmpty()) return; + List ids = + jobRequests.stream().map(JobRequest::getId).collect(Collectors.toList()); + logger.info("success query failover jobs , job size: {}, ids: {}", ids.size(), ids); + + // failover to local server + for (JobRequest jobRequest : jobRequests) { + entranceServer.failoverExecute(jobRequest); + } + logger.info("finished execute failover jobs, job ids: {}", ids); + + } catch (Exception e) { + logger.error("failover failed", e); + } finally { + if (locked) commonLockService.unlock(commonLock); + } + }, + EntranceConfiguration.ENTRANCE_FAILOVER_SCAN_INIT_TIME(), + EntranceConfiguration.ENTRANCE_FAILOVER_SCAN_INTERVAL(), + TimeUnit.MILLISECONDS); + } + + private List getUnCompleteStatus() { + List status = new ArrayList<>(); + Enumeration.ValueSet values = SchedulerEventState.values(); + Iterator iterator = values.iterator(); + while (iterator.hasNext()) { + Enumeration.Value next = iterator.next(); + if (!SchedulerEventState.isCompleted(next)) status.add(next.toString()); + } + return status; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala index 1035de1e2b..a610d524b2 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala @@ -17,28 +17,42 @@ package org.apache.linkis.entrance +import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.exception.{ErrorException, LinkisException, LinkisRuntimeException} import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.cs.CSEntranceHelper +import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorException, SubmitFailedException} import org.apache.linkis.entrance.execute.EntranceJob +import org.apache.linkis.entrance.job.EntranceExecutionJob import org.apache.linkis.entrance.log.LogReader +import org.apache.linkis.entrance.parser.ParserUtils import org.apache.linkis.entrance.timeout.JobTimeoutManager import org.apache.linkis.entrance.utils.JobHistoryHelper +import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.task.RequestTaskKill import org.apache.linkis.governance.common.utils.LoggerUtils +import 
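The scan dedupes the Eureka instance list by instance name, keeping the newest registryTimestamp through the toMap merge function, and bails out until this instance can see itself in the map. That merge in isolation (the record is a stand-in for ServiceInstance):

    import java.util.Arrays;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class InstanceMapSketch {
      record Instance(String name, long registryTimestamp) {}

      public static void main(String[] args) {
        Instance[] all = {
          new Instance("host1:9104", 100L),
          new Instance("host1:9104", 200L) // re-registration: the merge keeps the later timestamp
        };
        Map<String, Long> byInstance =
            Arrays.stream(all)
                .collect(Collectors.toMap(Instance::name, Instance::registryTimestamp, (a, b) -> b));
        System.out.println(byInstance); // {host1:9104=200}
      }
    }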
org.apache.linkis.manager.common.protocol.engine.EngineStopRequest +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.rpc.Sender +import org.apache.linkis.rpc.conf.RPCConfiguration import org.apache.linkis.scheduler.queue.{Job, SchedulerEventState} import org.apache.linkis.server.conf.ServerConfiguration import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils -import java.text.MessageFormat -import java.util +import java.{lang, util} +import java.text.{MessageFormat, SimpleDateFormat} +import java.util.Date +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ abstract class EntranceServer extends Logging { @@ -46,6 +60,8 @@ abstract class EntranceServer extends Logging { private val jobTimeoutManager: JobTimeoutManager = new JobTimeoutManager() + private val timeoutCheck = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + def init(): Unit def getName: String @@ -82,6 +98,294 @@ abstract class EntranceServer extends Logging { LoggerUtils.setJobIdMDC(jobRequest.getId.toString) val logAppender = new java.lang.StringBuilder() + jobRequest = dealInitedJobRequest(jobRequest, logAppender) + + val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest) + Utils.tryThrow { + job.init() + job.setLogListener(getEntranceContext.getOrCreateLogManager()) + job.setProgressListener(getEntranceContext.getOrCreatePersistenceManager()) + job.setJobListener(getEntranceContext.getOrCreatePersistenceManager()) + job match { + case entranceJob: EntranceJob => + entranceJob.setEntranceListenerBus(getEntranceContext.getOrCreateEventListenerBus) + case _ => + } + Utils.tryCatch { + if (logAppender.length() > 0) { + job.getLogListener.foreach(_.onLogUpdate(job, logAppender.toString.trim)) + } + } { t => + logger.error("Failed to write init log, reason: ", t) + } + + /** + * job.afterStateChanged() method is only called in job.run(), and job.run() is called only + * after job is scheduled so it suggest that we lack a hook for job init, currently we call + * this to trigger JobListener.onJobinit() + */ + Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) + if (logger.isDebugEnabled()) { + logger.debug( + s"After code preprocessing, the real execution code is:${jobRequest.getExecutionCode}" + ) + } + if (StringUtils.isBlank(jobRequest.getExecutionCode)) { + throw new SubmitFailedException( + SUBMIT_CODE_ISEMPTY.getErrorCode, + SUBMIT_CODE_ISEMPTY.getErrorDesc + ) + } + getEntranceContext.getOrCreateScheduler().submit(job) + val msg = LogUtils.generateInfo( + s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted " + ) + logger.info(msg) + + job match { + case entranceJob: EntranceJob => + entranceJob.getJobRequest.setReqId(job.getId()) + if (timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { + jobTimeoutManager.add(job.getId(), entranceJob) + } + entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg)) + case _ => + } + LoggerUtils.removeJobIdMDC() + job + } { t => + LoggerUtils.removeJobIdMDC() + job.onFailure("Submitting the query failed!(提交查询失败!)", t) + val _jobRequest: JobRequest = + getEntranceContext.getOrCreateEntranceParser().parseToJobRequest(job) + getEntranceContext + .getOrCreatePersistenceManager() + .createPersistenceEngine() + .updateIfNeeded(_jobRequest) + t match { + case e: LinkisException => e + case e: LinkisRuntimeException => e + case 
t: Throwable => + new SubmitFailedException( + SUBMITTING_QUERY_FAILED.getErrorCode, + SUBMITTING_QUERY_FAILED.getErrorDesc + ExceptionUtils.getRootCauseMessage(t), + t + ) + } + } + } + + def logReader(execId: String): LogReader + + def getJob(execId: String): Option[Job] = + getEntranceContext.getOrCreateScheduler().get(execId).map(_.asInstanceOf[Job]) + + private[entrance] def getEntranceWebSocketService: Option[EntranceWebSocketService] = + if (ServerConfiguration.BDP_SERVER_SOCKET_MODE.getValue) { + if (entranceWebSocketService.isEmpty) synchronized { + if (entranceWebSocketService.isEmpty) { + entranceWebSocketService = Some(new EntranceWebSocketService) + entranceWebSocketService.foreach(_.setEntranceServer(this)) + entranceWebSocketService.foreach( + getEntranceContext.getOrCreateEventListenerBus.addListener + ) + } + } + entranceWebSocketService + } else None + + def getAllUndoneTask(filterWords: String): Array[EntranceJob] = { + val consumers = getEntranceContext + .getOrCreateScheduler() + .getSchedulerContext + .getOrCreateConsumerManager + .listConsumers() + .toSet + val filterConsumer = if (StringUtils.isNotBlank(filterWords)) { + consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + } else { + consumers + } + filterConsumer + .flatMap { consumer => + consumer.getRunningEvents ++ consumer.getConsumeQueue.getWaitingEvents + } + .filter(job => job != null && job.isInstanceOf[EntranceJob]) + .map(_.asInstanceOf[EntranceJob]) + .toArray + } + + def getAllConsumeQueueTask(): Array[EntranceJob] = { + val consumers = getEntranceContext + .getOrCreateScheduler() + .getSchedulerContext + .getOrCreateConsumerManager + .listConsumers() + .toSet + + consumers + .flatMap { consumer => + consumer.getConsumeQueue.getWaitingEvents + } + .filter(job => job != null && job.isInstanceOf[EntranceJob]) + .map(_.asInstanceOf[EntranceJob]) + .toArray + } + + def clearAllConsumeQueue(): Unit = { + getEntranceContext + .getOrCreateScheduler() + .getSchedulerContext + .getOrCreateConsumerManager + .listConsumers() + .foreach(_.getConsumeQueue.clearAll()) + } + + def updateAllNotExecutionTaskInstances(retryWhenUpdateFail: Boolean): Unit = { + val consumeQueueTasks = getAllConsumeQueueTask() + + clearAllConsumeQueue() + logger.info("Finished to clean all ConsumeQueue") + + if (consumeQueueTasks != null && consumeQueueTasks.length > 0) { + val taskIds = new util.ArrayList[Long]() + consumeQueueTasks.foreach(job => { + taskIds.add(job.getJobRequest.getId.asInstanceOf[Long]) + job match { + case entranceExecutionJob: EntranceExecutionJob => + val msg = LogUtils.generateWarn( + s"job ${job.getJobRequest.getId} clean from ConsumeQueue, wait for failover" + ) + entranceExecutionJob.getLogListener.foreach(_.onLogUpdate(entranceExecutionJob, msg)) + entranceExecutionJob.getLogWriter.foreach(_.close()) + case _ => + } + }) + + JobHistoryHelper.updateAllConsumeQueueTask(taskIds, retryWhenUpdateFail) + logger.info("Finished to update all not execution task instances") + } + } + + /** + * execute failover job (提交故障转移任务,返回新的execId) + * + * @param jobRequest + */ + def failoverExecute(jobRequest: JobRequest): Unit = { + + if (null == jobRequest || null == jobRequest.getId || jobRequest.getId <= 0) { + throw new EntranceErrorException( + PERSIST_JOBREQUEST_ERROR.getErrorCode, + PERSIST_JOBREQUEST_ERROR.getErrorDesc + ) + } + + val logAppender = new java.lang.StringBuilder() + logAppender.append( + "*************************************FAILOVER************************************** \n" + ) + + // try to kill 
ec + killOldEC(jobRequest, logAppender); + + // deal Inited jobRequest, if status is Inited, need to deal by all Interceptors, such as set log_path + if (SchedulerEventState.isInitedByStr(jobRequest.getStatus)) { + dealInitedJobRequest(jobRequest, logAppender) + } + + if ( + EntranceConfiguration.ENTRANCE_FAILOVER_RUNNING_KILL_ENABLED.getValue && + SchedulerEventState.isRunningByStr(jobRequest.getStatus) + ) { + // deal Running jobRequest, if enabled, status changed from Running to Cancelled + dealRunningJobRequest(jobRequest, logAppender) + } else { + // init and submit + initAndSubmitJobRequest(jobRequest, logAppender) + } + } + + def killOldEC(jobRequest: JobRequest, logAppender: lang.StringBuilder): Unit = { + Utils.tryCatch { + logAppender.append( + LogUtils + .generateInfo(s"job ${jobRequest.getId} start to kill old ec \n") + ) + if ( + !SchedulerEventState.isRunning(SchedulerEventState.withName(jobRequest.getStatus)) + || !SchedulerEventState.isScheduled(SchedulerEventState.withName(jobRequest.getStatus)) + ) { + val msg = s"job ${jobRequest.getId} status is not running or scheduled, ignore it" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + return + } + + if ( + jobRequest.getMetrics == null + || !jobRequest.getMetrics.containsKey(TaskConstant.JOB_ENGINECONN_MAP) + ) { + val msg = s"job ${jobRequest.getId} not have EC info, ignore it" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + return + } + + val engineMap = jobRequest.getMetrics + .get(TaskConstant.JOB_ENGINECONN_MAP) + .asInstanceOf[util.Map[String, Object]] + + val engineInstance = + engineMap.asScala + .map(_._2.asInstanceOf[util.Map[String, Object]]) + .filter(_.containsKey(TaskConstant.ENGINE_INSTANCE)) + .maxBy(_.getOrDefault(TaskConstant.ENGINE_CONN_SUBMIT_TIME, "0").toString) + + if (engineInstance == null || engineInstance.containsKey(TaskConstant.FAILOVER_FLAG)) { + val msg = + s"job ${jobRequest.getId} do not submit to EC or already failover, not need kill ec" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + return + } + engineInstance.put(TaskConstant.FAILOVER_FLAG, "") + + val ecInstance = ServiceInstance( + GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue, + engineInstance.get(TaskConstant.ENGINE_INSTANCE).toString + ) + if (jobRequest.getLabels.asScala.exists(_.isInstanceOf[ExecuteOnceLabel])) { + // kill ec by linkismanager + val engineStopRequest = new EngineStopRequest + engineStopRequest.setServiceInstance(ecInstance) + // send to linkismanager kill ec + Sender + .getSender(RPCConfiguration.LINKIS_MANAGER_SERVICE_NAME.getValue) + .send(engineStopRequest) + val msg = + s"job ${jobRequest.getId} send EngineStopRequest to linkismanager, kill EC instance $ecInstance" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + } else if (engineInstance.containsKey(TaskConstant.ENGINE_CONN_TASK_ID)) { + // get ec taskId + val engineTaskId = engineInstance.get(TaskConstant.ENGINE_CONN_TASK_ID).toString + // send to ec kill task + Sender + .getSender(ecInstance) + .send(RequestTaskKill(engineTaskId)) + val msg = + s"job ${jobRequest.getId} send RequestTaskKill to kill engineConn $ecInstance, execID $engineTaskId" + logger.info(msg) + logAppender.append(LogUtils.generateInfo(msg) + "\n") + } + } { t => + logger.error(s"job ${jobRequest.getId} kill ec error", t) + } + } + + def dealInitedJobRequest(jobReq: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + var jobRequest = jobReq Utils.tryThrow( 
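failoverExecute first tries to kill the job's old EngineConn, then branches on the persisted status: an Inited job is re-run through the interceptors, and a Running job is either cancelled (when the running-kill switch is on) or re-initialized and resubmitted. A Java sketch of that Scala control flow, with the helpers stubbed:

    public class FailoverFlowSketch {
      static void failoverExecute(String status, boolean runningKillEnabled) {
        killOldEc(); // best effort: errors are logged, not thrown
        if ("Inited".equals(status)) {
          dealInited(); // run interceptors, e.g. to generate log_path
        }
        if (runningKillEnabled && "Running".equals(status)) {
          markCancelled(); // Running -> Cancelled, persisted with a failover log
        } else {
          initAndSubmit(); // reset properties, update the DB, submit again
        }
      }

      static void killOldEc() {}
      static void dealInited() {}
      static void markCancelled() {}
      static void initAndSubmit() {}
    }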
getEntranceContext .getOrCreateEntranceInterceptors() @@ -128,6 +432,68 @@ abstract class EntranceServer extends Logging { .updateIfNeeded(jobRequest) error } + jobRequest + } + + def dealRunningJobRequest(jobRequest: JobRequest, logAppender: lang.StringBuilder): Unit = { + Utils.tryCatch { + // error_msg + val msg = + MessageFormat.format( + EntranceErrorCodeSummary.FAILOVER_RUNNING_TO_CANCELLED.getErrorDesc, + jobRequest.getId.toString + ) + // init jobRequest properties + jobRequest.setStatus(SchedulerEventState.Cancelled.toString) + jobRequest.setProgress("1.0") + jobRequest.setInstances(Sender.getThisInstance) + jobRequest.setErrorCode(EntranceErrorCodeSummary.FAILOVER_RUNNING_TO_CANCELLED.getErrorCode) + jobRequest.setErrorDesc(msg) + + // update jobRequest + getEntranceContext + .getOrCreatePersistenceManager() + .createPersistenceEngine() + .updateIfNeeded(jobRequest) + + // getOrGenerate log_path + var logPath = jobRequest.getLogPath + if (StringUtils.isBlank(logPath)) { + ParserUtils.generateLogPath(jobRequest, null) + logPath = jobRequest.getLogPath + logAppender.append( + LogUtils.generateInfo(s"job ${jobRequest.getId} generate new logPath $logPath \n") + ) + } + val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest) + val logWriter = getEntranceContext.getOrCreateLogManager().createLogWriter(job) + if (logAppender.length() > 0) { + logWriter.write(logAppender.toString.trim) + } + + logWriter.write(LogUtils.generateInfo(msg) + "\n") + logWriter.flush() + logWriter.close() + + } { case e: Exception => + logger.error(s"Job ${jobRequest.getId} failover, change status error", e) + } + } + + def initAndSubmitJobRequest(jobRequest: JobRequest, logAppender: lang.StringBuilder): Unit = { + // init properties + initJobRequestProperties(jobRequest, logAppender) + + // update jobRequest + getEntranceContext + .getOrCreatePersistenceManager() + .createPersistenceEngine() + .updateIfNeeded(jobRequest) + + // reset `UpdateOrderFlag` + jobRequest.setUpdateOrderFlag(true) + + logger.info(s"job ${jobRequest.getId} update JobRequest success") val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest) Utils.tryThrow { @@ -145,7 +511,7 @@ abstract class EntranceServer extends Logging { job.getLogListener.foreach(_.onLogUpdate(job, logAppender.toString.trim)) } } { t => - logger.error("Failed to write init log, reason: ", t) + logger.error("Failed to write init JobRequest log, reason: ", t) } /** @@ -154,27 +520,35 @@ abstract class EntranceServer extends Logging { * this to trigger JobListener.onJobinit() */ Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) + if (logger.isDebugEnabled()) { + logger.debug( + s"After code preprocessing, the real execution code is:${jobRequest.getExecutionCode}" + ) + } + if (StringUtils.isBlank(jobRequest.getExecutionCode)) { + throw new SubmitFailedException( + SUBMIT_CODE_ISEMPTY.getErrorCode, + SUBMIT_CODE_ISEMPTY.getErrorDesc + ) + } getEntranceContext.getOrCreateScheduler().submit(job) val msg = LogUtils.generateInfo( - s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted " + s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted, success to failover" ) logger.info(msg) job match { case entranceJob: EntranceJob => entranceJob.getJobRequest.setReqId(job.getId()) - if (jobTimeoutManager.timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { + if (timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { jobTimeoutManager.add(job.getId(), 
entranceJob) } entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg)) case _ => } - LoggerUtils.removeJobIdMDC() - job } { t => - LoggerUtils.removeJobIdMDC() job.onFailure("Submitting the query failed!(提交查询失败!)", t) - val _jobRequest: JobRequest = + val _jobRequest = getEntranceContext.getOrCreateEntranceParser().parseToJobRequest(job) getEntranceContext .getOrCreatePersistenceManager() @@ -193,26 +567,83 @@ abstract class EntranceServer extends Logging { } } - def logReader(execId: String): LogReader + private def initJobRequestProperties( + jobRequest: JobRequest, + logAppender: lang.StringBuilder + ): Unit = { + logger.info(s"job ${jobRequest.getId} start to initialize the properties") + val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") + val initInstance = Sender.getThisInstance + val initDate = new Date(System.currentTimeMillis) + val initStatus = SchedulerEventState.Inited.toString + val initProgress = "0.0" + val initReqId = "" - def getJob(execId: String): Option[Job] = - getEntranceContext.getOrCreateScheduler().get(execId).map(_.asInstanceOf[Job]) + logAppender.append( + LogUtils + .generateInfo(s"job ${jobRequest.getId} start to Initialize the properties \n") + ) + logAppender.append( + LogUtils.generateInfo(s"the instances ${jobRequest.getInstances} -> ${initInstance} \n") + ) + logAppender.append( + LogUtils.generateInfo( + s"the created_time ${sdf.format(jobRequest.getCreatedTime)} -> ${sdf.format(initDate)} \n" + ) + ) + logAppender.append( + LogUtils.generateInfo(s"the status ${jobRequest.getStatus} -> $initStatus \n") + ) + logAppender.append( + LogUtils.generateInfo(s"the progress ${jobRequest.getProgress} -> $initProgress \n") + ) - private[entrance] def getEntranceWebSocketService: Option[EntranceWebSocketService] = - if (ServerConfiguration.BDP_SERVER_SOCKET_MODE.getValue) { - if (entranceWebSocketService.isEmpty) synchronized { - if (entranceWebSocketService.isEmpty) { - entranceWebSocketService = Some(new EntranceWebSocketService) - entranceWebSocketService.foreach(_.setEntranceServer(this)) - entranceWebSocketService.foreach( - getEntranceContext.getOrCreateEventListenerBus.addListener + val metricMap = new util.HashMap[String, Object]() + if (EntranceConfiguration.ENTRANCE_FAILOVER_RETAIN_METRIC_ENGINE_CONN_ENABLED.getValue) { + if ( + jobRequest.getMetrics != null && jobRequest.getMetrics.containsKey( + TaskConstant.JOB_ENGINECONN_MAP ) - } + ) { + val oldEngineconnMap = jobRequest.getMetrics + .get(TaskConstant.JOB_ENGINECONN_MAP) + .asInstanceOf[util.Map[String, Object]] + metricMap.put(TaskConstant.JOB_ENGINECONN_MAP, oldEngineconnMap) } - entranceWebSocketService - } else None + } - def getAllUndoneTask(filterWords: String): Array[EntranceJob] = { + if (EntranceConfiguration.ENTRANCE_FAILOVER_RETAIN_METRIC_YARN_RESOURCE_ENABLED.getValue) { + if ( + jobRequest.getMetrics != null && jobRequest.getMetrics.containsKey( + TaskConstant.JOB_YARNRESOURCE + ) + ) { + val oldResourceMap = jobRequest.getMetrics + .get(TaskConstant.JOB_YARNRESOURCE) + .asInstanceOf[util.Map[String, Object]] + metricMap.put(TaskConstant.JOB_YARNRESOURCE, oldResourceMap) + } + } + + jobRequest.setInstances(initInstance) + jobRequest.setCreatedTime(initDate) + jobRequest.setStatus(initStatus) + jobRequest.setProgress(initProgress) + jobRequest.setReqId(initReqId) + jobRequest.setErrorCode(0) + jobRequest.setErrorDesc("") + jobRequest.setMetrics(metricMap) + jobRequest.getMetrics.put(TaskConstant.JOB_SUBMIT_TIME, initDate) + // Allow task status updates to be unordered + 
jobRequest.setUpdateOrderFlag(false)
+
+    logAppender.append(
+      LogUtils.generateInfo(s"job ${jobRequest.getId} success to initialize the properties \n")
+    )
+    logger.info(s"job ${jobRequest.getId} success to initialize the properties")
+  }
+
+  def getAllUndoneTask(filterWords: String, ecType: String = null): Array[EntranceJob] = {
     val consumers = getEntranceContext
       .getOrCreateScheduler()
       .getSchedulerContext
@@ -220,7 +651,14 @@ abstract class EntranceServer extends Logging {
       .listConsumers()
       .toSet
     val filterConsumer = if (StringUtils.isNotBlank(filterWords)) {
-      consumers.filter(_.getGroup.getGroupName.contains(filterWords))
+      if (StringUtils.isNotBlank(ecType)) {
+        consumers.filter(consumer =>
+          consumer.getGroup.getGroupName.contains(filterWords) && consumer.getGroup.getGroupName
+            .contains(ecType)
+        )
+      } else {
+        consumers.filter(_.getGroup.getGroupName.contains(filterWords))
+      }
     } else {
       consumers
     }
@@ -233,6 +671,40 @@ abstract class EntranceServer extends Logging {
       .toArray
   }
+  /**
+   * to check timeout tasks, and kill tasks that run longer than the timeout (default 48h)
+   */
+  def startTimeOutCheck(): Unit = {
+    Utils.defaultScheduler.scheduleAtFixedRate(
+      new Runnable() {
+        override def run(): Unit = {
+          Utils.tryCatch {
+
+            val timeoutType = EntranceConfiguration.ENTRANCE_TASK_TIMEOUT.getHotValue()
+            logger.info(s"Start to check timeout Job, timeout is ${timeoutType}")
+            val timeoutTime = System.currentTimeMillis() - timeoutType.toLong
+            getAllUndoneTask(null, null).filter(job => job.createTime < timeoutTime).foreach {
+              job =>
+                job.onFailure(s"Job has run for longer than the maximum time $timeoutType", null)
+            }
+            logger.info(s"Finished to check timeout Job, timeout is ${timeoutType}")
+          } { case t: Throwable =>
+            logger.warn(s"TimeoutDetective Job failed. 
${t.getMessage}", t) + } + } + + }, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + TimeUnit.MILLISECONDS + ) + } + + if (timeoutCheck) { + logger.info("Job time check is enabled") + startTimeOutCheck() + } + } object EntranceServer { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala index 714b9f0cc2..b5339c9e2e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala @@ -215,18 +215,6 @@ class EntranceWebSocketService s"Your job's execution code is (after variable substitution and code check) " ) ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) entranceServer.getEntranceContext .getOrCreateLogManager() .onLogUpdate( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala index 7c3935e69b..270376911b 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala @@ -220,7 +220,69 @@ object EntranceConfiguration { val CREATOR_IP_SWITCH = CommonVars("wds.linkis.entrance.user.creator.ip.interceptor.switch", false) + val TEMPLATE_CONF_SWITCH = + CommonVars("linkis.entrance.template.conf.interceptor.switch", false) + val ENABLE_ENTRANCE_DIRTY_DATA_CLEAR = CommonVars("linkis.entrance.auto.clean.dirty.data.enable", false) + val ENTRANCE_CREATOR_JOB_LIMIT: CommonVars[Int] = + CommonVars[Int]( + "linkis.entrance.creator.job.concurrency.limit", + 10000, + "Creator task concurrency limit parameters" + ) + + val ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE = + CommonVars("linkis.entrance.creator.job.concurrency.limit.conf.cache.time", 30L) + + val ENTRANCE_TASK_TIMEOUT = + CommonVars("linkis.entrance.task.timeout", new TimeType("48h")) + + val ENTRANCE_TASK_TIMEOUT_SCAN = + CommonVars("linkis.entrance.task.timeout.scan", new TimeType("12h")) + + val ENABLE_HDFS_JVM_USER = + CommonVars[Boolean]("linkis.entrance.enable.hdfs.jvm.user", true).getValue + + val ENTRANCE_FAILOVER_ENABLED = CommonVars("linkis.entrance.failover.enable", true).getValue + + val ENTRANCE_FAILOVER_SCAN_INIT_TIME = + CommonVars("linkis.entrance.failover.scan.init.time", 3 * 1000).getValue + + val ENTRANCE_FAILOVER_SCAN_INTERVAL = + CommonVars("linkis.entrance.failover.scan.interval", 30 * 1000).getValue + + val ENTRANCE_FAILOVER_DATA_NUM_LIMIT = + CommonVars("linkis.entrance.failover.data.num.limit", 10).getValue + + val ENTRANCE_FAILOVER_DATA_INTERVAL_TIME = + CommonVars("linkis.entrance.failover.data.interval.time", new TimeType("1d").toLong).getValue + + // 
if true, the waitForRetry job in runningJobs can be failover + val ENTRANCE_FAILOVER_RETRY_JOB_ENABLED = + CommonVars("linkis.entrance.failover.retry.job.enable", false) + + val ENTRANCE_UPDATE_BATCH_SIZE = CommonVars("linkis.entrance.update.batch.size", 100) + + // if true, the job in ConsumeQueue can be failover + val ENTRANCE_SHUTDOWN_FAILOVER_CONSUME_QUEUE_ENABLED = + CommonVars("linkis.entrance.shutdown.failover.consume.queue.enable", true).getValue + + val ENTRANCE_GROUP_SCAN_ENABLED = CommonVars("linkis.entrance.group.scan.enable", true) + + val ENTRANCE_GROUP_SCAN_INIT_TIME = CommonVars("linkis.entrance.group.scan.init.time", 3 * 1000) + + val ENTRANCE_GROUP_SCAN_INTERVAL = CommonVars("linkis.entrance.group.scan.interval", 60 * 1000) + + val ENTRANCE_FAILOVER_RETAIN_METRIC_ENGINE_CONN_ENABLED = + CommonVars("linkis.entrance.failover.retain.metric.engine.conn.enable", false) + + val ENTRANCE_FAILOVER_RETAIN_METRIC_YARN_RESOURCE_ENABLED = + CommonVars("linkis.entrance.failover.retain.metric.yarn.resource.enable", false) + + // if true, job whose status is running will be set to Cancelled + val ENTRANCE_FAILOVER_RUNNING_KILL_ENABLED = + CommonVars("linkis.entrance.failover.running.kill.enable", false) + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala index 627ab82b8e..34bd6ead01 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala @@ -18,7 +18,6 @@ package org.apache.linkis.entrance.interceptor.impl import org.apache.linkis.common.utils.CodeAndRunTypeUtils -import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.interceptor.EntranceInterceptor import org.apache.linkis.governance.common.entity.job.JobRequest import org.apache.linkis.manager.label.utils.LabelUtil diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala index 7a7cb7463a..a40c3fa35d 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CustomVariableUtils.scala @@ -63,7 +63,7 @@ object CustomVariableUtils extends Logging { } val variableMap = TaskUtils .getVariableMap(jobRequest.getParams) - .asInstanceOf[util.HashMap[String, String]] + .asInstanceOf[util.Map[String, String]] variables.putAll(variableMap) if (!variables.containsKey("user")) { variables.put("user", jobRequest.getExecuteUser) diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala index 8436ccc711..35b40db339 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala @@ -57,6 +57,8 @@ object SparkExplain extends Explain { private val sy = Pattern.compile("sys\\.") private val scCancelAllJobs = Pattern.compile("sc\\.cancelAllJobs(\\s*)") private val runtime = Pattern.compile("Runtime\\.getRuntime") + private val LINE_BREAK = "\n" + private val LOG: Logger = LoggerFactory.getLogger(getClass) override def authPass(code: String, error: StringBuilder): Boolean = { if (EntranceConfiguration.SKIP_AUTH.getHotValue()) { @@ -99,6 +101,7 @@ object SQLExplain extends Explain { private val LIMIT: String = "limit" private val LIMIT_UPPERCASE: String = "LIMIT" private val IDE_ALLOW_NO_LIMIT = "--set wds.linkis.engine.no.limit.allow=true" + private val LOG: Logger = LoggerFactory.getLogger(getClass) override def authPass(code: String, error: StringBuilder): Boolean = { true @@ -131,6 +134,8 @@ object SQLExplain extends Explain { .generateWarn("please pay attention ,SQL full export mode opened(请注意,SQL全量导出模式打开)\n") ) } + var isFirstTimePrintingLimit = true + var isFirstTimePrintingOverLimit = true if (tempCode.contains("""\;""")) { val semicolonIndexes = findRealSemicolonIndex(tempCode) var oldIndex = 0 @@ -140,20 +145,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } + // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim @@ -167,20 +179,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } + // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. 
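SQLExplain now warns only once per submission about missing or oversized limits; the rewrite itself is unchanged: a bare SELECT gets "limit 5000" appended after comments are stripped. A self-contained approximation of that rewrite (the real implementation also repairs limits larger than 5000):

    import java.util.Locale;

    public class LimitGuardSketch {
      static String fix(String sql) {
        String trimmed = sql.trim();
        String lower = trimmed.toLowerCase(Locale.getDefault());
        boolean isSelect = lower.split("\\s+")[0].contains("select");
        boolean hasLimit = lower.matches(".*\\blimit\\s+\\d+\\s*;?$");
        return (isSelect && !hasLimit) ? trimmed + " limit 5000" : trimmed;
      }

      public static void main(String[] args) {
        System.out.println(fix("SELECT * FROM dual"));       // SELECT * FROM dual limit 5000
        System.out.println(fix("select 1 from t limit 10")); // unchanged
      }
    }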
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala new file mode 100644 index 0000000000..6accd30bd5 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.interceptor.EntranceInterceptor +import org.apache.linkis.governance.common.entity.job.JobRequest + +import java.lang + +class TemplateConfInterceptor extends EntranceInterceptor { + + override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + if (EntranceConfiguration.TEMPLATE_CONF_SWITCH.getValue) { + TemplateConfUtils.dealWithTemplateConf(jobRequest, logAppender) + } else { + jobRequest + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala new file mode 100644 index 0000000000..cdcbe01e85 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala @@ -0,0 +1,276 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.exception.LinkisCommonErrorException +import org.apache.linkis.common.log.LogUtils +import org.apache.linkis.common.utils.{CodeAndRunTypeUtils, Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.governance.common.entity.TemplateConfKey +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.conf.{TemplateConfRequest, TemplateConfResponse} +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel +import org.apache.linkis.manager.label.utils.LabelUtil +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.{lang, util} +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ + +import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} + +object TemplateConfUtils extends Logging { + + val confTemplateNameKey = "ec.resource.name" + + private val templateCache: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateUuid: String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateUuid:$templateUuid") + val res = sender.ask(new TemplateConfRequest(templateUuid)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + logger + .warn(s"load template configuration data templateUuid:$templateUuid loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, please check the warn log") + } + templateList + } + + }) + + private val templateCacheName: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateName: String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateName:$templateName") + val res = sender.ask(new TemplateConfRequest(null, templateName)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + logger + .warn(s"load template configuration data templateName:$templateName loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, please check the warn log") + } + templateList + } + + }) + + /** + * Get user-defined template conf name value + * + * @param code + * :code + * @param codeType + * :sql,hql,scala + * @return
* the last template conf name defined in the code + */ + def getCustomTemplateConfName(code: String, codeType: String): String = { + var templateConfName = "" + + var varString: String = null + var errString: String = null + var rightVarString: String = null + + val languageType = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType) + + languageType match { + case CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL => + varString = s"""\\s*---@set ${confTemplateNameKey}=\\s*.+\\s*""" + errString = """\s*---@.*""" + case CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON | CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL => + varString = s"""\\s*##@set ${confTemplateNameKey}=\\s*.+\\s*""" + errString = """\s*##@""" + case CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA => + varString = s"""\\s*///@set ${confTemplateNameKey}=\\s*.+\\s*""" + errString = """\s*///@.+""" + case _ => + return templateConfName + } + + val customRegex = varString.r.unanchored + val errRegex = errString.r.unanchored + var codeRes = code.replaceAll("\r\n", "\n") + // only allow the set directive on the first line + val res = codeRes.split("\n") + if (res.size > 0) { + val str = res(0) + str match { + case customRegex() => + val clearStr = if (str.endsWith(";")) str.substring(0, str.length - 1) else str + val res: Array[String] = clearStr.split("=") + if (res != null && res.length == 2) { + templateConfName = res(1).trim + logger.info(s"get template conf name $templateConfName") + } else { + if (res.length > 2) { + throw new LinkisCommonErrorException( + 20044, + s"$str template conf name var defined incorrectly" + ) + } else { + throw new LinkisCommonErrorException( + 20045, + s"template conf name var was defined incorrectly:$str" + ) + } + } + case errRegex() => + logger.warn( + s"The template conf name var definition is incorrect:$str. If it is not used it will not cause an error, but it is recommended to define it with the correct syntax" + ) + case _ => + } + } + templateConfName + }
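A quick usage sketch of the extractor above (the template name is hypothetical, and it assumes codeType "sql" resolves to the SQL language type): the directive must sit on the first line of the script.

// First line carries the directive; the extractor returns the value after "="
val sqlCode =
  "---@set ec.resource.name=my_spark_template\n" +
  "SELECT 1;"
val name = TemplateConfUtils.getCustomTemplateConfName(sqlCode, "sql")
// name == "my_spark_template"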
+ + def dealWithTemplateConf(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + jobRequest match { + case requestPersistTask: JobRequest => + val params = requestPersistTask.getParams + val startMap = TaskUtils.getStartupMap(params) + + var templateConflist: util.List[TemplateConfKey] = new util.ArrayList[TemplateConfKey]() + var templateName: String = "" + // only for Creator:IDE, try to get the template conf name from the code string. eg:---@set ec.resource.name=xxxx + val (user, creator) = LabelUtil.getUserCreator(jobRequest.getLabels) + if (EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue.equals(creator)) { + val codeType = LabelUtil.getCodeType(jobRequest.getLabels) + templateName = + TemplateConfUtils.getCustomTemplateConfName(jobRequest.getExecutionCode, codeType) + } + + // code template name > start params template uuid + if (StringUtils.isBlank(templateName)) { + logger.debug("jobRequest startMap param template name is empty") + + logger.info("jobRequest startMap params :{} ", startMap) + val templateUuid = startMap.getOrDefault(LabelKeyConstant.TEMPLATE_CONF_KEY, "").toString + + if (StringUtils.isBlank(templateUuid)) { + logger.debug("jobRequest startMap param template id is empty") + } else { + logger.info("try to get template conf list with template uid:{} ", templateUuid) + logAppender.append( + LogUtils + .generateInfo(s"Try to get template conf data with template uid:$templateUuid\n") + ) + templateConflist = templateCache.get(templateUuid) + if (templateConflist == null || templateConflist.size() == 0) { + logAppender.append( + LogUtils.generateWarn( + s"Can not get any template conf data with template uid:$templateUuid\n" + ) + ) + } + } + } else { + logger.info("Try to get template conf list with template name:[{}]", templateName) + logAppender.append( + LogUtils + .generateInfo(s"Try to get template conf data with template name:[$templateName]\n") + ) + templateConflist = templateCacheName.get(templateName) + if (templateConflist == null || templateConflist.size() == 0) { + logAppender.append( + LogUtils.generateWarn( + s"Can not get any template conf data with template name:$templateName\n" + ) + ) + } else { + // remove the metadata start param + TaskUtils.clearStartupMap(params) + + val onceLabel = + LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel( + classOf[ExecuteOnceLabel] + ) + logger.info("Add once label for task id:{}", requestPersistTask.getId.toString) + requestPersistTask.getLabels.add(onceLabel) + } + } + + if (templateConflist != null && templateConflist.size() > 0) { + val keyList = new util.HashMap[String, AnyRef]() + templateConflist.asScala.foreach(ele => { + val key = ele.getKey + val oldValue = startMap.get(key) + if (oldValue != null && StringUtils.isNotBlank(oldValue.toString)) { + logger.info(s"key:$key value:$oldValue not empty, skip it") + } else { + val newValue = ele.getConfigValue + logger.info(s"key:$key value:$newValue will add to startMap params") + if (TaskUtils.isWithDebugInfo(params)) { + logAppender.append(LogUtils.generateInfo(s"add $key=$newValue\n")) + } + keyList.put(key, newValue) + } + + }) + if (keyList.size() > 0) { + TaskUtils.addStartupMap(params, keyList) + } + } + + case _ => + } + jobRequest + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala index 573c134493..653e9ad78b 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/UserCreatorIPCheckUtils.scala @@ -67,7 +67,7 @@ object UserCreatorIPCheckUtils extends Logging { def checkUserIp(jobRequest: JobRequest, logAppender:
lang.StringBuilder): JobRequest = { // Get IP address - val jobIp = jobRequest.getSource.get(TaskConstant.REQUEST_IP) + val jobIp = jobRequest.getSource.getOrDefault(TaskConstant.REQUEST_IP, "") logger.debug(s"start to checkTenantLabel $jobIp") if (StringUtils.isNotBlank(jobIp)) { jobRequest match { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala index 0487a238cf..72d40305a6 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala @@ -41,10 +41,26 @@ class VarSubstitutionInterceptor extends EntranceInterceptor { LogUtils.generateInfo("Program is substituting variables for you") + "\n" ) val codeType = LabelUtil.getCodeType(jobRequest.getLabels) - jobRequest.setExecutionCode(CustomVariableUtils.replaceCustomVar(jobRequest, codeType)) + val realCode = CustomVariableUtils.replaceCustomVar(jobRequest, codeType) + jobRequest.setExecutionCode(realCode) logAppender.append( LogUtils.generateInfo("Variables substitution ended successfully") + "\n" ) + // print code after variables substitution + logAppender.append( + LogUtils.generateInfo( + "You have submitted a new job, script code (after variable substitution) is" + ) + "\n" + ); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ) + logAppender.append(realCode); + logAppender.append("\n"); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ); + jobRequest } { case e: VarSubstitutionException => diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala index 483cf9ab43..748f82df4b 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogReader.scala @@ -19,7 +19,11 @@ package org.apache.linkis.entrance.log import org.apache.linkis.common.io.{Fs, FsPath} import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.exception.LogReadFailedException import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils import java.io.{InputStream, IOException} import java.util @@ -36,13 +40,26 @@ class CacheLogReader(logPath: String, charset: String, sharedCache: Cache, user: var closed = false private def createInputStream: InputStream = { + if (!logPath.contains(user)) { + throw new LogReadFailedException( + s"${user} does not have permission to read the path $logPath" + ) + } + val fsPath = new FsPath(logPath) if (fileSystem == null) lock synchronized { if (fileSystem == null) { - fileSystem = FSFactory.getFsByProxyUser(new FsPath(logPath), user) + + fileSystem = + if (StorageUtils.isHDFSPath(fsPath) && 
EntranceConfiguration.ENABLE_HDFS_JVM_USER) { + FSFactory.getFs(new FsPath(logPath)).asInstanceOf[FileSystem] + } else { + FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] + } + fileSystem.init(new util.HashMap[String, String]()) } } - val inputStream: InputStream = fileSystem.read(new FsPath(logPath)) + val inputStream: InputStream = fileSystem.read(fsPath) inputStream } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala index 9028c469ab..b54dc757cd 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala @@ -41,6 +41,8 @@ class CacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: val sb = new StringBuilder if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) + // need to append the latest msg before clearing + sb.append(msg).append("\n") sharedCache.cachedLogs.fakeClear() super.write(sb.toString()) pushTime.setTime( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala index 54914b6002..4b082342ce 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/ErrorCodeManager.scala @@ -18,6 +18,12 @@ package org.apache.linkis.entrance.log import org.apache.linkis.errorcode.client.handler.LinkisErrorCodeHandler +import org.apache.linkis.errorcode.client.manager.LinkisErrorCodeManager +import org.apache.linkis.errorcode.common.LinkisErrorCode + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter abstract class ErrorCodeManager { @@ -35,6 +41,22 @@ abstract class ErrorCodeManager { None } + def errorMatchAndGetContent(log: String): Option[(String, String, String)] = { + getErrorCodes.foreach(e => + if (e.regex.findFirstIn(log).isDefined) { + val matched = e.regex.unapplySeq(log) + if (matched.nonEmpty) { + return Some( + e.code, + e.message.format(matched.get: _*), + e.regex.findFirstIn(log).getOrElse("") + ) + } else return Some(e.code, e.message, "") + } + ) + None + } + } /** @@ -44,7 +66,24 @@ object FlexibleErrorCodeManager extends ErrorCodeManager { private val errorCodeHandler = LinkisErrorCodeHandler.getInstance() - override def getErrorCodes: Array[ErrorCode] = Array.empty + private val linkisErrorCodeManager = LinkisErrorCodeManager.getInstance + + override def getErrorCodes: Array[ErrorCode] = { + val errorCodes: util.List[LinkisErrorCode] = linkisErrorCodeManager.getLinkisErrorCodes + if (errorCodes == null) { + Array.empty + } else { + errorCodes.asScala + .map(linkisErrorCode => + ErrorCode( + linkisErrorCode.getErrorRegex, + linkisErrorCode.getErrorCode, + linkisErrorCode.getErrorDesc + ) + ) + .toArray + } + } override def errorMatch(log: String): Option[(String, String)] = { val errorCodes = errorCodeHandler.handle(log)
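For context, a minimal self-contained sketch of the matching logic added above (toy error code, not from the Linkis error-code table): errorMatchAndGetContent yields the code, the formatted message, and the matched fragment. Note that Regex.unapplySeq only fills the capture groups when the whole log matches the pattern.

import scala.util.matching.Regex

case class DemoErrorCode(regex: Regex, code: String, message: String)

def demoMatch(codes: Seq[DemoErrorCode], log: String): Option[(String, String, String)] = {
  codes.foreach { e =>
    e.regex.findFirstIn(log).foreach { fragment =>
      val groups = e.regex.unapplySeq(log) // Some(...) only on a full match
      return Some((e.code, groups.map(g => e.message.format(g: _*)).getOrElse(e.message), fragment))
    }
  }
  None
}

// demoMatch(Seq(DemoErrorCode("queue (\\S+) is full".r, "01101", "Yarn queue %s is full")),
//   "queue root.ide is full")
// => Some(("01101", "Yarn queue root.ide is full", "queue root.ide is full"))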
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala index 24633dfbb2..4f37ff1040 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala @@ -37,11 +37,15 @@ import java.util class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: String) extends LogWriter(charset) { - if (StringUtils.isBlank(logPath)) { throw new EntranceErrorException(LOGPATH_NOT_NULL.getErrorCode, LOGPATH_NOT_NULL.getErrorDesc) + } - protected var fileSystem = + protected var fileSystem = if (EntranceConfiguration.ENABLE_HDFS_JVM_USER) { + FSFactory.getFs(new FsPath(logPath)).asInstanceOf[FileSystem] + } else { FSFactory.getFsByProxyUser(new FsPath(logPath), user).asInstanceOf[FileSystem] + } override protected var outputStream: OutputStream = null @@ -55,7 +59,12 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u private def init(): Unit = { fileSystem.init(new util.HashMap[String, String]()) - FileSystemUtils.createNewFileWithFileSystem(fileSystem, new FsPath(logPath), user, true) + FileSystemUtils.createNewFileAndSetOwnerWithFileSystem( + fileSystem, + new FsPath(logPath), + user, + true + ) } @throws[IOException] @@ -99,6 +108,8 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u val sb = new StringBuilder if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) + // need to append the latest msg before the fake clear + sb.append(msg).append("\n") sharedCache.cachedLogs.fakeClear() writeToFile(sb.toString()) pushTime.setTime( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala index 626a643a0b..19f4c5c6ad 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LogManager.scala @@ -17,6 +17,7 @@ package org.apache.linkis.entrance.log +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.EntranceContext import org.apache.linkis.entrance.job.EntranceExecutionJob @@ -59,10 +60,19 @@ abstract class LogManager extends LogListener with Logging { } } } - entranceExecutionJob.getLogWriter.foreach(logWriter => logWriter.write(log)) - errorCodeManager.foreach(_.errorMatch(log).foreach { case (code, errorMsg) => - errorCodeListener.foreach(_.onErrorCodeCreated(job, code, errorMsg)) + var writeLog = log + errorCodeManager.foreach(_.errorMatchAndGetContent(log).foreach { + case (code, errorMsg, targetMsg) => + if (!targetMsg.contains(LogUtils.ERROR_STR) && log.contains(LogUtils.ERROR_STR)) { + writeLog = LogUtils.generateERROR( + s"error code: $code, errorMsg: $errorMsg, errorLine: $targetMsg \n" + log + ) + } + errorCodeListener.foreach(_.onErrorCodeCreated(job, code, errorMsg)) + case _ => }) + entranceExecutionJob.getLogWriter.foreach(logWriter =>
logWriter.write(writeLog)) + case _ => } } { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala index 155d8c7bd5..5b62a49aa1 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala @@ -51,7 +51,7 @@ class LoopArray[T](maxCapacity: Int) { } else if (index > _max) { throw new IllegalArgumentException("The index " + index + " must be less than " + _max) } - val _index = (flag + (index - realSize)) % maxCapacity + val _index = (flag + (index - realSize + maxCapacity - 1)) % maxCapacity eventQueue(_index).asInstanceOf[T] }
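The LoopArray fix above shifts the ring-buffer index back by one and adds maxCapacity before the modulo. A small sketch of why the extra maxCapacity term matters (values illustrative): Scala's % keeps the sign of the dividend, so a negative left operand would otherwise produce a negative, and therefore invalid, array index.

val maxCapacity = 10
(-3) % maxCapacity                 // == -3, not a usable index
((-3) + maxCapacity) % maxCapacity // == 7, wraps around correctly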
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala index 4b9b4570f1..e5c657023e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala @@ -20,19 +20,15 @@ package org.apache.linkis.entrance.orchestrator.plugin import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.orchestrator.plugin.UserParallelOrchestratorPlugin import org.apache.linkis.rpc.Sender -import org.apache.linkis.server.BDPJettyServerHelper - -import org.apache.commons.lang3.StringUtils import java.util import java.util.concurrent.TimeUnit @@ -43,10 +39,6 @@ import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlugin with Logging { - private val SPLIT = "," - - private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - private def getDefaultMaxRuningNum: Int = { EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue() } @@ -62,7 +54,7 @@ class EntranceUserParallelOrchestratorPlu .build(new CacheLoader[String, Integer]() { override def load(key: String): Integer = { - val (userCreatorLabel, engineTypeLabel) = fromKeyGetLabels(key) + val (userCreatorLabel, engineTypeLabel) = EntranceUtils.fromKeyGetLabels(key) val keyAndValue = Utils.tryAndWarnMsg { sender .ask(RequestQueryEngineConfigWithGlobalConfig(userCreatorLabel, engineTypeLabel)) @@ -75,10 +67,8 @@ class EntranceUserParallelOrchestratorPlu null == keyAndValue || !keyAndValue .containsKey(EntranceConfiguration.WDS_LINKIS_INSTANCE.key) ) { - logger.error( - s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + - s"will use default value ${EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue()}。All config map: ${BDPJettyServerHelper.gson - .toJson(keyAndValue)}" + logger.warn( + s"cannot find user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + s"will use default value " ) } val maxRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue, true) @@ -102,27 +92,7 @@ class EntranceUserParallelOrchestratorPlu if (null == userCreatorLabel || null == engineTypeLabel) { return getDefaultMaxRuningNum } - configCache.get(getKey(userCreatorLabel, engineTypeLabel)) - } - - private def getKey( - userCreatorLabel: UserCreatorLabel, - engineTypeLabel: EngineTypeLabel - ): String = { - userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue - } - - private def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { - if (StringUtils.isBlank(key)) (null, null) - else { - val labelStringValues = key.split(SPLIT) - if (labelStringValues.length < 2) return (null, null) - val userCreatorLabel = labelFactory - .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) - val engineTypeLabel = labelFactory - .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) - (userCreatorLabel, engineTypeLabel) - } + configCache.get(EntranceUtils.getUserCreatorEcTypeKey(userCreatorLabel, engineTypeLabel)) } override def isReady: Boolean = true diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala index 02d1a6a08e..2ba98438e8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala @@ -17,20 +17,12 @@ package org.apache.linkis.entrance.persistence -import org.apache.linkis.common.io.{FsPath, MetaData, Record} -import org.apache.linkis.common.io.resultset.ResultSet -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} -import org.apache.linkis.entrance.execute.StorePathExecuteRequest -import org.apache.linkis.entrance.job.{EntranceExecuteRequest, EntranceExecutionJob} -import org.apache.linkis.entrance.scheduler.cache.CacheOutputExecuteResponse -import org.apache.linkis.governance.common.entity.job.SubJobDetail import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse} import org.apache.linkis.scheduler.queue.Job -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriterFactory} -import org.apache.linkis.storage.utils.FileSystemUtils +import org.apache.linkis.storage.resultset.ResultSetFactory -import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils class EntranceResultSetEngine extends ResultSetEngine with Logging { @@ -46,15 +38,11 @@
EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc ) } - case CacheOutputExecuteResponse(alias, output) => - if (ResultSetFactory.getInstance.isResultSetPath(output)) { - getDir(output) - } else { - throw new EntranceErrorException( - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc - ) - } + case _ => + throw new EntranceErrorException( + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc + ) } } @@ -64,7 +52,7 @@ class EntranceResultSetEngine extends ResultSetEngine with Logging { } else { val arr = str.split("/").filter(StringUtils.isNotBlank) if (arr.length <= 2) { - return str + str } else { str.substring(0, str.lastIndexOf("/")) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala new file mode 100644 index 0000000000..26d8a60c4c --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.scala @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.scheduler + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.job.EntranceExecutionJob +import org.apache.linkis.entrance.utils.JobHistoryHelper +import org.apache.linkis.scheduler.SchedulerContext +import org.apache.linkis.scheduler.queue.Group +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer + +import java.util +import java.util.concurrent.ExecutorService + +import scala.collection.JavaConverters.collectionAsScalaIterableConverter + +class EntranceFIFOUserConsumer( + schedulerContext: SchedulerContext, + executeService: ExecutorService, + private var group: Group +) extends FIFOUserConsumer(schedulerContext, executeService, group) + with Logging { + + override def loop(): Unit = { + // When offlineFlag=true, the unsubmitted tasks will be failed over, and the running tasks will wait for completion. + // In this case, super.loop only submits retry tasks, but a retry task can itself be failed over, which speeds up taking the entrance offline + schedulerContext match { + case entranceSchedulerContext: EntranceSchedulerContext => + if ( + entranceSchedulerContext.getOfflineFlag && EntranceConfiguration.ENTRANCE_FAILOVER_RETRY_JOB_ENABLED.getValue + ) { + val jobs = scanAllRetryJobsAndRemove() + if (!jobs.isEmpty) { + val ids = new util.ArrayList[Long]() + jobs.asScala.foreach { + case entranceJob: EntranceExecutionJob => + entranceJob.getLogWriter.foreach(_.close()) + ids.add(entranceJob.getJobRequest.getId) + case _ => + } + JobHistoryHelper.updateBatchInstancesEmpty(ids) + } + Utils.tryQuietly(Thread.sleep(5000)) + return + } + case _ => + } + + // general logic + super.loop() + + } + + override def runScheduleIntercept: Boolean = { + val consumers = getSchedulerContext.getOrCreateConsumerManager.listConsumers + var creatorRunningJobNum = 0 + // APP_TEST_hadoop_hive or IDE_hadoop_hive + val groupNameStr = getGroup.getGroupName + val groupNames = groupNameStr.split("_") + val length = groupNames.length + if (length < 3) return true + // APP_TEST + val lastIndex = groupNameStr.lastIndexOf("_") + val secondLastIndex = groupNameStr.lastIndexOf("_", lastIndex - 1) + val creatorName = groupNameStr.substring(0, secondLastIndex) + // hive + val ecType = groupNames(length - 1) + for (consumer <- consumers) { + val groupName = consumer.getGroup.getGroupName + if (groupName.startsWith(creatorName) && groupName.endsWith(ecType)) { + creatorRunningJobNum += consumer.getRunningEvents.length + } + } + val creatorECTypeMaxRunningJobs = + CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creatorName, ecType) + if (logger.isDebugEnabled) { + logger.debug( + s"Creator: $creatorName EC:$ecType there are currently:$creatorRunningJobNum jobs running and maximum limit: $creatorECTypeMaxRunningJobs" + ) + } + if (creatorRunningJobNum > creatorECTypeMaxRunningJobs) { + logger.error( + s"Creator: $creatorName EC:$ecType there are currently:$creatorRunningJobNum jobs running that exceed the maximum limit: $creatorECTypeMaxRunningJobs" + ) + false + } else true + } + +}
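runScheduleIntercept above splits the consumer group name into creator and EC type; a small illustrative sketch of that parsing, using the sample group name from the method's comment:

// "APP_TEST_hadoop_hive": everything before the second-to-last "_" is the creator,
// the last segment is the EC type.
val groupNameStr = "APP_TEST_hadoop_hive"
val lastIndex = groupNameStr.lastIndexOf("_")
val secondLastIndex = groupNameStr.lastIndexOf("_", lastIndex - 1)
val creatorName = groupNameStr.substring(0, secondLastIndex) // "APP_TEST"
val ecType = groupNameStr.split("_").last                    // "hive"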
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala index 7f16dd2463..de4c025e30 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala @@ -17,30 +17,20 @@ package org.apache.linkis.entrance.scheduler -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.conf.{CommonVars, Configuration} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.execute.EntranceJob +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import
org.apache.linkis.instance.label.client.InstanceLabelClient -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.manager.label.entity.engine.{ - ConcurrentEngineConnLabel, - EngineTypeLabel, - UserCreatorLabel -} -import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.manager.label.utils.LabelUtil -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.rpc.Sender import org.apache.linkis.scheduler.queue.{Group, GroupFactory, SchedulerEvent} import org.apache.linkis.scheduler.queue.parallelqueue.ParallelGroup @@ -51,8 +41,6 @@ import java.util import java.util.concurrent.TimeUnit import java.util.regex.Pattern -import scala.collection.JavaConverters._ - import com.google.common.cache.{Cache, CacheBuilder} class EntranceGroupFactory extends GroupFactory with Logging { @@ -63,7 +51,7 @@ class EntranceGroupFactory extends GroupFactory with Logging { .maximumSize(EntranceConfiguration.GROUP_CACHE_MAX.getValue) .build() - private val GROUP_MAX_CAPACITY = CommonVars("wds.linkis.entrance.max.capacity", 2000) + private val GROUP_MAX_CAPACITY = CommonVars("wds.linkis.entrance.max.capacity", 1000) private val SPECIFIED_USERNAME_REGEX = CommonVars("wds.linkis.entrance.specified.username.regex", "hduser.*") @@ -81,29 +69,16 @@ class EntranceGroupFactory extends GroupFactory with Logging { } override def getOrCreateGroup(event: SchedulerEvent): Group = { - val (labels, params) = event match { + val labels = event match { case job: EntranceJob => - (job.getJobRequest.getLabels, job.getJobRequest.getParams) + job.getJobRequest.getLabels + case _ => + throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - val groupName = EntranceGroupFactory.getGroupNameByLabels(labels, params) + val groupName = EntranceGroupFactory.getGroupNameByLabels(labels) val cacheGroup = groupNameToGroups.getIfPresent(groupName) if (null == cacheGroup) synchronized { val maxAskExecutorTimes = EntranceConfiguration.MAX_ASK_EXECUTOR_TIME.getValue.toLong - if (groupName.startsWith(EntranceGroupFactory.CONCURRENT)) { - if (null == groupNameToGroups.getIfPresent(groupName)) synchronized { - if (null == groupNameToGroups.getIfPresent(groupName)) { - val group = new ParallelGroup( - groupName, - 100, - EntranceConfiguration.CONCURRENT_FACTORY_MAX_CAPACITY.getValue - ) - group.setMaxRunningJobs(EntranceConfiguration.CONCURRENT_MAX_RUNNING_JOBS.getValue) - group.setMaxAskExecutorTimes(EntranceConfiguration.CONCURRENT_EXECUTOR_TIME.getValue) - groupNameToGroups.put(groupName, group) - return group - } - } - } val sender: Sender = Sender.getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) val userCreatorLabel: UserCreatorLabel = LabelUtil.getUserCreatorLabel(labels) @@ -141,8 +116,11 @@ class EntranceGroupFactory extends GroupFactory with Logging { group.setMaxRunningJobs(maxRunningJobs) group.setMaxAskExecutorTimes(maxAskExecutorTimes) groupNameToGroups.put(groupName, group) + group + } + else { + cacheGroup } - groupNameToGroups.getIfPresent(groupName) } override def getGroup(groupName: String): Group = { @@ -156,105 +134,40 @@ class EntranceGroupFactory extends GroupFactory with 
Logging { group } + /** + * User task concurrency is controlled across multiple Entrances: the configured limit is + * divided evenly by the number of running Entrance instances + * @param keyAndValue + * @return + */ private def getUserMaxRunningJobs(keyAndValue: util.Map[String, String]): Int = { - var userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) - var entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length - val labelList = new util.ArrayList[Label[_]]() - val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory - .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) - labelList.add(offlineRouteLabel) - var offlineIns: Array[ServiceInstance] = null - Utils.tryAndWarn { - offlineIns = InstanceLabelClient.getInstance - .getInstanceFromLabel(labelList) - .asScala - .filter(l => - null != l && l.getApplicationName - .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) - ) - .toArray - } - if (null != offlineIns) { - logger.info(s"There are ${offlineIns.length} offlining instance.") - entranceNum = entranceNum - offlineIns.length - } - /* - Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. - */ - if (0 >= entranceNum) { - logger.error( - s"Got ${entranceNum} ${Sender.getThisServiceInstance.getApplicationName} instances." - ) - entranceNum = 1 - } + val userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) + val entranceNum = EntranceUtils.getRunningEntranceNumber() Math.max( EntranceConfiguration.ENTRANCE_INSTANCE_MIN.getValue, userDefinedRunningJobs / entranceNum - ); + ) } } object EntranceGroupFactory { - val CACHE = "_Cache" - - val CONCURRENT = "Concurrent_" - - def getGroupName( - creator: String, - user: String, - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - if (StringUtils.isNotEmpty(creator)) creator + "_" + user + cache - else EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue + "_" + user + cache - } - - def getGroupNameByLabels( - labels: java.util.List[Label[_]], - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - - val userCreator = labels.asScala.find(_.isInstanceOf[UserCreatorLabel]) - val engineType = labels.asScala.find(_.isInstanceOf[EngineTypeLabel]) - val concurrent = labels.asScala.find(_.isInstanceOf[ConcurrentEngineConnLabel]) - if (userCreator.isEmpty || engineType.isEmpty) { + /** + * Entrance group naming rule: creator_username_engineType, eg: IDE_PEACEWONG_SPARK + * @param labels + * @return + */ + def getGroupNameByLabels(labels: java.util.List[Label[_]]): String = { + val userCreatorLabel = LabelUtil.getUserCreatorLabel(labels) + val engineTypeLabel = LabelUtil.getEngineTypeLabel(labels) + if (null == userCreatorLabel || null == engineTypeLabel) { throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - - if (concurrent.isDefined) { - - val engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - val groupName = CONCURRENT + engineTypeLabel.getEngineType - groupName - - } else { - val userCreatorLabel = userCreator.get.asInstanceOf[UserCreatorLabel] - - val engineTypeLabel =
engineType.get.asInstanceOf[EngineTypeLabel] - - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - val groupName = - userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + cache - groupName - } + val groupName = + userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + groupName } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala new file mode 100644 index 0000000000..789e2ca2b1 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.scala @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.utils.EntranceUtils +import org.apache.linkis.instance.label.client.InstanceLabelClient +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.rpc.Sender +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer +import org.apache.linkis.scheduler.queue.parallelqueue.{ParallelConsumerManager, ParallelGroup} + +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ + +class EntranceParallelConsumerManager(maxParallelismUsers: Int, schedulerName: String) + extends ParallelConsumerManager(maxParallelismUsers, schedulerName) { + + override protected def createConsumer(groupName: String): FIFOUserConsumer = { + val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) + new EntranceFIFOUserConsumer(getSchedulerContext, getOrCreateExecutorService, group) + } + + if (EntranceConfiguration.ENTRANCE_GROUP_SCAN_ENABLED.getValue) { + Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable { + override def run(): Unit = Utils.tryAndWarn { + // refresh all group maxAllowRunningJobs + refreshAllGroupMaxAllowRunningJobs(EntranceUtils.getRunningEntranceNumber()) + logger.info("Finished to refresh consumer group maxAllowRunningJobs") + } + }, + EntranceConfiguration.ENTRANCE_GROUP_SCAN_INIT_TIME.getValue, + EntranceConfiguration.ENTRANCE_GROUP_SCAN_INTERVAL.getValue, + TimeUnit.MILLISECONDS + ) + } + + def refreshAllGroupMaxAllowRunningJobs(validInsCount: Int): Unit = { + listConsumers() + .foreach(item => { + item.getGroup match { + case group: ParallelGroup => + val maxAllowRunningJobs = Math.round(group.getMaxRunningJobs / validInsCount) + group.setMaxAllowRunningJobs(maxAllowRunningJobs) + logger + .info( + "group {} refresh maxAllowRunningJobs => {}/{}={}", + Array( + group.getGroupName, + group.getMaxRunningJobs.toString, + validInsCount.toString, + maxAllowRunningJobs.toString + ): _* + ) + case _ => + } + }) + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala index d5de2cc2da..1638b0fb1c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceSchedulerContext.scala @@ -28,6 +28,11 @@ class EntranceSchedulerContext extends SchedulerContext { private var consumerManager: ConsumerManager = _ private var executorManager: ExecutorManager = _ + private var offlineFlag: Boolean = false + + def setOfflineFlag(offlineFlag: Boolean): Unit = this.offlineFlag = offlineFlag + def getOfflineFlag: Boolean = this.offlineFlag + def this( groupFactory: GroupFactory, consumerManager: ConsumerManager, diff --git 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala deleted file mode 100644 index 65bbbd39b4..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.common.io.FsPath -import org.apache.linkis.common.utils.Utils -import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ -import org.apache.linkis.entrance.exception.CacheNotReadyException -import org.apache.linkis.entrance.execute.EntranceJob -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.utils.JobHistoryHelper -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.manager.label.constant.LabelKeyConstant -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils -import org.apache.linkis.scheduler.SchedulerContext -import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ -import org.apache.linkis.scheduler.exception.SchedulerErrorException -import org.apache.linkis.scheduler.executer.SuccessExecuteResponse -import org.apache.linkis.scheduler.queue.Group -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.server.BDPJettyServerHelper -import org.apache.linkis.storage.FSFactory -import org.apache.linkis.storage.fs.FileSystem - -import org.apache.commons.io.FilenameUtils -import org.apache.commons.lang3.StringUtils - -import java.util.concurrent.ExecutorService - -import scala.collection.JavaConverters._ - -import com.google.common.collect.Lists - -class ReadCacheConsumer( - schedulerContext: SchedulerContext, - executeService: ExecutorService, - private var group: Group, - persistenceManager: PersistenceManager -) extends FIFOUserConsumer(schedulerContext, executeService, group) { - - override protected def loop(): Unit = { - val event = Option(getConsumeQueue.take()) - event.foreach { - case job: EntranceJob => - job.getJobRequest match { - case jobRequest: JobRequest => - Utils.tryCatch { - val engineTpyeLabel = jobRequest.getLabels.asScala - .filter(l => l.getLabelKey.equalsIgnoreCase(LabelKeyConstant.ENGINE_TYPE_KEY)) - .headOption - .getOrElse(null) - val labelStrList = jobRequest.getLabels.asScala.map { case l => - l.getStringValue - }.toList - if (null == engineTpyeLabel) { - logger.error( - "Invalid engineType null, 
cannot process. jobReq : " + BDPJettyServerHelper.gson - .toJson(jobRequest) - ) - throw CacheNotReadyException( - INVALID_ENGINETYPE_NULL.getErrorCode, - INVALID_ENGINETYPE_NULL.getErrorDesc - ) - } - val readCacheBefore: Long = TaskUtils - .getRuntimeMap(job.getParams) - .getOrDefault(TaskConstant.READ_CACHE_BEFORE, 300L: java.lang.Long) - .asInstanceOf[Long] - val cacheResult = JobHistoryHelper.getCache( - jobRequest.getExecutionCode, - jobRequest.getExecuteUser, - labelStrList.asJava, - readCacheBefore - ) - if (cacheResult != null && StringUtils.isNotBlank(cacheResult.getResultLocation)) { - val resultSets = listResults(cacheResult.getResultLocation, job.getUser) - if (resultSets.size() > 0) { - for (resultSet: FsPath <- resultSets.asScala) { - val alias = FilenameUtils.getBaseName(resultSet.getPath) - val output = FsPath - .getFsPath( - cacheResult.getResultLocation, - FilenameUtils.getName(resultSet.getPath) - ) - .getSchemaPath -// persistenceManager.onResultSetCreated(job, new CacheOutputExecuteResponse(alias, output)) - throw CacheNotReadyException( - INVALID_RESULTSETS.getErrorCode, - INVALID_RESULTSETS.getErrorDesc - ) - // todo check - } -// persistenceManager.onResultSizeCreated(job, resultSets.size()) - } - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - job.transitionCompleted(SuccessExecuteResponse(), "Result found in cache") - } else { - logger.info("Cache not found, submit to normal consumer.") - submitToExecute(job) - } - } { t => - logger.warn("Read cache failed, submit to normal consumer: ", t) - submitToExecute(job) - } - case _ => - } - case _ => - } - } - - private def listResults(resultLocation: String, user: String) = { - val dirPath = FsPath.getFsPath(resultLocation) - val fileSystem = FSFactory.getFsByProxyUser(dirPath, user).asInstanceOf[FileSystem] - Utils.tryFinally { - fileSystem.init(null) - if (fileSystem.exists(dirPath)) { - fileSystem.listPathWithError(dirPath).getFsPaths - } else { - Lists.newArrayList[FsPath]() - } - }(Utils.tryQuietly(fileSystem.close())) - } - - private def submitToExecute(job: EntranceJob): Unit = { - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.READ_FROM_CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - val groupName = schedulerContext.getOrCreateGroupFactory.getOrCreateGroup(job).getGroupName - val consumer = schedulerContext.getOrCreateConsumerManager.getOrCreateConsumer(groupName) - val index = consumer.getConsumeQueue.offer(job) - // index.map(getEventId(_, groupName)).foreach(job.setId) - if (index.isEmpty) { - throw new SchedulerErrorException( - JOB_QUEUE_IS_FULL.getErrorCode, - JOB_QUEUE_IS_FULL.getErrorDesc - ) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala deleted file mode 100644 index a4cba19f34..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.scheduler.EntranceGroupFactory -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager - -class ReadCacheConsumerManager(maxParallelismUsers: Int, persistenceManager: PersistenceManager) - extends ParallelConsumerManager(maxParallelismUsers) { - - override protected def createConsumer(groupName: String): FIFOUserConsumer = { - val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) - if (groupName.endsWith(EntranceGroupFactory.CACHE)) { - logger.info("Create cache consumer with group: " + groupName) - new ReadCacheConsumer( - getSchedulerContext, - getOrCreateExecutorService, - group, - persistenceManager - ) - } else { - logger.info("Create normal consumer with group: " + groupName) - new FIFOUserConsumer(getSchedulerContext, getOrCreateExecutorService, group) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala index aaaf131bd8..4e62430316 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala @@ -38,8 +38,8 @@ class JobTimeoutManager extends Logging { private[this] final val timeoutJobByName: ConcurrentMap[String, EntranceJob] = new ConcurrentHashMap[String, EntranceJob] - val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue - val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue + private val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + private val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue def add(jobKey: String, job: EntranceJob): Unit = { logger.info(s"Adding timeout job: ${job.getId()}") @@ -77,75 +77,75 @@ class JobTimeoutManager extends Logging { } private def timeoutDetective(): Unit = { - if (timeoutCheck) { - def checkAndSwitch(job: EntranceJob): Unit = { - logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. 
") - val currentTimeSeconds = System.currentTimeMillis() / 1000 - // job.isWaiting == job in queue - val jobScheduleStartTimeSeconds = - if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds - val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds - val jobRunningStartTimeSeconds = - if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds - val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds - if (!job.isCompleted) { - job.jobRequest.getLabels.asScala foreach { - case queueTimeOutLabel: JobQueuingTimeoutLabel => - if ( - job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! " - ) - job.onFailure( - s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", - null - ) - } - case jobRunningTimeoutLabel: JobRunningTimeoutLabel => - if ( - job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! " - ) - job.onFailure( - s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", - null - ) - } - case _ => - } + def checkAndSwitch(job: EntranceJob): Unit = { + logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. ") + val currentTimeSeconds = System.currentTimeMillis() / 1000 + // job.isWaiting == job in queue + val jobScheduleStartTimeSeconds = + if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds + val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds + val jobRunningStartTimeSeconds = + if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds + val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds + if (!job.isCompleted) { + job.jobRequest.getLabels.asScala foreach { + case queueTimeOutLabel: JobQueuingTimeoutLabel => + if ( + job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! " + ) + job.onFailure( + s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", + null + ) + } + case jobRunningTimeoutLabel: JobRunningTimeoutLabel => + if ( + job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! 
" + ) + job.onFailure( + s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", + null + ) + } + case _ => } } - - timeoutJobByName.asScala.foreach(item => { - logger.info(s"Running timeout detection!") - synchronized { - jobCompleteDelete(item._1) - if (jobExist(item._1)) checkAndSwitch(item._2) - } - }) } + + timeoutJobByName.asScala.foreach(item => { + logger.info(s"Running timeout detection!") + synchronized { + jobCompleteDelete(item._1) + if (jobExist(item._1)) checkAndSwitch(item._2) + } + }) } // Thread periodic scan timeout task - val woker = Utils.defaultScheduler.scheduleAtFixedRate( - new Runnable() { - - override def run(): Unit = { - Utils.tryCatch { - timeoutDetective() - } { case t: Throwable => - logger.error(s"TimeoutDetective task failed. ${t.getMessage}", t) + if (timeoutCheck) { + val woker = Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + + override def run(): Unit = { + Utils.tryCatch { + timeoutDetective() + } { case t: Throwable => + logger.warn(s"TimeoutDetective task failed. ${t.getMessage}", t) + } } - } - }, - 0, - timeoutScanInterval, - TimeUnit.SECONDS - ) + }, + 0, + timeoutScanInterval, + TimeUnit.SECONDS + ) + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala new file mode 100644 index 0000000000..13dcefa9f9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.utils + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.instance.label.client.InstanceLabelClient +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter + +object EntranceUtils extends Logging { + + private val SPLIT = "," + + private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory + + def getUserCreatorEcTypeKey( + userCreatorLabel: UserCreatorLabel, + engineTypeLabel: EngineTypeLabel + ): String = { + userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue + } + + def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { + if (StringUtils.isBlank(key)) (null, null) + else { + val labelStringValues = key.split(SPLIT) + if (labelStringValues.length < 2) return (null, null) + val userCreatorLabel = labelFactory + .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) + val engineTypeLabel = labelFactory + .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) + (userCreatorLabel, engineTypeLabel) + } + } + + def getDefaultCreatorECTypeKey(creator: String, ecType: String): String = { + val userCreatorLabel = + labelFactory.createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY) + val ecTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(ecType) + userCreatorLabel.setUser("*") + userCreatorLabel.setCreator(creator) + getUserCreatorEcTypeKey(userCreatorLabel, ecTypeLabel) + } + + def getRunningEntranceNumber(): Int = { + val entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length + val labelList = new util.ArrayList[Label[_]]() + val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory + .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) + labelList.add(offlineRouteLabel) + var offlineIns: Array[ServiceInstance] = null + Utils.tryAndWarn { + offlineIns = InstanceLabelClient.getInstance + .getInstanceFromLabel(labelList) + .asScala + .filter(l => + null != l && l.getApplicationName + .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) + ) + .toArray + } + val entranceRealNumber = if (null != offlineIns) { + logger.info(s"There are ${offlineIns.length} offlining instances.") + entranceNum - offlineIns.length + } else { + entranceNum + } + /* + Sender.getInstances may return 0 instances due to Sender's cache, so this instance itself counts as the one instance. + */ + if (entranceRealNumber <= 0) { + logger.error( + s"Got ${entranceRealNumber} ${Sender.getThisServiceInstance.getApplicationName} instances."
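The pair `getUserCreatorEcTypeKey` / `fromKeyGetLabels` above round-trips a combined key of the form `<userCreator>,<engineType>`. A small sketch of the same build-and-parse shape, with plain strings standing in for the `UserCreatorLabel`/`EngineTypeLabel` string values (the label factory is not needed to see the format):

```scala
// Plain strings stand in for the label objects; the real helpers build and
// parse the same comma-joined shape via the label factory.
object UserCreatorKeySketch {
  private val SPLIT = ","

  def toKey(userCreator: String, engineType: String): String =
    userCreator + SPLIT + engineType

  def fromKey(key: String): Option[(String, String)] =
    key.split(SPLIT) match {
      case Array(userCreator, engineType) => Some((userCreator, engineType))
      case _                              => None // mirrors the (null, null) guard on bad input
    }

  def main(args: Array[String]): Unit = {
    val key = toKey("hadoop-IDE", "spark-2.4.3")
    println(key)          // hadoop-IDE,spark-2.4.3
    println(fromKey(key)) // Some((hadoop-IDE,spark-2.4.3))
  }
}
```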
+ ) + 1 + } else { + entranceRealNumber + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala index ec29128889..44e2357b34 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/JobHistoryHelper.scala @@ -69,6 +69,11 @@ object JobHistoryHelper extends Logging { else task.getStatus } + def getProgressByTaskID(taskID: Long): String = { + val task = getTaskByTaskID(taskID) + if (task == null) "0" else task.getProgress + } + def getRequestIpAddr(req: HttpServletRequest): String = { val addrList = List( Option(req.getHeader("x-forwarded-for")).getOrElse("").split(",")(0), @@ -123,7 +128,144 @@ object JobHistoryHelper extends Logging { sender.ask(jobReqBatchUpdate) } - private def getTaskByTaskID(taskID: Long): JobRequest = { + /** + * Get all consume queue task and batch update instances(获取所有消费队列中的任务进行批量更新) + * + * @param taskIdList + * @param retryWhenUpdateFail + */ + def updateAllConsumeQueueTask( + taskIdList: util.List[Long], + retryWhenUpdateFail: Boolean = false + ): Unit = { + + if (taskIdList.isEmpty) return + + val updateTaskIds = new util.ArrayList[Long]() + + if ( + EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue > 0 && + taskIdList.size() > EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue + ) { + for (i <- 0 until EntranceConfiguration.ENTRANCE_UPDATE_BATCH_SIZE.getValue) { + updateTaskIds.add(taskIdList.get(i)) + } + } else { + updateTaskIds.addAll(taskIdList) + } + val list = new util.ArrayList[Long]() + list.addAll(taskIdList) + try { + val successTaskIds = updateBatchInstancesEmpty(updateTaskIds) + if (retryWhenUpdateFail) { + list.removeAll(successTaskIds) + } else { + list.removeAll(updateTaskIds) + } + } catch { + case e: Exception => + logger.warn("update batch instances failed, wait for retry", e) + Thread.sleep(1000) + } + updateAllConsumeQueueTask(list, retryWhenUpdateFail) + + } + + /** + * Batch update instances(批量更新instances字段) + * + * @param taskIdList + * @return + */ + def updateBatchInstancesEmpty(taskIdList: util.List[Long]): util.List[Long] = { + val jobReqList = new util.ArrayList[JobRequest]() + taskIdList.asScala.foreach(taskID => { + val jobRequest = new JobRequest + jobRequest.setId(taskID) + jobRequest.setInstances("") + jobReqList.add(jobRequest) + }) + val jobReqBatchUpdate = JobReqBatchUpdate(jobReqList) + Utils.tryCatch { + val response = sender.ask(jobReqBatchUpdate) + response match { + case resp: util.List[JobRespProtocol] => + // todo filter success data, rpc have bug +// resp.asScala +// .filter(r => +// r.getStatus == SUCCESS_FLAG && r.getData.containsKey(JobRequestConstants.JOB_ID) +// ) +// .map(_.getData.get(JobRequestConstants.JOB_ID).asInstanceOf[java.lang.Long]) +// .toList + + taskIdList + case _ => + throw JobHistoryFailedException( + "update batch instances from jobhistory not a correct List type" + ) + } + } { + case errorException: ErrorException => throw errorException + case e: Exception => + val e1 = + JobHistoryFailedException( + s"update batch instances ${taskIdList.asScala.mkString(",")} error" + ) + e1.initCause(e) + throw e + } + } + + /** + * query wait for failover task(获取待故障转移的任务) + * + * @param reqMap + * @param statusList + * 
@param startTimestamp + * @param limit + * @return + */ + def queryWaitForFailoverTask( + reqMap: util.Map[String, java.lang.Long], + statusList: util.List[String], + startTimestamp: Long, + limit: Int + ): util.List[JobRequest] = { + val requestFailoverJob = RequestFailoverJob(reqMap, statusList, startTimestamp, limit) + val tasks = Utils.tryCatch { + val response = sender.ask(requestFailoverJob) + response match { + case responsePersist: JobRespProtocol => + val status = responsePersist.getStatus + if (status != SUCCESS_FLAG) { + logger.error(s"query from jobHistory status failed, status is $status") + throw JobHistoryFailedException("query from jobHistory status failed") + } + val data = responsePersist.getData + data.get(JobRequestConstants.JOB_HISTORY_LIST) match { + case tasks: List[JobRequest] => + tasks.asJava + case _ => + throw JobHistoryFailedException( + s"query from jobhistory not a correct List type, instances ${reqMap.keySet()}" + ) + } + case _ => + logger.error("get query response incorrectly") + throw JobHistoryFailedException("get query response incorrectly") + } + } { + case errorException: ErrorException => throw errorException + case e: Exception => + val e1 = + JobHistoryFailedException(s"query failover task error, instances ${reqMap.keySet()} ") + e1.initCause(e) + throw e1 + } + tasks + } + + def getTaskByTaskID(taskID: Long): JobRequest = { val jobRequest = new JobRequest jobRequest.setId(taskID) jobRequest.setSource(null) @@ -176,15 +318,15 @@ object JobHistoryHelper extends Logging { val ecResourceMap = if (resourceInfo == null) new util.HashMap[String, ResourceWithStatus] else resourceInfo if (resourceMap != null) { - resourceMap.asInstanceOf[util.HashMap[String, ResourceWithStatus]].putAll(ecResourceMap) + resourceMap.asInstanceOf[util.Map[String, ResourceWithStatus]].putAll(ecResourceMap) } else { metricsMap.put(TaskConstant.JOB_YARNRESOURCE, ecResourceMap) } - var engineInstanceMap: util.HashMap[String, AnyRef] = null + var engineInstanceMap: util.Map[String, AnyRef] = null if (metricsMap.containsKey(TaskConstant.JOB_ENGINECONN_MAP)) { engineInstanceMap = metricsMap .get(TaskConstant.JOB_ENGINECONN_MAP) - .asInstanceOf[util.HashMap[String, AnyRef]] + .asInstanceOf[util.Map[String, AnyRef]] } else { engineInstanceMap = new util.HashMap[String, AnyRef]() metricsMap.put(TaskConstant.JOB_ENGINECONN_MAP, engineInstanceMap) @@ -194,7 +336,7 @@ object JobHistoryHelper extends Logging { val ticketId = infoMap.get(TaskConstant.TICKET_ID).asInstanceOf[String] val engineExtraInfoMap = engineInstanceMap .getOrDefault(ticketId, new util.HashMap[String, AnyRef]) - .asInstanceOf[util.HashMap[String, AnyRef]] + .asInstanceOf[util.Map[String, AnyRef]] engineExtraInfoMap.putAll(infoMap) engineInstanceMap.put(ticketId, engineExtraInfoMap) } else { diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java new file mode 100644 index 0000000000..c5efb5633e --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/SQLExplainTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership.
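The `updateAllConsumeQueueTask` method added above works through the id list in bounded batches and recurses on the remainder, pausing and retrying when a batch fails. A simplified sketch of that loop, written iteratively; the batch size is a plain parameter here where the real method reads `ENTRANCE_UPDATE_BATCH_SIZE`, and the `retryWhenUpdateFail` bookkeeping is collapsed into always retrying a failed batch:

```scala
// Batch loop from updateAllConsumeQueueTask, simplified: take at most batchSize
// ids per round, attempt the update, and keep a failed batch in the work list
// so it is retried after a short pause.
object BatchUpdateSketch {
  def updateAll(taskIds: List[Long], batchSize: Int)(update: List[Long] => Unit): Unit = {
    var remaining = taskIds
    while (remaining.nonEmpty) {
      val (batch, rest) = remaining.splitAt(batchSize)
      try {
        update(batch)
        remaining = rest // success: drop the handled ids
      } catch {
        case e: Exception =>
          println(s"update batch failed, wait for retry: ${e.getMessage}")
          Thread.sleep(1000) // then loop again with the same ids
      }
    }
  }

  def main(args: Array[String]): Unit =
    updateAll(List(1L, 2L, 3L, 4L, 5L), batchSize = 2)(ids => println(s"updating $ids"))
}
```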
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class SQLExplainTest { + + @Test + void isSelectCmdNoLimit() { + + String code = "SELECT * from dual WHERE (1=1)LIMIT 1;"; + boolean res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(false, res); + + code = "SELECT * from dual"; + res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(true, res); + + code = "SELECT * from dual LIMIT 1;"; + res = SQLExplain.isSelectCmdNoLimit(code); + Assertions.assertEquals(false, res); + } + + @Test + void isSelectOverLimit() { + String code = "SELECT * from dual WHERE (1=1)LIMIT 5001;"; + boolean res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(true, res); + + code = "SELECT * from dual"; + res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(false, res); + + code = "SELECT * from dual LIMIT 4000;"; + res = SQLExplain.isSelectOverLimit(code); + Assertions.assertEquals(false, res); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java new file mode 100644 index 0000000000..c965529b57 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtilsTest.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
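The SQLExplainTest above pins down the contract of `isSelectCmdNoLimit`: a SELECT with a trailing `LIMIT n` is considered bounded, with or without whitespace before the keyword. This is not the SQLExplain source, only a regex illustration of that contract under the stated assumption that a trailing case-insensitive `LIMIT <digits>` is what counts:

```scala
// Regex illustration of the tested behaviour: a trailing "LIMIT n"
// (case-insensitive, optional semicolon) marks a SELECT as bounded.
object SelectLimitSketch {
  private val TrailingLimit = """(?is).*\blimit\s+(\d+)\s*;?\s*$""".r

  def isSelectNoLimit(sql: String): Boolean =
    sql.trim.toLowerCase.startsWith("select") && !TrailingLimit.pattern.matcher(sql).matches()

  def main(args: Array[String]): Unit = {
    println(isSelectNoLimit("SELECT * from dual"))                     // true
    println(isSelectNoLimit("SELECT * from dual LIMIT 1;"))            // false
    println(isSelectNoLimit("SELECT * from dual WHERE (1=1)LIMIT 1;")) // false
  }
}
```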
+ */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class TemplateConfUtilsTest { + + @Test + void getCustomTemplateConfName() { + String sqlCode = + "" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + String res = TemplateConfUtils.getCustomTemplateConfName(sqlCode, "sql"); + Assertions.assertEquals(res, ""); + + String sqlCode2 = + "" + + "---@set 123=注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode2, "sql"); + Assertions.assertEquals(res, ""); + + String sqlCode3 = + "" + + "---@set ec.resource.name=345\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + "---@set ec.resource.name=456\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode3, "sql"); + Assertions.assertEquals(res, "345"); + + String sqlCode4 = + "" + + "---@set ec.resource.name= name1 \n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode4, "sql"); + Assertions.assertEquals(res, "name1"); + + String sqlCode5 = + "" + + "##@set ec.resource.name=pyname1\n" + + "select * from table;\n" + + " --注解 \n" + + "#注解\n" + + "##@set ec.resource.name= 123 \n" + + " select \"--注解\" as test\n" + + "#@set yy=123\n" + + " #注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode5, "python"); + Assertions.assertEquals(res, "pyname1"); + + String sqlCode6 = + "" + + "///@set ec.resource.name= scalaname1 \n" + + " select \"//注解\" as test\n" + + "//@set yy=123\n" + + " #注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode6, "scala"); + Assertions.assertEquals(res, "scalaname1"); + + String sqlCode7 = + "" + + "---@set ec.resource.name= hqlname1 \n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode7, "hql"); + Assertions.assertEquals(res, "hqlname1"); + + String sqlCode8 = + "---@set ec.resource.name=linkis_test2;\n" + + " ---@set ec.resource.name=scriptis_test hive;\n" + + " select * from dss autotest.demo data limit 100;"; + res = TemplateConfUtils.getCustomTemplateConfName(sqlCode8, "hql"); + Assertions.assertEquals(res, "linkis_test2"); + } + + @Test + void getCustomTemplateConfName2() { + + String sqlCode9 = + "---@set ec.resource.name=linkis_test2;\r\n---@set ec.resource.name=scriptis_test_hive;\r\n--@set limitn=100\r\nselect * from dss_autotest.demo_data limit ${limitn};\r\n"; + + String res = TemplateConfUtils.getCustomTemplateConfName(sqlCode9, "hql"); + Assertions.assertEquals(res, "linkis_test2"); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java new file mode 100644 index 0000000000..622d06c4e8 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
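The TemplateConfUtilsTest above encodes the lookup rule for the `ec.resource.name` template marker: scan comment lines in the language's comment syntax for `@set ec.resource.name=<value>` and return the first value, trimmed and with any trailing semicolon removed. The sketch below illustrates those assertions; it is not the TemplateConfUtils source, and it collapses the per-language comment markers (`--`, `#`, `//`) into one character set:

```scala
// Illustration of the assertions above: strip leading comment characters, look
// for "@set ec.resource.name=<value>", and return the first value found.
object TemplateConfNameSketch {
  private val commentChars = Set('-', '#', '/')
  private val Key = "@set ec.resource.name="

  def confName(code: String): String =
    code.linesIterator
      .map(_.trim.dropWhile(commentChars.contains))
      .collectFirst { case line if line.startsWith(Key) =>
        line.stripPrefix(Key).trim.stripSuffix(";").trim
      }
      .getOrElse("")

  def main(args: Array[String]): Unit = {
    println(confName("---@set ec.resource.name=345\nselect 1;"))     // 345
    println(confName("##@set ec.resource.name= pyname1 \nprint(1)")) // pyname1
    println(confName("--@set yy=123\nselect 1;"))                    // "" (no name key)
  }
}
```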
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import java.util.Arrays; + +import org.junit.jupiter.api.Test; +import org.junit.platform.commons.util.StringUtils; + +public class TestCommentHelper { + String sqlCode = + "" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + String scalaCode = + "" + + "// 注解\n" + + "print(1+1)\n" + + "//@set yy=123\n" + + " print(2)\n" + + " // 注解 \n" + + "// test\n" + + "print(\"//注解测试\")"; + + String scalaCodeRes = "print(1+1)\n" + "print(2)\n" + "print(\"//注解测试\")"; + + @Test + void sqlDealCommentTest() { + String code = SQLCommentHelper.dealComment(sqlCode); + // System.out.println(code); + } + + @Test + void scalaDealCommentTest() { + String code = ScalaCommentHelper.dealComment(scalaCode); + String[] lines = + Arrays.stream(code.split("\n")) + .map(String::trim) + .filter(x -> StringUtils.isNotBlank(x)) + .toArray(String[]::new); + String result = String.join("\n", lines); + // assertEquals(result,scalaCodeRes); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java new file mode 100644 index 0000000000..fabff88473 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/java/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.interceptor.impl; + +import org.apache.linkis.entrance.log.Cache; +import org.apache.linkis.entrance.log.HDFSCacheLogWriter; + +import org.apache.commons.lang3.StringUtils; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import oshi.util.FileUtil; + +class TestHDFSCacheLogWriter { + + @Test + void write() throws IOException { + + Cache cache = new Cache(5); + String fileName = UUID.randomUUID().toString().replace("-", "") + "-test.log"; + String logPath = System.getProperty("java.io.tmpdir") + File.separator + fileName; + System.out.println(logPath); + String charSet = "utf-8"; + String username = System.getProperty("user.name"); + + File file = new File(logPath); + file.createNewFile(); + + HDFSCacheLogWriter logWriter = + new HDFSCacheLogWriter( + // "D:\\DataSphere\\linkis\\docs\\test.log", + logPath, charSet, cache, username); + + String[] msgArr = + new String[] { + "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", + "17", "18" + }; + + List<String> msgList = new ArrayList<>(Arrays.asList(msgArr)); + String msg = String.join("\n", msgList); + + logWriter.write(msg); + logWriter.flush(); + + List<String> list = FileUtil.readFile(logPath); + String res = String.join("\n", list); + + res = res.replace("\n\n", "\n"); + res = StringUtils.strip(res, " \n"); + Assertions.assertEquals(res, msg); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java index 89169eb58f..16a23c773b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java @@ -64,7 +64,7 @@ public class LabelCommonConfig { CommonVars.apply("wds.linkis.appconn.engine.version", "1"); public static final CommonVars FLINK_ENGINE_VERSION = - CommonVars.apply("wds.linkis.flink.engine.version", "1.12.2"); + CommonVars.apply("wds.linkis.flink.engine.version", "1.16.2"); public static final CommonVars SQOOP_ENGINE_VERSION = CommonVars.apply("wds.linkis.sqoop.engine.version", "1.4.6"); diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java index 8021b35851..fbaea61811 100644 --- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java @@ -66,5 +66,9 @@ public class LabelKeyConstant { public static final String ENGINGE_CONN_RUNTIME_MODE_KEY = "engingeConnRuntimeMode"; + public static final String TEMPLATE_CONF_KEY = "ec.conf.templateId"; + + public static final String TEMPLATE_CONF_NAME_KEY = "ec.resource.name"; + public static final String
MANAGER_KEY = "manager"; } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAskAsyncResponse.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAskAsyncResponse.java index 364af7fd0a..86fc06698f 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAskAsyncResponse.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineAskAsyncResponse.java @@ -45,4 +45,15 @@ public ServiceInstance getManagerInstance() { public void setManagerInstance(ServiceInstance managerInstance) { this.managerInstance = managerInstance; } + + @Override + public String toString() { + return "EngineAskAsyncResponse{" + + "id='" + + id + + '\'' + + ", managerInstance=" + + managerInstance + + '}'; + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java index 31e269d490..896397033e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java @@ -82,4 +82,23 @@ public NodeStatus getNodeStatus() { public void setNodeStatus(NodeStatus nodeStatus) { this.nodeStatus = nodeStatus; } + + @Override + public String toString() { + return "EngineConnReleaseRequest{" + + "serviceInstance=" + + serviceInstance + + ", user='" + + user + + '\'' + + ", msg='" + + msg + + '\'' + + ", nodeStatus=" + + nodeStatus + + ", ticketId='" + + ticketId + + '\'' + + '}'; + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.java index d162257cab..ea0dca16ec 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineConnStatusCallback.java @@ -52,4 +52,17 @@ public NodeStatus getStatus() { public String getInitErrorMsg() { return initErrorMsg; } + + @Override + public String toString() { + return "EngineConnStatusCallback{" + + "serviceInstance=" + + serviceInstance + + ", ticketId='" + + ticketId + + '\'' + + ", status=" + + status + + '}'; + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateError.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateError.java index f9c55b1011..0951b5b276 100644 
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateError.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateError.java @@ -58,4 +58,9 @@ public Boolean getRetry() { public void setRetry(Boolean retry) { this.retry = retry; } + + @Override + public String toString() { + return "EngineCreateError{" + "id='" + id + '\'' + ", retry=" + retry + '}'; + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java index 37ae583f88..1b7496c720 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineCreateSuccess.java @@ -45,4 +45,9 @@ public EngineNode getEngineNode() { public void setEngineNode(EngineNode engineNode) { this.engineNode = engineNode; } + + @Override + public String toString() { + return "EngineCreateSuccess{" + "id='" + id + '\'' + ", engineNode=" + engineNode + '}'; + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java index 1820fe8c94..36aa697284 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java @@ -43,4 +43,9 @@ public void setUser(String user) { public String getUser() { return user; } + + @Override + public String toString() { + return "EngineInfoClearRequest{" + "engineNode=" + engineNode + ", user='" + user + '\'' + '}'; + } } diff --git a/linkis-dist/bin/checkEnv.sh b/linkis-dist/bin/checkEnv.sh index 68682a2338..049b2e1171 100644 --- a/linkis-dist/bin/checkEnv.sh +++ b/linkis-dist/bin/checkEnv.sh @@ -13,13 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
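The batch of `toString()` overrides added to the engine protocol beans above exists so that log statements print the message payload instead of the default class-name-and-hash form. A tiny Scala illustration of the difference (the class names here are hypothetical, not Linkis types); on the Scala side the same effect comes for free from a case class:

```scala
// Why the toString overrides matter for logging: the default Object.toString
// shows only a class name and hash, while an explicit override (or a Scala
// case class) prints the payload that makes the log line actionable.
final class NoToString(val id: String)
final case class CreateSuccessSketch(id: String, engineNode: String)

object ToStringSketch {
  def main(args: Array[String]): Unit = {
    println(new NoToString("e1"))               // e.g. NoToString@1b6d3586
    println(CreateSuccessSketch("e1", "node1")) // CreateSuccessSketch(e1,node1)
  }
}
```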
# - +source ~/.bash_profile shellDir=`dirname $0` workDir=`cd ${shellDir}/..;pwd` source ${workDir}/bin/common.sh source ${workDir}/deploy-config/linkis-env.sh source ${workDir}/deploy-config/db.sh - +source ~/.bash_profile say() { printf 'check command fail \n %s\n' "$1" } @@ -169,12 +169,6 @@ echo "check hdfs" need_cmd hdfs echo "check shell" need_cmd $SHELL -echo "check spark-submit" -need_cmd spark-submit -echo "check spark-shell" -need_cmd spark-shell -echo "check spark-sql" -need_cmd spark-sql echo "check hadoop" need_cmd hadoop @@ -187,6 +181,12 @@ checkPythonAndJava checkMysql if [ "$ENABLE_SPARK" == "true" ]; then + echo "check spark-submit" + need_cmd spark-submit + echo "check spark-shell" + need_cmd spark-shell + echo "check spark-sql" + need_cmd spark-sql checkSpark fi @@ -203,8 +203,10 @@ echo -e "\n<-----End to check service status---->" # --- check Service Port echo -e "\n3. <-----Start to check service Port---->" -SERVER_PORT=$EUREKA_PORT -check_service_port +if [ "$DISCOVERY" == "EUREKA" ]; then + SERVER_PORT=$EUREKA_PORT + check_service_port +fi SERVER_PORT=$GATEWAY_PORT check_service_port @@ -227,4 +229,4 @@ if [ "$portIsOccupy" = true ];then exit 1 fi -echo "\n <-----End to check service Port---->" \ No newline at end of file +echo -e "\n<-----End to check service Port---->" diff --git a/linkis-dist/bin/install.sh b/linkis-dist/bin/install.sh index d7e52128ba..24cada6657 100644 --- a/linkis-dist/bin/install.sh +++ b/linkis-dist/bin/install.sh @@ -110,6 +110,15 @@ else isSuccess "cp ${LINKIS_PACKAGE} to $LINKIS_HOME" fi +if [ "$DISCOVERY" == "NACOS" ]; then + rm -rf $LINKIS_HOME/conf/application-* + mv $LINKIS_HOME/conf/nacos/* $LINKIS_HOME/conf + echo "Use NACOS mode" +else + echo "Use EUREKA mode" +fi +rm -rf $LINKIS_HOME/conf/nacos + cp ${LINKIS_CONFIG_PATH} $LINKIS_HOME/conf cp ${LINKIS_DB_CONFIG_PATH} $LINKIS_HOME/conf @@ -127,6 +136,7 @@ RANDOM_LINKISCLI_TOKEN="LINKISCLI-`cat /proc/sys/kernel/random/uuid | awk -F- '{ if [ $DEBUG_MODE != "true" ];then sed -i ${txt} "s#BML-AUTH#$RANDOM_BML_TOKEN#g" $LINKIS_HOME/conf/linkis-cli/linkis-cli.properties sed -i ${txt} "s#BML-AUTH#$RANDOM_BML_TOKEN#g" $common_conf + sed -i ${txt} "s#BML-AUTH#$RANDOM_BML_TOKEN#g" $LINKIS_HOME/admin/configuration_helper.sh sed -i ${txt} "s#LINKIS_CLI_TEST#$RANDOM_LINKIS_CLI_TEST_TOKEN#g" $common_conf sed -i ${txt} "s#WS-AUTH#$RANDOM_WS_TOKEN#g" $common_conf sed -i ${txt} "s#DSM-AUTH#$RANDOM_DSM_TOKEN#g" $common_conf @@ -406,34 +416,50 @@ fi currentTime=`date +%Y%m%d%H%M%S` -##eureka -sed -i ${txt} "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $LINKIS_HOME/conf/application-eureka.yml -sed -i ${txt} "s#port:.*#port: $EUREKA_PORT#g" $LINKIS_HOME/conf/application-eureka.yml sed -i ${txt} "s#linkis.app.version:.*#linkis.app.version: $LINKIS_VERSION-$currentTime#g" $LINKIS_HOME/conf/application-eureka.yml - -##server application.yml -sed -i ${txt} "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $LINKIS_HOME/conf/application-linkis.yml sed -i ${txt} "s#linkis.app.version:.*#linkis.app.version: $LINKIS_VERSION-$currentTime#g" $LINKIS_HOME/conf/application-linkis.yml - -sed -i ${txt} "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $LINKIS_HOME/conf/application-engineconn.yml sed -i ${txt} "s#linkis.app.version:.*#linkis.app.version: $LINKIS_VERSION-$currentTime#g" $LINKIS_HOME/conf/application-engineconn.yml -if [ "$EUREKA_PREFER_IP" == "true" ]; then - sed -i ${txt} "s/# prefer-ip-address:/prefer-ip-address:/g" $LINKIS_HOME/conf/application-eureka.yml +sed -i ${txt} 
"s#DISCOVERY=.*#DISCOVERY=$DISCOVERY#g" $LINKIS_HOME/sbin/common.sh + +if [ "$DISCOVERY" == "EUREKA" ]; then + ##eureka + sed -i ${txt} "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $LINKIS_HOME/conf/application-eureka.yml + sed -i ${txt} "s#port:.*#port: $EUREKA_PORT#g" $LINKIS_HOME/conf/application-eureka.yml + sed -i ${txt} "s#linkis.app.version:.*#linkis.app.version: $LINKIS_VERSION-$currentTime#g" $LINKIS_HOME/conf/application-eureka.yml + + ##server application.yml + sed -i ${txt} "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $LINKIS_HOME/conf/application-linkis.yml - sed -i ${txt} "s/# prefer-ip-address:/prefer-ip-address:/g" $LINKIS_HOME/conf/application-linkis.yml - sed -i ${txt} "s/# instance-id:/instance-id:/g" $LINKIS_HOME/conf/application-linkis.yml + sed -i ${txt} "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $LINKIS_HOME/conf/application-engineconn.yml - sed -i ${txt} "s/# prefer-ip-address:/prefer-ip-address:/g" $LINKIS_HOME/conf/application-engineconn.yml - sed -i ${txt} "s/# instance-id:/instance-id:/g" $LINKIS_HOME/conf/application-linkis.yml + if [ "$EUREKA_PREFER_IP" == "true" ]; then + sed -i ${txt} "s/# prefer-ip-address:/prefer-ip-address:/g" $LINKIS_HOME/conf/application-eureka.yml + + sed -i ${txt} "s/# prefer-ip-address:/prefer-ip-address:/g" $LINKIS_HOME/conf/application-linkis.yml + sed -i ${txt} "s/# instance-id:/instance-id:/g" $LINKIS_HOME/conf/application-linkis.yml + + sed -i ${txt} "s/# prefer-ip-address:/prefer-ip-address:/g" $LINKIS_HOME/conf/application-engineconn.yml + sed -i ${txt} "s/# instance-id:/instance-id:/g" $LINKIS_HOME/conf/application-linkis.yml + + sed -i ${txt} "s#linkis.discovery.prefer-ip-address.*#linkis.discovery.prefer-ip-address=true#g" $common_conf + fi + export DISCOVERY_SERVER_ADDRES=$EUREKA_INSTALL_IP:$EUREKA_POR +fi + +if [ "$DISCOVERY" == "NACOS" ]; then + sed -i ${txt} "s#server-addr:.*#server-addr: $NACOS_SERVER_ADDR#g" $LINKIS_HOME/conf/application-linkis.yml + sed -i ${txt} "s#server-addr:.*#server-addr: $NACOS_SERVER_ADDR#g" $LINKIS_HOME/conf/application-engineconn.yml sed -i ${txt} "s#linkis.discovery.prefer-ip-address.*#linkis.discovery.prefer-ip-address=true#g" $common_conf + + export DISCOVERY_SERVER_ADDRES=$NACOS_SERVER_ADDR fi echo "update conf $common_conf" sed -i ${txt} "s#wds.linkis.server.version.*#wds.linkis.server.version=$LINKIS_SERVER_VERSION#g" $common_conf sed -i ${txt} "s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT#g" $common_conf -sed -i ${txt} "s#linkis.discovery.server-address.*#linkis.discovery.server-address=http://$EUREKA_INSTALL_IP:$EUREKA_PORT#g" $common_conf +sed -i ${txt} "s#linkis.discovery.server-address.*#linkis.discovery.server-address=http://$DISCOVERY_SERVER_ADDRES#g" $common_conf if [[ 'postgresql' = "$dbType" ]];then sed -i ${txt} "s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:postgresql://${PG_HOST}:${PG_PORT}/${PG_DB}?currentSchema=${PG_SCHEMA}\&stringtype=unspecified#g" $common_conf sed -i ${txt} "s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$PG_USER#g" $common_conf @@ -532,6 +558,7 @@ sed -i ${txt} "s#spring.eureka.instance.metadata-map.linkis.conf.version.*#spri if [ "$RESULT_SET_ROOT_PATH" != "" ] then sed -i ${txt} "s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g" $entrance_conf + sed -i ${txt} "s#resultSetRootDir=.*#resultSetRootDir=$RESULT_SET_ROOT_PATH#g" 
$LINKIS_HOME/admin/linkis_task_res_log_clear.sh fi publicservice_conf=$LINKIS_HOME/conf/linkis-ps-publicservice.properties diff --git a/linkis-dist/deploy-config/linkis-env.sh b/linkis-dist/deploy-config/linkis-env.sh index f37d0720a8..fed49826b5 100644 --- a/linkis-dist/deploy-config/linkis-env.sh +++ b/linkis-dist/deploy-config/linkis-env.sh @@ -111,6 +111,9 @@ SPARK_CONF_DIR=/appcom/config/spark-config # Linkis in a distributed manner and set the following microservice parameters # +### DISCOVERY +DISCOVERY=EUREKA + ### EUREKA install information ### You can access it in your browser at the address below:http://${EUREKA_INSTALL_IP}:${EUREKA_PORT} #EUREKA: Microservices Service Registration Discovery Center @@ -119,6 +122,10 @@ EUREKA_PORT=20303 export EUREKA_PREFER_IP=false #EUREKA_HEAP_SIZE="512M" +### NACOS install information +### NACOS +NACOS_SERVER_ADDR=127.0.0.1:8848 + ##linkis-mg-gateway #GATEWAY_INSTALL_IP=127.0.0.1 GATEWAY_PORT=9001 diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml index 6d26ae863a..85050d4d21 100644 --- a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml +++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml @@ -26,6 +26,8 @@ data: DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`; DROP TABLE IF EXISTS `linkis_ps_configuration_config_value`; DROP TABLE IF EXISTS `linkis_ps_configuration_category`; + DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; + DROP TABLE IF EXISTS `linkis_ps_configutation_lm_across_cluster_rule`; DROP TABLE IF EXISTS `linkis_ps_job_history_group_history`; DROP TABLE IF EXISTS `linkis_ps_job_history_detail`; DROP TABLE IF EXISTS `linkis_ps_common_lock`; @@ -84,19 +86,23 @@ data: DROP TABLE IF EXISTS `linkis_mg_gateway_auth_token`; {{- end }} - CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_config_key`( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', - `description` varchar(200) DEFAULT NULL, - `name` varchar(50) DEFAULT NULL, - `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', - `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', - `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', - `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', - `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', - `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', - `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + CREATE TABLE `linkis_ps_configuration_config_key`( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. 
spark.executor.instances', + `description` varchar(200) DEFAULT NULL, + `name` varchar(50) DEFAULT NULL, + `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', + `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', + `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', + `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', + `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', + `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none / 1 with min / 2 with max / 3 min and max both', + `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -132,7 +138,37 @@ data: PRIMARY KEY (`id`), UNIQUE INDEX(`label_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; - + CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_limit_for_user` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `user_name` VARCHAR(50) NOT NULL COMMENT 'username', + `combined_label_value` VARCHAR(128) NOT NULL COMMENT 'Combined label combined_userCreator_engineType such as hadoop-IDE,spark-2.4.3', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)', + `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid?
Reserved Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) + )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule ( + id INT AUTO_INCREMENT COMMENT 'Rule ID, auto-increment primary key', + cluster_name char(32) NOT NULL COMMENT 'Cluster name, cannot be empty', + creator char(32) NOT NULL COMMENT 'Creator, cannot be empty', + username char(32) NOT NULL COMMENT 'User, cannot be empty', + create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time, cannot be empty', + create_by char(32) NOT NULL COMMENT 'Creator, cannot be empty', + update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Modification time, cannot be empty', + update_by char(32) NOT NULL COMMENT 'Updater, cannot be empty', + rules varchar(256) NOT NULL COMMENT 'Rule content, cannot be empty', + is_valid VARCHAR(2) DEFAULT 'N' COMMENT 'Is it valid Y/N', + PRIMARY KEY (id), + UNIQUE KEY idx_creator_username (creator, username) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- -- New linkis job -- @@ -146,7 +182,7 @@ data: `labels` text DEFAULT NULL COMMENT 'job labels', `params` text DEFAULT NULL COMMENT 'job params', `progress` varchar(32) DEFAULT NULL COMMENT 'Job execution progress', - `status` varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout', + `status` varchar(50) DEFAULT NULL, `log_path` varchar(200) DEFAULT NULL COMMENT 'File path of the job log', `error_code` int DEFAULT NULL COMMENT 'Error code. Generated when the execution of the script fails', `error_desc` varchar(1000) DEFAULT NULL COMMENT 'Execution description. 
Generated when the execution of script fails',
@@ -180,6 +216,7 @@ data:
     CREATE TABLE IF NOT EXISTS `linkis_ps_common_lock` (
       `id` int(11) NOT NULL AUTO_INCREMENT,
       `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+      `locker` varchar(255) COLLATE utf8_bin NOT NULL,
       `time_out` longtext COLLATE utf8_bin,
       `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
       `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
@@ -195,6 +232,8 @@ data:
     CREATE TABLE IF NOT EXISTS `linkis_ps_udf_manager` (
       `id` bigint(20) NOT NULL AUTO_INCREMENT,
       `user_name` varchar(20) DEFAULT NULL,
+      `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+      `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
       PRIMARY KEY (`id`)
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -207,6 +246,8 @@ data:
       `id` bigint(20) NOT NULL AUTO_INCREMENT,
       `udf_id` bigint(20) NOT NULL,
       `shared_group` varchar(50) NOT NULL,
+      `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+      `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
       PRIMARY KEY (`id`)
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -214,7 +255,9 @@ data:
     (
       `id` bigint(20) PRIMARY KEY NOT NULL AUTO_INCREMENT,
       `udf_id` bigint(20) NOT NULL,
-      `user_name` varchar(50) NOT NULL
+      `user_name` varchar(50) NOT NULL,
+      `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+      `create_time` datetime DEFAULT CURRENT_TIMESTAMP
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
     -- ----------------------------
@@ -229,7 +272,8 @@ data:
       `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
       `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
       `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function',
-      PRIMARY KEY (`id`)
+      PRIMARY KEY (`id`),
+      UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`)
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -241,7 +285,10 @@ data:
       `id` bigint(20) NOT NULL AUTO_INCREMENT,
       `udf_id` bigint(20) NOT NULL,
       `user_name` varchar(50) NOT NULL,
-      PRIMARY KEY (`id`)
+      `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+      `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+      PRIMARY KEY (`id`),
+      UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`)
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
     CREATE TABLE IF NOT EXISTS `linkis_ps_udf_baseinfo` (
@@ -271,6 +318,7 @@ data:
       `use_format` varchar(255) DEFAULT NULL,
       `description` varchar(255) NOT NULL COMMENT 'version desc',
       `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+      `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
       `md5` varchar(100) DEFAULT NULL,
       PRIMARY KEY (`id`)
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -503,7 +551,8 @@ data:
       `max_version` int(20) DEFAULT 10 COMMENT 'The default is 10, which means to keep the latest 10 versions',
       `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time',
       `updator` varchar(50) DEFAULT NULL COMMENT 'updator',
-      `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen',
+      `enable_flag` tinyint(1) NOT NULL DEFAULT '1' ,
+      unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`),
       PRIMARY KEY (`id`)
     ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4;
@@ -522,7 +571,7 @@ data:
       `end_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Stoped time',
       `client_ip` varchar(200) NOT NULL COMMENT 'Client ip',
       `updator` varchar(50) DEFAULT NULL COMMENT 'updator',
-      `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen',
+      `enable_flag` tinyint(1) NOT NULL DEFAULT '1',
       unique key `resource_id_version`(`resource_id`, `version`),
       PRIMARY KEY (`id`)
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -565,7 +614,7 @@ data:
       `operation` varchar(20) NOT NULL COMMENT 'Operation type. upload = 0, update = 1',
       `state` varchar(20) NOT NULL DEFAULT 'Schduled' COMMENT 'Current status of the task:Schduled, Running, Succeed, Failed,Cancelled',
       `submit_user` varchar(20) NOT NULL DEFAULT '' COMMENT 'Job submission user name',
-      `system` varchar(20) DEFAULT 'dss' COMMENT 'Subsystem name: wtss',
+      `system` varchar(20) DEFAULT 'dss',
       `instance` varchar(128) NOT NULL COMMENT 'Material library example',
       `client_ip` varchar(50) DEFAULT NULL COMMENT 'Request IP',
       `extra_params` text COMMENT 'Additional key information. Such as the resource IDs and versions that are deleted in batches, and all versions under the resource are deleted',
@@ -1008,12 +1057,6 @@ data:
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark');
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark');
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
     -- hive
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive');
     INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive');
@@ -1231,8 +1274,9 @@ data:
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0);
-    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01004','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0);
-    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','机器内存不足,请联系管理员扩容','Cannot allocate memory',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0);
+
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12001','队列CPU资源不足,可以调整Spark执行器个数','Queue CPU resources are insufficient',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12002','队列内存资源不足,可以调整Spark执行器个数','Insufficient queue memory',0);
@@ -1256,8 +1300,11 @@ data:
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或则可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0);
+
     -- 21 cluster Authority 22 db Authority
-    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0);
     INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0);
diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
index e6f419744c..6437188298 100644
--- a/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-linkis-config.yaml
@@ -241,8 +241,8 @@ data:
     wds.linkis.gateway.conf.enable.token.auth=true
     wds.linkis.is.gateway=true
     wds.linkis.server.mybatis.mapperLocations=classpath*:mapper/common/*.xml,classpath*:mapper/mysql/*.xml
-    wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.instance.label.entity
-    wds.linkis.server.mybatis.BasePackage=org.apache.linkis.instance.label.dao,org.apache.linkis.gateway.authentication.dao
+    wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.instance.label.entity,org.apache.linkis.jobhistory.entity
+    wds.linkis.server.mybatis.BasePackage=org.apache.linkis.instance.label.dao,org.apache.linkis.gateway.authentication.dao,org.apache.linkis.jobhistory.dao
     wds.linkis.label.entity.packages=org.apache.linkis.gateway.ujes.route.label
     wds.linkis.login_encrypt.enable=false
     ##LDAP:q
@@ -317,6 +322,11 @@ data:
     wds.linkis.resultSet.store.path=hdfs://{{ .Values.linkis.locations.runtimeDir }}
     {{- end }}
+    ##mybatis
+    wds.linkis.server.mybatis.mapperLocations=classpath*:mapper/common/*.xml,classpath*:mapper/mysql/*.xml
+    wds.linkis.server.mybatis.BasePackage=org.apache.linkis.publicservice.common.lock.dao
+    wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.publicservice.common.lock.entity
+
     ##Spring
     spring.server.port={{ .Values.cgEntrance.port }}
diff --git a/linkis-dist/package/admin/clear_ec_record.sh b/linkis-dist/package/admin/clear_ec_record.sh
new file mode 100644
index 0000000000..5cd1525263
--- /dev/null
+++ b/linkis-dist/package/admin/clear_ec_record.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# description: clear linkis_cg_ec_resource_info_record records older than 10 days
+#
+if [ -f ${LINKIS_CONF_DIR}/db.sh ]
+then
+   export LINKIS_DB_CONFIG_PATH=${LINKIS_CONF_DIR}/db.sh
+else
+   if [ -f ${LINKIS_HOME}/conf/db.sh ]
+   then
+      export LINKIS_DB_CONFIG_PATH=${LINKIS_HOME}/conf/db.sh
+   else
+      echo "can not find db.sh"
+      exit 1
+   fi
+fi
+source ${LINKIS_DB_CONFIG_PATH}
+
+delete_day=`date -d "-10 days" "+%Y-%m-%d"`
+delete_time="$delete_day 00:00:00"
+echo "start to delete linkis_cg_ec_resource_info_record before $delete_time"
+param="release_time <=\"$delete_time\" "
+
+count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_cg_ec_resource_info_record where $param limit 1"`
+maxid=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT MAX(id) FROM linkis_cg_ec_resource_info_record where $param limit 1"`
+echo "will delete count:$count"
+echo "maxid:$maxid"
+
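+# delete in batches of 5000 rows with a short pause in between, so that a single
+# huge DELETE does not hold locks on linkis_cg_ec_resource_info_record for too long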
+while [ $count -gt 1 ];do
+  mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "DELETE FROM linkis_cg_ec_resource_info_record where $param and id <= $maxid limit 5000;"
+  count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_cg_ec_resource_info_record where $param limit 1"`
+  echo "count change : $count"
+  sleep 1s
+done
+
+echo "clear_ec_record.sh over"
\ No newline at end of file
diff --git a/linkis-dist/package/admin/clear_history_task.sh b/linkis-dist/package/admin/clear_history_task.sh
new file mode 100644
index 0000000000..75c49cb715
--- /dev/null
+++ b/linkis-dist/package/admin/clear_history_task.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# description: clear linkis_ps_job_history_group_history records older than 3 months (90 days)
+#
+if [ -f ${LINKIS_CONF_DIR}/db.sh ]
+then
+   export LINKIS_DB_CONFIG_PATH=${LINKIS_CONF_DIR}/db.sh
+else
+   if [ -f ${LINKIS_HOME}/conf/db.sh ]
+   then
+      export LINKIS_DB_CONFIG_PATH=${LINKIS_HOME}/conf/db.sh
+   else
+      echo "can not find db.sh"
+      exit 1
+   fi
+fi
+source ${LINKIS_DB_CONFIG_PATH}
+
+delete_day=`date -d "-90 days" "+%Y-%m-%d"`
+delete_time="$delete_day 00:00:00"
+echo "start to delete linkis_ps_job_history_group_history before $delete_time"
+param="created_time <=\"$delete_time\" "
+
+count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_ps_job_history_group_history where $param limit 1 "`
+maxid=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT MAX(id) FROM linkis_ps_job_history_group_history where $param limit 1 "`
+echo "will delete count:$count"
+echo "maxid:$maxid"
+
+while [ $count -gt 1 ];do
+  mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "DELETE FROM linkis_ps_job_history_group_history where $param and id <= $maxid limit 5000;"
+  count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_ps_job_history_group_history where $param limit 1 "`
+  echo "count change : $count"
+  sleep 1s
+done
\ No newline at end of file
diff --git a/linkis-dist/package/admin/configuration_helper.sh b/linkis-dist/package/admin/configuration_helper.sh
new file mode 100644
index 0000000000..8c918dfe61
--- /dev/null
+++ b/linkis-dist/package/admin/configuration_helper.sh
@@ -0,0 +1,90 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+cd `dirname $0`
+cd ..
+INSTALL_HOME=`pwd`
+
+
+function print_usage(){
+  echo "Usage: configuration_helper.sh [add | get | delete] [engineType] [version] [creator] [configKey] [user] [configValue (add only)] [force (optional)]"
+  echo "get eq: sh configuration_helper.sh get spark 2.4.3 test wds.linkis.rm.instance hadoop"
+  echo "delete eq: sh configuration_helper.sh delete spark 2.4.3 test wds.linkis.rm.instance hadoop"
+  echo "add eq: sh configuration_helper.sh add spark 2.4.3 test wds.linkis.rm.instance hadoop 6"
+  echo "add eq: sh configuration_helper.sh add spark 2.4.3 test wds.linkis.rm.instance hadoop 6 force"
+  echo "add tips: add with force will ignore check error"
+  echo "Most commands print help when invoked w/o parameters."
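+  # positional args: $1=command, $2=engineType, $3=version, $4=creator, $5=configKey, $6=user, $7=configValue (add only), $8=force (optional)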
+}
+
+if [ $# -lt 6 ]; then
+  print_usage
+  exit 2
+fi
+
+# set LINKIS_HOME
+if [ "$LINKIS_HOME" = "" ]; then
+  export LINKIS_HOME=$INSTALL_HOME
+fi
+
+# set LINKIS_CONF_DIR
+if [ "$LINKIS_CONF_DIR" = "" ]; then
+  export LINKIS_CONF_DIR=$LINKIS_HOME/conf
+fi
+linkisMainConf=$LINKIS_CONF_DIR/linkis.properties
+gatewayUrl=$(grep wds.linkis.gateway.url $linkisMainConf | cut -d"=" -f2)
+echo "gatewayUrl: $gatewayUrl"
+engineType=$2
+version=$3
+creator=$4
+configKey=$5
+user=$6
+configValue=$7
+COMMAND=$1
+if [ "$8" = "force" ]; then
+  force=true
+fi
+
+get()
+{
+  requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue?creator=$creator&engineType=$engineType&version=$version&configKey=$configKey"
+  curl --location --request GET $requestUrl -H "Token-Code:BML-AUTH" -H "Token-User:$user"
+}
+
+delete()
+{
+  requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue"
+  requestBody="{\"engineType\":\"$engineType\",\"version\":\"$version\",\"creator\":\"$creator\",\"configKey\":\"$configKey\"}"
+  curl -i -X DELETE $requestUrl -H "Accept: application/json" -H "Content-Type: application/json" -H "Token-Code:BML-AUTH" -H "Token-User:$user" -d "$requestBody"
+}
+
+add()
+{
+  requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue"
+  requestBody="{\"engineType\":\"$engineType\",\"version\":\"$version\",\"creator\":\"$creator\",\"configKey\":\"$configKey\",\"configValue\":\"$configValue\",\"force\":\"$force\",\"user\":\"$user\"}"
+  curl -i -X POST $requestUrl -H "Accept: application/json" -H "Content-Type: application/json" -H "Token-Code:BML-AUTH" -H "Token-User:$user" -d "$requestBody"
+}
+
+case $COMMAND in
+  add|get|delete)
+    $COMMAND
+    ;;
+  *)
+    print_usage
+    exit 2
+    ;;
+esac
diff --git a/linkis-dist/package/admin/linkis_task_res_log_clear.sh b/linkis-dist/package/admin/linkis_task_res_log_clear.sh
new file mode 100644
index 0000000000..4272633e23
--- /dev/null
+++ b/linkis-dist/package/admin/linkis_task_res_log_clear.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+expiredDays=365
+resultSetRootDir=/tmp/linkis
+logRootDir=/tmp/linkis
+userResultSetDir=$(hdfs dfs -ls $resultSetRootDir | awk '{print $8}')
+realLogRootDir=$logRootDir/log
+echo userResultSetDirs: $userResultSetDir
+echo realLogRootDir: $realLogRootDir
+
+if [ -z $LINKIS_LOG_DIR ];then
+  expiredFileRecordDir=${LINKIS_HOME}/expiredFileRecord
+else
+  expiredFileRecordDir=$LINKIS_LOG_DIR/expiredFileRecord
+fi
+
+function createExpiredFileRecordDir(){
+  if [ ! -d $expiredFileRecordDir ];then
+    mkdir -p $expiredFileRecordDir
+  fi
+}
+
+createExpiredFileRecordDir
+expireDate=$(date -d -${expiredDays}day +%Y-%m-%d)
+expireResultSetFile=$expiredFileRecordDir/linkis_expire_resultset_dir_${expireDate}.txt
+expireLogFile=$expiredFileRecordDir/linkis_expire_log_dir_${expireDate}.txt
+
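+# column $6 of 'hdfs dfs -ls' output is the modification date; the awk filters
+# below keep only paths older than 12 months and record them for batched deletion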
+hdfs dfs -ls $realLogRootDir | awk '$8 ~ /.*linkis\/log\/[0-9|\-|\_]*/ {cmd = "date -d -12month +%Y-%m-%d";cmd | getline cutoffDate;if($6 < cutoffDate) print $8}' >> $expireLogFile
+
+for i in $userResultSetDir
+do
+  hdfs dfs -ls $i/linkis | awk '$8 ~ /.*linkis\/[0-9\-]{10}/ {cmd = "date -d -12month +%Y-%m-%d";cmd | getline cutoffDate;if($6 < cutoffDate) print $8}' >> $expireResultSetFile
+done
+
+cat $expireLogFile | xargs -n 1000 hdfs dfs -rm -r -f
+
+cat $expireResultSetFile | xargs -n 1000 hdfs dfs -rm -r -f
+
+
diff --git a/linkis-dist/package/conf/linkis-cg-entrance.properties b/linkis-dist/package/conf/linkis-cg-entrance.properties
index bc43125b1d..4e2741e13e 100644
--- a/linkis-dist/package/conf/linkis-cg-entrance.properties
+++ b/linkis-dist/package/conf/linkis-cg-entrance.properties
@@ -35,5 +35,5 @@ wds.linkis.entrance.user.creator.ip.interceptor.switch=false
 ## you may set service version if you want to distinguish different configuration version
 spring.eureka.instance.metadata-map.linkis.conf.version=v1
-## clean dirty data when the entrance start
-linkis.entrance.auto.clean.dirty.data.enable=true
\ No newline at end of file
+wds.linkis.server.mybatis.BasePackage=org.apache.linkis.publicservice.common.lock.dao
+wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.publicservice.common.lock.entity
\ No newline at end of file
diff --git a/linkis-dist/package/conf/linkis-et-monitor-file.properties b/linkis-dist/package/conf/linkis-et-monitor-file.properties
new file mode 100644
index 0000000000..22a45841d8
--- /dev/null
+++ b/linkis-dist/package/conf/linkis-et-monitor-file.properties
@@ -0,0 +1,146 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+jobhistory.errorcode.01002={"alert_title":"Linkis Service load is too high, please contact Linkis owner","alert_info":"Linkis Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.errorcode.01003={"alert_title":"Linkis Service load is too high, please contact Linkis owner","alert_info":"Linkis Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.errorcode.01101={"alert_title":"Linkis Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.errorcode.01102={"alert_title":"Linkis ECM memory Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.errorcode.01103={"alert_title":"Linkis ECM CPU Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.errorcode.01104={"alert_title":"Linkis ECM instances Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.errorcode.01105={"alert_title":"Linkis Memory Service load is too high, please contact Linkis owner","alert_info":"The machine has insufficient memory. Please contact the administrator to expand the memory.","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
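+
+# jobhistory.errorcode.<code>: alert template used when a finished job fails with
+# the given error code (codes correspond to the linkis_ps_error_code entries seeded in linkis_dml.sql)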
+jobhistory.unfinished.time.exceed.sec.43200={"alert_title":"Linkis task execute timeout","alert_info":"Linkis task execute timeout 12h","alert_way":"1,2,3","alert_reciver":"","alert_level":"4","alert_obj":"Linkis","can_recover":"0"}
+
+ecm.resource.monitor.im.12003=\
+  {"alert_title":"Linkis Monitor Resource Alert",\
+  "alert_info":" $instance ECM Memory/cpu insufficient resource < 0.2 please contact Linkis owner: $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12004=\
+  {"alert_title":"Linkis Monitor Resource Alert",\
+  "alert_info":" $instance ECM Memory/cpu insufficient resource < 0.2 please contact Linkis owner: $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12005=\
+  {"alert_title":"Linkis entrance user running task monitor",\
+  "alert_info":"User $username running task at linkis ( $url ) > $runningtask ,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12006=\
+  {"alert_title":"Linkis entrance user queued task monitor",\
+  "alert_info":"User $username queued task at linkis ( $url ) > $queuedtask ,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12007=\
+  {"alert_title":"Linkis entrance user total task monitor",\
+  "alert_info":"User $username total task at linkis ( $url ) > $tasktotal ,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12008=\
+  {"alert_title":"Linkis entrance all running task monitor",\
+  "alert_info":"linkis ( $url ) running task > $taskminor,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12009=\
+  {"alert_title":"Linkis entrance all running task monitor",\
+  "alert_info":"linkis ( $url ) all task > $taskmajor,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+
+
+
+user.mode.monitor.im.12011=\
+  {"alert_title":"User mode execution timeout alarm",\
+  "alert_info":"User mode execution timeout alarm Linkis url: $url engineType:$engineType Task ID: $jobId please attention $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"alexyang,hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+user.mode.monitor.im.12012=\
+  {"alert_title":"User mode execution failure alarm",\
+  "alert_info":"User mode execution failure alarm Linkis url: $url Engine: $engineType TaskID: $jobId errorCode: $errorCode errorMsg: $errorMsg please attention $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"alexyang,hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+
+user.mode.monitor.im.12017=\
+  {"alert_title":"@alert_title",\
+  "alert_info":"task execute failed, reason $msg",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"@alert_reciver",\
+  "alert_level":"@alert_level",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
"alert_level":"@alert_level",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} +user.mode.monitor.im.12018=\ + {"alert_title":"@alert_title",\ + "alert_info":"task execute time out $timeout",\ + "alert_way":"1,2,3",\ + "alert_reciver":"@alert_reciver",\ + "alert_level":"@alert_level",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} + +thread.monitor.timeout.im.12014=\ + {"alert_title":" Linkis Shell Timeout Alert ",\ + "alert_info":"Monitor Shell execute time out $shellName",\ + "alert_way":"1,2,3",\ + "alert_reciver":"hadoop",\ + "alert_level":"3",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} +jobhistory.result.monitor.im.12015=\ + {"alert_title":"Linkis \u4EFB\u52A1\u72B6\u6001\u901A\u77E5",\ + "alert_info":"Task ID:$id final status: $status",\ + "alert_way":"1,2,3",\ + "alert_reciver":"",\ + "alert_level":"4",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} +jobhistory.result.monitor.im.12016=\ + {"alert_title":"@alert_title",\ + "alert_info":"[Alarm time]$date\n[Subsystem]$sysid\n[Alarm IP]$ip\n[Alarm object]$object\n[Alarm information]$detail",\ + "alert_way":"1,2,3",\ + "alert_reciver":"@alert_reciver",\ + "alert_level":"@alert_level",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} + + diff --git a/linkis-dist/package/conf/linkis-et-monitor.properties b/linkis-dist/package/conf/linkis-et-monitor.properties new file mode 100644 index 0000000000..6d802941ca --- /dev/null +++ b/linkis-dist/package/conf/linkis-et-monitor.properties @@ -0,0 +1,84 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.monitor.scan.app.instance.entity,org.apache.linkis.monitor.scan.app.jobhistory.entity,org.apache.linkis.bml.cleaner.dao +wds.linkis.server.mybatis.BasePackage=org.apache.linkis.monitor.scan.app.instance.dao,org.apache.linkis.monitor.scan.app.jobhistory.dao,org.apache.linkis.bml.cleaner.dao + + +# alert server url +linkis.alert.url=http://127.0.0.1/ + +# alert receiver +linkis.alert.receiver.default=hadoop + +# monitor ecm resource +linkis.monitor.ecm.resource.cron=0 0/10 * * * ? + +# Resource remaining ratio +linkis.monitor.ecmResourceTask.major=0.005 +linkis.monitor.ecmResourceTask.minor=0.01 + +# entrance task metrics cron +linkis.monitor.entrance.task.cron=0 0/10 * * * ? + +# Timeout task cron +linkis.monitor.jobHistory.timeout.cron=0 0/20 * * * ? + +# time out interval 24h +linkis.monitor.scanner.timeout.interval.seconds=86400 + + +# Finished task cron +linkis.monitor.jobHistory.finished.cron=0 0/20 * * * ? + +# linkis user mode cron +linkis.monitor.user.mode.cron=0 0/20 * * * ? 
+
+# user mode for engine
+linkis.monitor.user.enginelist=[{"engineType":"hive-2.3.3","code":"show tables","runType":"hql","executeUser":"hadoop"},\
+    {"engineType":"spark-2.4.3","code":"show tables","runType":"sql","executeUser":"hadoop"},\
+    {"engineType":"shell-1","code":"pwd","runType":"sh","executeUser":"hadoop"}]
+
+linkis.monitor.user.mode.timeOut=300
+
+
+# bml clear cron
+linkis.monitor.bml.clear.history.version.cron=0 0 12 * * ?
+# bml max version
+linkis.bml.cleaner.version.max.num=50
+# versions to keep
+linkis.bml.cleaner.version.keep.num=20
+# only versions older than this many days are cleared
+linkis.bml.cleaner.previous.interval.days=30
+# limit per scan
+linkis.bml.cleaner.once.limit.num=100
+
+# clear db ec record cron
+linkis.monitor.clear.ecRecord.cron=0 10 12 * * ?
+
+# clear task log cron
+linkis.monitor.clear.taskLog.cron=0 10 12 * * ?
+
+# clear db task history cron
+linkis.monitor.clear.history.task.cron=0 0 13 * * ?
+
+# scheduled pool size
+linkis.monitor.scheduled.pool.cores.num=20
+
+# shell timeout (minutes)
+linkis.monitor.shell.time.out.minute=60
+
+##Spring
+spring.server.port=9119
diff --git a/linkis-dist/package/conf/linkis-mg-gateway.properties b/linkis-dist/package/conf/linkis-mg-gateway.properties
index ef2d740c40..1f1d2416b4 100644
--- a/linkis-dist/package/conf/linkis-mg-gateway.properties
+++ b/linkis-dist/package/conf/linkis-mg-gateway.properties
@@ -20,8 +20,8 @@ wds.linkis.gateway.conf.enable.proxy.user=false
 wds.linkis.gateway.conf.url.pass.auth=/dss/
 wds.linkis.gateway.conf.enable.token.auth=true
 wds.linkis.is.gateway=true
-wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.instance.label.entity
-wds.linkis.server.mybatis.BasePackage=org.apache.linkis.instance.label.dao,org.apache.linkis.gateway.authentication.dao
+wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.instance.label.entity,org.apache.linkis.jobhistory.entity
+wds.linkis.server.mybatis.BasePackage=org.apache.linkis.instance.label.dao,org.apache.linkis.gateway.authentication.dao,org.apache.linkis.jobhistory.dao
 wds.linkis.label.entity.packages=org.apache.linkis.gateway.ujes.route.label
 wds.linkis.login_encrypt.enable=false
 ##LDAP
diff --git a/linkis-dist/package/conf/nacos/application-engineconn.yml b/linkis-dist/package/conf/nacos/application-engineconn.yml
new file mode 100644
index 0000000000..0cf4dd90b9
--- /dev/null
+++ b/linkis-dist/package/conf/nacos/application-engineconn.yml
@@ -0,0 +1,38 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+spring:
+  application:
+    name: linkis-cg-engineconn
+  servlet:
+    multipart:
+      max-file-size: 500MB
+      max-request-size: 500MB
+      file-size-threshold: 50MB
+  cloud:
+    nacos:
+      discovery:
+        server-addr: 127.0.0.1:8848
+        metadata:
+          prometheus.path: ${prometheus.path:/api/rest_j/v1/actuator/prometheus}
+          linkis.app.version: ${linkis.app.version}
+
+management:
+  endpoints:
+    web:
+      exposure:
+        include: refresh,info,health,metrics
+
+logging:
+  config: classpath:log4j2.xml
diff --git a/linkis-dist/package/conf/nacos/application-linkis.yml b/linkis-dist/package/conf/nacos/application-linkis.yml
new file mode 100644
index 0000000000..07dc8adeb3
--- /dev/null
+++ b/linkis-dist/package/conf/nacos/application-linkis.yml
@@ -0,0 +1,59 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+nacos:
+  discovery:
+    server-addr: 127.0.0.1:8848
+    metadata:
+      prometheus.path: ${prometheus.path:/api/rest_j/v1/actuator/prometheus}
+      linkis.app.version: ${linkis.app.version}
+
+management:
+  endpoints:
+    web:
+      exposure:
+        include: refresh,info,health,metrics
+
+logging:
+  config: classpath:log4j2.xml
+
+pagehelper:
+  helper-dialect: mysql
+  reasonable: true
+  support-methods-arguments: true
+  params: countSql
+
+spring:
+  servlet:
+    multipart:
+      max-file-size: 500MB
+      max-request-size: 500MB
+      file-size-threshold: 50MB
+#ribbon:
+#  ReadTimeout: 10000
+#  ConnectTimeout: 10000
+  cloud:
+    nacos:
+      discovery:
+        server-addr: 127.0.0.1:8848
+        metadata:
+          prometheus.path: ${prometheus.path:/api/rest_j/v1/actuator/prometheus}
+          linkis.app.version: ${linkis.app.version}
+
+
+##disable knife4j.production when you want to use apidoc during development
+knife4j:
+  enable: true
+  production: true
diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql
index 739fa0ba71..052da64168 100644
--- a/linkis-dist/package/db/linkis_ddl.sql
+++ b/linkis-dist/package/db/linkis_ddl.sql
@@ -30,23 +30,25 @@ SET FOREIGN_KEY_CHECKS=0;
 DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`;
 CREATE TABLE `linkis_ps_configuration_config_key`(
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
-  `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances',
-  `description` varchar(200) DEFAULT NULL,
-  `name` varchar(50) DEFAULT NULL,
-  `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key',
-  `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules',
-  `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range',
-  `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc',
-  `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end',
-  `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so',
-  `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets',
-  `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType',
-  `en_description` varchar(200) DEFAULT NULL COMMENT 'english description',
-  `en_name` varchar(100) DEFAULT NULL COMMENT 'english name',
-  `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName',
-  PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances',
+  `description` varchar(200) DEFAULT NULL,
+  `name` varchar(50) DEFAULT NULL,
+  `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key',
+  `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules',
+  `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range',
+  `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc',
+  `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end',
+  `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so',
+  `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets',
+  `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType',
+  `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with min /2 with max / 3 min and max both',
+  `en_description` varchar(200) DEFAULT NULL COMMENT 'english description',
+  `en_name` varchar(100) DEFAULT NULL COMMENT 'english name',
+  `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName',
+  `template_required` tinyint(1) DEFAULT 0 COMMENT 'template required 0 none / 1 must',
+  PRIMARY KEY (`id`)
+)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
@@ -63,7 +65,7 @@ DROP TABLE IF EXISTS `linkis_ps_configuration_config_value`;
 CREATE TABLE `linkis_ps_configuration_config_value`(
   `id` bigint(20) NOT NULL AUTO_INCREMENT,
   `config_key_id` bigint(20),
-  `config_value` varchar(200),
+  `config_value` varchar(500),
   `config_label_id`int(20),
   `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
   `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
@@ -84,6 +86,61 @@ CREATE TABLE `linkis_ps_configuration_category` (
   UNIQUE INDEX `uniq_label_id` (`label_id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`;
+CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_template_config_key` (
+  `id` BIGINT(20) NOT NULL AUTO_INCREMENT,
+  `template_name` VARCHAR(200) NOT NULL COMMENT 'Configuration template name redundant storage',
+  `template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party',
+  `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key',
+  `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value',
+  `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value',
+  `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)',
+  `validate_range` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Verification regularity (reserved)',
+  `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? Reserved Y/N',
+  `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator',
+  `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+  `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by',
+  `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
+  PRIMARY KEY (`id`),
+  UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`),
+  UNIQUE INDEX `uniq_tname_kid` (`template_name`, `key_id`)
+  )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
+
+DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`;
+CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_limit_for_user` (
+  `id` BIGINT(20) NOT NULL AUTO_INCREMENT,
+  `user_name` VARCHAR(50) NOT NULL COMMENT 'username',
+  `combined_label_value` VARCHAR(128) NOT NULL COMMENT 'Combined label combined_userCreator_engineType such as hadoop-IDE,spark-2.4.3',
+  `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key',
+  `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value',
+  `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value',
+  `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)',
+  `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party',
+  `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? Reserved Y/N',
+  `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator',
+  `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+  `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by',
+  `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
+  PRIMARY KEY (`id`),
+  UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`)
+)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
+
+DROP TABLE IF EXISTS `linkis_ps_configutation_lm_across_cluster_rule`;
+CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule (
+  id INT AUTO_INCREMENT COMMENT 'Rule ID, auto-increment primary key',
+  cluster_name char(32) NOT NULL COMMENT 'Cluster name, cannot be empty',
+  creator char(32) NOT NULL COMMENT 'Creator, cannot be empty',
+  username char(32) NOT NULL COMMENT 'User, cannot be empty',
+  create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time, cannot be empty',
+  create_by char(32) NOT NULL COMMENT 'Creator, cannot be empty',
+  update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Modification time, cannot be empty',
+  update_by char(32) NOT NULL COMMENT 'Updater, cannot be empty',
+  rules varchar(256) NOT NULL COMMENT 'Rule content, cannot be empty',
+  is_valid VARCHAR(2) DEFAULT 'N' COMMENT 'Is it valid Y/N',
+  PRIMARY KEY (id),
+  UNIQUE KEY idx_creator_username (creator, username)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
+
 --
 -- New linkis job
 --
@@ -135,6 +192,7 @@ DROP TABLE IF EXISTS `linkis_ps_common_lock`;
 CREATE TABLE `linkis_ps_common_lock` (
   `id` int(11) NOT NULL AUTO_INCREMENT,
   `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+  `locker` varchar(255) COLLATE utf8_bin NOT NULL,
   `time_out` longtext COLLATE utf8_bin,
   `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
   `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
@@ -153,6 +211,8 @@ DROP TABLE IF EXISTS `linkis_ps_udf_manager`;
 CREATE TABLE `linkis_ps_udf_manager` (
   `id` bigint(20) NOT NULL AUTO_INCREMENT,
   `user_name` varchar(20) DEFAULT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (`id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -166,6 +226,8 @@ CREATE TABLE `linkis_ps_udf_shared_group` (
   `id` bigint(20) NOT NULL AUTO_INCREMENT,
   `udf_id` bigint(20) NOT NULL,
   `shared_group` varchar(50) NOT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
   PRIMARY KEY (`id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -174,7 +236,9 @@
 CREATE TABLE `linkis_ps_udf_shared_info` (
   `id` bigint(20) PRIMARY KEY NOT NULL AUTO_INCREMENT,
   `udf_id` bigint(20) NOT NULL,
-  `user_name` varchar(50) NOT NULL
+  `user_name` varchar(50) NOT NULL,
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
 -- ----------------------------
@@ -190,7 +254,8 @@ CREATE TABLE `linkis_ps_udf_tree` (
   `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
   `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
   `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function',
-  PRIMARY KEY (`id`)
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -203,7 +268,10 @@ CREATE TABLE `linkis_ps_udf_user_load` (
   `id` bigint(20) NOT NULL AUTO_INCREMENT,
   `udf_id` bigint(20) NOT NULL,
   `user_name` varchar(50) NOT NULL,
-  PRIMARY KEY (`id`)
+  `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
 
 DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`;
@@ -235,6 +303,7 @@ CREATE TABLE `linkis_ps_udf_version` (
   `use_format` varchar(255) DEFAULT NULL,
   `description` varchar(255) NOT NULL COMMENT 'version desc',
   `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
   `md5` varchar(100) DEFAULT NULL,
   PRIMARY KEY (`id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
@@ -486,6 +555,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources` (
   `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time',
   `updator` varchar(50) DEFAULT NULL COMMENT 'updator',
   `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen',
+  unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`),
   PRIMARY KEY (`id`)
 ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4;
diff --git a/linkis-dist/package/db/linkis_ddl_pg.sql b/linkis-dist/package/db/linkis_ddl_pg.sql
index c205d76590..36cb554c98 100644
--- a/linkis-dist/package/db/linkis_ddl_pg.sql
+++ b/linkis-dist/package/db/linkis_ddl_pg.sql
@@ -40,10 +40,12 @@ CREATE TABLE linkis_ps_configuration_config_key (
   is_hidden bool NULL,
   is_advanced bool NULL,
   "level" int2 NULL,
   "treeName" varchar(20) NULL,
   en_description varchar(200) NULL,
   en_name varchar(100) NULL,
   "en_treeName" varchar(100) NULL,
+  boundary_type int2 NOT NULL,
+  template_required bool NULL,
   CONSTRAINT linkis_configuration_config_key_pkey PRIMARY KEY (id)
 );
 COMMENT ON COLUMN "linkis_ps_configuration_config_key"."key" IS 'Set key, e.g. spark.executor.instances';
@@ -58,7 +61,7 @@ COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'Reserved f
 COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'english description';
 COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'english name';
 COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'english treeName';
-
+CREATE UNIQUE INDEX uniq_key_ectype ON linkis_ps_configuration_config_key USING btree ("key","engine_conn_type");
 DROP TABLE IF EXISTS "linkis_ps_configuration_key_engine_relation";
 CREATE TABLE linkis_ps_configuration_key_engine_relation (
@@ -98,6 +101,53 @@ CREATE TABLE linkis_ps_configuration_category (
 );
 CREATE UNIQUE INDEX uniq_label_id_cc ON linkis_ps_configuration_category USING btree (label_id);
+DROP TABLE IF EXISTS linkis_ps_configuration_template_config_key;
+CREATE TABLE linkis_ps_configuration_template_config_key (
+  id BIGINT PRIMARY KEY NOT NULL,
+  template_name VARCHAR(200) NOT NULL,
+  template_uuid VARCHAR(36) NOT NULL,
+  key_id BIGINT NOT NULL,
+  config_value VARCHAR(200) NULL DEFAULT NULL,
+  max_value VARCHAR(50) NULL DEFAULT NULL,
+  min_value VARCHAR(50) NULL DEFAULT NULL,
+  validate_range VARCHAR(50) NULL DEFAULT NULL,
+  is_valid VARCHAR(2) DEFAULT 'Y',
+  create_by VARCHAR(50) NOT NULL,
+  create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(),
+  update_by VARCHAR(50) NULL DEFAULT NULL,
+  update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW()
+);
+
+DROP TABLE IF EXISTS linkis_ps_configuration_key_limit_for_user;
+CREATE TABLE IF NOT EXISTS linkis_ps_configuration_key_limit_for_user (
+  id bigint NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1),
+  user_name varchar(50) NOT NULL,
+  combined_label_value varchar(128) NOT NULL,
+  key_id bigint NOT NULL,
+  config_value varchar(200) NULL DEFAULT NULL,
+  max_value varchar(50) NULL DEFAULT NULL,
+  min_value varchar(50) NULL DEFAULT NULL,
+  latest_update_template_uuid varchar(36) NOT NULL,
+  is_valid varchar(2) DEFAULT 'Y',
+  create_by varchar(50) NOT NULL,
+  create_time timestamp without time zone DEFAULT now(),
+  update_by varchar(50) NULL DEFAULT NULL,
+  update_time timestamp without time zone DEFAULT now()
+  );
+
+DROP TABLE IF EXISTS linkis_ps_configutation_lm_across_cluster_rule;
+CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule (
+  id int PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  cluster_name varchar(32) NOT NULL,
+  creator varchar(32) NOT NULL,
+  username varchar(32) NOT NULL,
+  create_time TIMESTAMP NOT NULL DEFAULT NOW(),
+  create_by varchar(32) NOT NULL,
+  update_time TIMESTAMP NOT NULL DEFAULT NOW(),
+  update_by varchar(32),
+  rules TEXT NOT NULL,
+  is_valid varchar(1) DEFAULT 'N'
+);
 DROP TABLE IF EXISTS "linkis_ps_job_history_group_history";
 CREATE TABLE linkis_ps_job_history_group_history (
@@ -176,6 +226,7 @@ DROP TABLE IF EXISTS "linkis_ps_common_lock";
 CREATE TABLE linkis_ps_common_lock (
   id bigserial NOT NULL,
   lock_object varchar(255) NULL,
+  locker varchar(255) NOT NULL,
   time_out text NULL,
   update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
   create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
@@ -188,6 +239,8 @@ DROP TABLE IF EXISTS "linkis_ps_udf_manager";
 CREATE TABLE linkis_ps_udf_manager (
   id bigserial NOT NULL,
   user_name varchar(20) NULL,
+  update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
+  create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
   CONSTRAINT linkis_udf_manager_pkey PRIMARY KEY (id)
 );
@@ -197,6 +250,8 @@ CREATE TABLE linkis_ps_udf_shared_group (
   id bigserial NOT NULL,
   udf_id int8 NOT NULL,
   shared_group varchar(50) NOT NULL,
+  update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
+  create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
   CONSTRAINT linkis_udf_shared_group_pkey PRIMARY KEY (id)
 );
@@ -206,6 +261,8 @@ CREATE TABLE linkis_ps_udf_shared_info (
   id bigserial NOT NULL,
   udf_id int8 NOT NULL,
   user_name varchar(50) NOT NULL,
+  update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
+  create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
   CONSTRAINT linkis_ps_udf_shared_info_pkey PRIMARY KEY (id)
 );
@@ -224,16 +281,18 @@ CREATE TABLE linkis_ps_udf_tree (
 );
 COMMENT ON COLUMN "linkis_ps_udf_tree"."name" IS 'Category name of the function. It would be displayed in the front-end';
 COMMENT ON COLUMN "linkis_ps_udf_tree"."category" IS 'Used to distinguish between udf and function';
-
+CREATE UNIQUE INDEX uniq_parent_name_uname_category ON linkis_ps_udf_tree USING btree (parent,name,user_name,category);
 DROP TABLE IF EXISTS "linkis_ps_udf_user_load";
 CREATE TABLE linkis_ps_udf_user_load (
   id bigserial NOT NULL,
   udf_id int4 NOT NULL,
   user_name varchar(50) NOT NULL,
+  update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
+  create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
   CONSTRAINT linkis_ps_udf_user_load_pkey PRIMARY KEY (id)
 );
-
+CREATE UNIQUE INDEX uniq_uid_uname ON linkis_ps_udf_user_load USING btree (udf_id, user_name);
 DROP TABLE IF EXISTS "linkis_ps_udf_baseinfo";
 CREATE TABLE linkis_ps_udf_baseinfo (
@@ -264,6 +323,7 @@ CREATE TABLE linkis_ps_udf_version (
   use_format varchar(255) NULL,
   description varchar(255) NOT NULL,
   create_time timestamptz(6) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP,
   md5 varchar(100) NULL,
   CONSTRAINT linkis_ps_udf_version_pkey PRIMARY KEY (id)
 );
@@ -509,7 +569,7 @@ COMMENT ON COLUMN "linkis_ps_bml_resources"."max_version" IS '默认为10,指
 COMMENT ON COLUMN "linkis_ps_bml_resources"."update_time" IS '更新时间';
 COMMENT ON COLUMN "linkis_ps_bml_resources"."updator" IS '更新者';
 COMMENT ON COLUMN "linkis_ps_bml_resources"."enable_flag" IS '状态,1:正常,0:冻结';
-
+CREATE UNIQUE INDEX uniq_rid_eflag ON linkis_ps_bml_resources USING btree (resource_id, enable_flag);
 DROP TABLE IF EXISTS "linkis_ps_bml_resources_version";
 CREATE TABLE linkis_ps_bml_resources_version (
diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql
index 0a15f31ca1..8a1dcffa3a 100644
--- a/linkis-dist/package/db/linkis_dml.sql
+++ b/linkis-dist/package/db/linkis_dml.sql
@@ -83,19 +83,17 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`,
 INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark');
 INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python3', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.spill=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.locality.wait', '范围:0-3,单位:秒', '任务调度本地等待时间', '3s', 'OFT', '[\"0s\",\"1s\",\"2s\",\"3s\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0-3, Unit: second', 'Task Scheduling Local Waiting Time'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, 
`name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.memory.fraction', '范围:0.4,0.5,0.6,单位:百分比', '执行内存和存储内存的百分比', '0.6', 'OFT', '[\"0.4\",\"0.5\",\"0.6\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0.4, 0.5, 0.6, in percentage', 'Percentage Of Execution Memory And Storage Memory'); + -- hive INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:-1-10000,单位:个', 'reduce数', '-1', 'NumInterval', '[-1,10000]', '0', '1', '1', 'hive资源设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'hive引擎设置', 'hive'); - -- python INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python'); @@ -380,8 +378,8 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0); -INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('01004','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0);
-INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','机器内存不足,请联系管理员扩容','Cannot allocate memory',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12001','队列CPU资源不足,可以调整Spark执行器个数','Queue CPU resources are insufficient',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12002','队列内存资源不足,可以调整Spark执行器个数','Insufficient queue memory',0);
@@ -406,8 +404,11 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type)
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或者可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0);
+
 -- 21 cluster Authority 22 db Authority
-INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0);
@@ -508,7 +509,9 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type)
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43013','索引使用错误','IndexError',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43014','sql语法有问题','raise ParseException',0);
 INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','当前节点需要的CS表解析失败,请检查当前CSID对应的CS表是否存在','Cannot parse cs table for node',0);
-
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43016','模块 %s 没有属性 %s ,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0);
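+-- Illustrative sanity check, not part of the seed data: error_regex values are matched against task logs to map a
+-- failure onto an error_desc. Assuming a MySQL 8 client, a pattern can be spot-checked with REGEXP, e.g.:
+--   SELECT 'User hadoop cannot submit applications to queue root.ide' REGEXP 'User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)';  -- returns 1
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43017','存在参数无效或拼写错误,请确认 %s 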
参数正确性','KeyError: (\\(.+\\))',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43018','文件未找到,请确认该路径( %s )是否存在','FileNotFoundError.*No such file or directory\\:\\s\'(\\S+)\'',0); -- 46 importExport INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0); diff --git a/linkis-dist/package/db/module/linkis_configuration.sql b/linkis-dist/package/db/module/linkis_configuration.sql index fefa6f9f99..a171c71819 100644 --- a/linkis-dist/package/db/module/linkis_configuration.sql +++ b/linkis-dist/package/db/module/linkis_configuration.sql @@ -14,23 +14,29 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - + DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`; -CREATE TABLE `linkis_ps_configuration_config_key`( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', - `description` varchar(200) DEFAULT NULL, - `name` varchar(50) DEFAULT NULL, - `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', - `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', - `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', - `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', - `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', - `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', - `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +CREATE TABLE `linkis_ps_configuration_config_key` +( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', + `description` varchar(200) DEFAULT NULL, + `name` varchar(50) DEFAULT NULL, + `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', + `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', + `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', + `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. 
If set to 1(true), parameters would be displayed only when user chooses to do so',
+  `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets',
+  `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType',
+  `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none / 1 with min / 2 with max / 3 min and max both',
+  `en_description` varchar(200) DEFAULT NULL COMMENT 'english description',
+  `en_name` varchar(100) DEFAULT NULL COMMENT 'english name',
+  `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName',
+  PRIMARY KEY (`id`)
+);
+
 DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`;
diff --git a/linkis-dist/package/db/module/linkis_configuration_dml.sql b/linkis-dist/package/db/module/linkis_configuration_dml.sql
index 0d989eba38..7f34f8a75f 100644
--- a/linkis-dist/package/db/module/linkis_configuration_dml.sql
+++ b/linkis-dist/package/db/module/linkis_configuration_dml.sql
@@ -60,11 +60,6 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`,
 INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', 'spark执行器内存大小', '3g', 'Regex', '^([3-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark');
 INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark');
 INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','2g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
-INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
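+-- Illustrative sanity check, not part of the seed data: validate_range carries the constraint named by validate_type
+-- (Regex, NumInterval, OFT, ...). Assuming a MySQL 8 client, a candidate value can be tested against a Regex row, e.g.:
+--   SELECT '4g' REGEXP '^([3-9]|1[0-5])(G|g)$';   -- returns 1 (valid spark.executor.memory)
+--   SELECT '16g' REGEXP '^([3-9]|1[0-5])(G|g)$';  -- returns 0 (out of range)
-INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, 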
`is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -- hive INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); diff --git a/linkis-dist/package/db/module/linkis_udf.sql b/linkis-dist/package/db/module/linkis_udf.sql index 999793b1cc..3e7b2c4f13 100644 --- a/linkis-dist/package/db/module/linkis_udf.sql +++ b/linkis-dist/package/db/module/linkis_udf.sql @@ -25,6 +25,8 @@ DROP TABLE IF EXISTS `linkis_ps_udf_manager`; CREATE TABLE `linkis_ps_udf_manager` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -73,9 +75,12 @@ CREATE TABLE `linkis_ps_udf_tree` ( DROP TABLE IF EXISTS `linkis_ps_udf_user_load`; CREATE TABLE `linkis_ps_udf_user_load` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, - `udf_id` int(11) NOT NULL, + `udf_id` bigint(20) NOT NULL, `user_name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; diff --git a/linkis-dist/package/db/udf/udf_sys.sql b/linkis-dist/package/db/udf/udf_sys.sql new file mode 100644 index 0000000000..903834596c --- /dev/null +++ b/linkis-dist/package/db/udf/udf_sys.sql @@ -0,0 +1,813 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+-- 字符串函数
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","字符串函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="字符串函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","",""," Column substring(Column str, int pos, int len)","Returns the substring from string str before count occurrences of the delimiter delim.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","concat","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "concat";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string concat(STRING|BINARY a, STRING|BINARY b...)","Returns the string or bytes resulting from concatenating the strings or bytes passed in as parameters in order. For example, concat('foo', 'bar') results in 'foobar'. Note that this function can take any number of input strings.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","concat_ws","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "concat_ws";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string concat_ws(string SEP, array<string>)","Like concat(), but with custom separator SEP.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","decode","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "decode";
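+-- Note for readers of this seed script (editorial comment, not part of the upstream change): every function follows the
+-- same three-statement pattern. A tree node per category is inserted once and its auto-increment id is captured into a
+-- MySQL session variable; each function then inserts a baseinfo row and a version row keyed off the captured ids, e.g.
+-- @ps_udf_tree_id and @ps_udf_baseinfo_id above.
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string decode(binary bin, string charset)","Decodes the first argument into a String using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). If either argument is null, the result will also be null. ",now(),"",now());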
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","elt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "elt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string elt(N int,str1 string,str2 string,str3 string,...)","Return string at index number. For example elt(2,'hello','world') returns 'world'.?Returns NULL if N is less than 1 or greater than the number of arguments.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","format_number","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "format_number"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string format_number(number x, int d)","Formats the number X to a format like '#,###,###.##', rounded to D decimal places, and returns the result as a string. If D is 0, the result has no decimal point or fractional part.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","get_json_object","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "get_json_object"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string get_json_object(string json_string, string path)","Extracts json object from a json string based on json path specified, and returns json string of the extracted json object. It will return null if the input json string is invalid.?NOTE: The json path can only have the characters [0-9a-z_], i.e., no upper",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lower","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lower"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lower(string A) lcase(string A)","Returns the string resulting from converting all characters of B to lower case. 
For example, lower('fOoBaR') results in 'foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lcase","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lcase"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lcase(string A)","Returns the string resulting from converting all characters of B to lower case. For example, lower('fOoBaR') results in 'foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lpad","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lpad"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lpad(string str, int len, string pad)","Returns str, left-padded with pad to a length of len. If str is longer than len, the return value is shortened to len characters. In case of empty pad string, the return value is null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ltrim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ltrim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string ltrim(string A)","Returns the string resulting from trimming spaces from the beginning(left hand side) of A. For example, ltrim(' foobar ') results in 'foobar '.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","parse_url","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "parse_url"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string parse_url(string urlString, string partToExtract [, string keyToExtract])","Returns the specified part from the URL. Valid values for partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO. For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') returns 'facebook.com'. 
Als",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","printf","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "printf"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string printf(String format, Obj... args)","Returns the input formatted according do printf-style format strings .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regexp_extract","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regexp_extract"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string regexp_extract(string subject, string pattern, int index)","Returns the string extracted using the pattern. For example, regexp_extract('foothebar', 'foo(.*?)(bar)', 2) returns 'bar.' Note that some care is necessary in using predefined character classes: using 's' as the second argument will match the letter s; '",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regexp_replace","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regexp_replace"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string regexp_replace(string INITIAL_STRING, string PATTERN, string REPLACEMENT)","Returns the string resulting from replacing all substrings in INITIAL_STRING that match the java regular expression syntax defined in PATTERN with instances of REPLACEMENT. For example, regexp_replace(foobar, oo|ar, ) returns 'fb.' 
Note that some care is ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","repeat","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "repeat"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string repeat(string str, int n)","Repeats str n times.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","replace","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "replace"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string replace(string A, string OLD, string NEW)","Returns the string A with all non-overlapping?occurrences of OLD replaced with NEW . Example: select replace(ababab, abab, Z); returns Zab.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","reverse","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "reverse"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string reverse(string A)","Returns the reversed string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rpad","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rpad"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rpad(string str, int len, string pad)","Returns str, right-padded with pad to a length of len. If str is longer than len, the return value is shortened to len characters. In case of empty pad string, the return value is null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rtrim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rtrim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rtrim(string A)","Returns the string resulting from trimming spaces from the end(right hand side) of A. 
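+-- Usage sketch (Hive SQL, illustrative only; values taken from the descriptions above):
+--   SELECT regexp_extract('foothebar', 'foo(.*?)(bar)', 2);  -- 'bar'
+--   SELECT regexp_replace('foobar', 'oo|ar', '');            -- 'fb'
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rtrim(string A)","Returns the string resulting from trimming spaces from the end(right hand side) of A. 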
For example, rtrim(' foobar ') results in ' foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","space","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "space"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string space(int n)","Returns a string of n spaces.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substr(STRING|BINARY A, INT start [, INT len])","Returns the substring or slice of the byte array of A starting from start position till the end of string A or with optional length len. For example, substr('foobar', 4) results in 'bar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substring(STRING|BINARY a, INT start [, INT len])","Returns the substring or slice of the byte array of A starting from start position till the end of string A or with optional length len. For example, substr('foobar', 4) results in 'bar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring_index","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring_index"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substring_index(string A, string delim, int count)","Returns the substring from string A before count occurrences of the delimiter delim. If count is positive, everything to the left of the final delimiter (counting from the left) is returned. 
If count is negative, everything to the right of the final delim",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","translate","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "translate";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string translate(string|char|varchar input, string|char|varchar from, string|char|varchar to)","Translates the input string by replacing the characters present in the from string with the corresponding characters in the to string. This is similar to the translate function in PostgreSQL. If any of the parameters to this UDF are NULL, the result is NUL",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","trim","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "trim";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string trim(string A)","Returns the string resulting from trimming spaces from both ends of A. For example, trim(' foobar ') results in 'foobar'",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","upper","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "upper";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string upper(string A)","Returns the string resulting from converting all characters of A to upper case. For example, upper('fOoBaR') results in 'FOOBAR'.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ucase","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ucase";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string ucase(string A)","Returns the string resulting from converting all characters of A to upper case. For example, upper('fOoBaR') results in 'FOOBAR'.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","initcap","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "initcap";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string initcap(string A)","Returns string, with the first letter of each word in uppercase, all other letters in lowercase. Words are delimited by whitespace.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","soundex","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "soundex";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string soundex(string A)","Returns soundex code of the string. For example, soundex('Miller') results in M460.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","str_to_map","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "str_to_map";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","map<string,string> str_to_map(text[, delimiter1, delimiter2])","Splits text into key-value pairs using two delimiters. Delimiter1 separates text into K-V pairs, and Delimiter2 splits each K-V pair. Default delimiters are ',' for delimiter1 and ':' for delimiter2.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ascii","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ascii";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int ascii(string str)","Returns the numeric value of the first character of str.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","character_length","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "character_length";
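+-- Usage sketch (Hive SQL, illustrative only; values taken from the descriptions above):
+--   SELECT trim(' foobar ');           -- 'foobar'
+--   SELECT str_to_map('k1:v1,k2:v2');  -- {"k1":"v1","k2":"v2"} with the default ',' and ':' delimiters
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int character_length(string str)","Returns the number of UTF-8 characters contained in str. The function char_length is shorthand for this function.",now(),"",now());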
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","field","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "field";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int field(val T,val1 T,val2 T,val3 T,...)","Returns the index of val in the val1,val2,val3,... list or 0 if not found. For example field('world','say','hello','world') returns 3. All primitive types are supported, arguments are compared using str.equals(x). If val is NULL, the return value is 0.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","find_in_set","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "find_in_set";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int find_in_set(string str, string strList)","Returns the first occurrence of str in strList where strList is a comma-delimited string. Returns null if either argument is null. Returns 0 if the first argument contains any commas. For example, find_in_set('ab', 'abc,b,ab,c,def') returns 3.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","instr","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "instr";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int instr(string str, string substr)","Returns the position of the first occurrence of substr in str. Returns null if either of the arguments are null and returns 0 if substr could not be found in str. Be aware that this is not zero based. The first character in str has index 1.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","length","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "length";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int length(string A)","Returns the length of the string.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","locate","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "locate";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int locate(string substr, string str[, int pos])","Returns the position of the first occurrence of substr in str after position pos.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","octet_length","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "octet_length";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int octet_length(string str)","Returns the number of octets required to hold the string str in UTF-8 encoding. Note that octet_length(str) can be larger than character_length(str).",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","levenshtein","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "levenshtein";
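+-- Usage sketch (Hive SQL, illustrative only; values taken from the descriptions above):
+--   SELECT instr('foobar', 'bar');               -- 4 (positions are 1-based)
+--   SELECT find_in_set('ab', 'abc,b,ab,c,def');  -- 3
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int levenshtein(string A, string B)","Returns the Levenshtein distance between two strings. For example, levenshtein('kitten', 'sitting') results in 3.",now(),"",now());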
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","in_file","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "in_file";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean in_file(string str, string filename)","Returns true if the string str appears as an entire line in filename.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","encode","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "encode";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary encode(string src, string charset)","Encodes the first argument into a BINARY using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). If either argument is null, the result will also be null.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","expr","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "expr";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expr(String expr)","Parses the expression string into the column that it represents, similar to DataFrame.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unbase64","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unbase64";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary unbase64(string str)","Converts the argument from a base 64 string to BINARY. ",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","context_ngrams","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "context_ngrams";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<struct<string,double>> context_ngrams(array<array<string>>, array<string>, int K, int pf)","Returns the top-k contextual N-grams from a set of tokenized sentences, given a string of context. See StatisticsAndDataMining for more information.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","format_string","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "format_string";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column format_string(String format, scala.collection.Seq arguments)","Formats the arguments in printf-style and returns the result as a string column.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ngrams","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ngrams";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<struct<string,double>> ngrams(array<array<string>>, int N, int K, int pf)","Returns the top-k N-grams from a set of tokenized sentences, such as those returned by the sentences() UDAF. See StatisticsAndDataMining for more information.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sentences","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sentences";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<array<string>> sentences(string str, string lang, string locale)","Tokenizes a string of natural language text into words and sentences, where each sentence is broken at the appropriate sentence boundary and returned as an array of words. The 'lang' and 'locale' are optional arguments. For example, sentences('Hello there",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","split","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "split";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<string> split(string str, string pat)","Splits str around pat (pat is a regular expression).",now(),"",now());
+-- 数值函数
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","数值函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="数值函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","abs","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "abs";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column abs(Column e)","Computes the absolute value",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","randn","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "randn";
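+-- Usage sketch (Hive SQL, illustrative only; values follow the descriptions above):
+--   SELECT split('a,b,c', ',');  -- ["a","b","c"]
+--   SELECT abs(-3.5);            -- 3.5
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column randn(long seed)","Generate a column with independent and identically distributed (i.i.d.) 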
samples from the standard normal distribution.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rint","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rint"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column rint(Column e)","Returns the double value that is closest in value to the argument and is equal to a mathematical integer.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","signum","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "signum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column signum(Column e)","Computes the signum of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sinh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sinh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column sinh(String columnName)","Computes the hyperbolic sine of the given column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","tanh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "tanh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column tanh(Column e)","Computes the hyperbolic tangent of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","toDegrees","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "toDegrees"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column toDegrees(Column e)","Use degrees. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","toRadians","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "toRadians"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column toRadians(Column e)","Use radians.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","atan2","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "atan2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column atan2(Column l, Column r)","Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cosh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cosh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column cosh(Column e)","Computes the hyperbolic cosine of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","expm1","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "expm1"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expm1(Column e)","Computes the exponential of the given value minus one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","round","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "round"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE round(DOUBLE a [, INT d])","Returns the rounded BIGINT value of a or a rounded to d decimal places.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hypot","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hypot"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column hypot(Column l, Column r)","Computes sqrt(a^2 + b^2) without intermediate overflow or underflow.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bround","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bround"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE bround(DOUBLE a [, INT d])","Returns the rounded BIGINT value of a using HALF_EVEN rounding mode, with optional decimal places d.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","floor","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "floor"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT floor(DOUBLE a)","Returns the maximum BIGINT value that is equal to or less than a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ceil","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ceil"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT ceil(DOUBLE a)","Returns the minimum BIGINT value that is equal to or greater than a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ceiling","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ceiling"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT ceiling(DOUBLE a)","Returns the minimum BIGINT value that is equal to or greater than a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rand","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rand"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE rand([INT seed])","Returns a random number (that changes from row to row) that is distributed uniformly from 0 to 1.
Specifying the seed will make sure the generated random number sequence is deterministic.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log1p","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log1p"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column log1p(Column e)","Computes the natural logarithm of the given value plus one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","exp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "exp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE exp(DOUBLE|DECIMAL a)","Returns e^a where e is the base of the natural logarithm.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ln","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ln"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE ln(DOUBLE|DECIMAL a)","Returns the natural logarithm of the argument a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log10","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log10"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log10(DOUBLE|DECIMAL a)","Returns the base-10 logarithm of the argument a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log2(DOUBLE|DECIMAL a)","Returns the base-2 logarithm of the argument a.
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log(TL base, DOUBLE |DECIMALa)","Returns the base-base?logarithm of the argument?a.?",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pow","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pow"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE pow(DOUBLE a, DOUBLE p)","Returns?ap.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","power","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "power"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE power(DOUBLE a, DOUBLE p)","Returns?ap.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sqrt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sqrt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sqrt(DOUBLE a), sqrt(DECIMAL a)","Returns the square root of?a. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING bin(BIGINT a)","Returns the number in binary format.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING hex(BIGINT a) hex(STRING a) hex(BINARY a)","If the argument is an?INT?or?binary,?hex?returns the number as a?STRING?in hexadecimal format. Otherwise if the number is a?STRING, it converts each character into its hexadecimal representation and returns the resulting?STRING.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unhex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unhex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BINARY unhex(STRING a)","Inverse of hex. Interprets each pair of characters as a hexadecimal number and converts to the byte representation of the number. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","conv","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "conv"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING conv(BIGINT num, INT from_base, INT to_base), conv(STRING num, INT from_base, INT to_base)","Converts a number from a given base to another .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","abs","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "abs"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE abs(DOUBLE a)","Returns the absolute value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pmod","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pmod"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T pmod(T a, T b),","Returns the positive value of?a mod b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sin(T a)","Returns the sine of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE asin(T a)","Returns the arc sin of?a?if -1<=a<=1 or NULL otherwise. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cos","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cos"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE cos(T a)","Returns the cosine of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","acos","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "acos"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE acos(T a)","Returns the arccosine of?a?if -1<=a<=1 or NULL otherwise. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","tan","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "tan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE tan(T a)","Returns the tangent of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","atan","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "atan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE atan(T a)","Returns the arctangent of?a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","degrees","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "degrees"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE degrees(T a)","Converts value of?a?from radians to degrees. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","radians","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "radians"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE radians(T a)","Converts value of?a?from degrees to radians. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","positive","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "positive"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T positive(T a)","Returns?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","negate","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "negate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column negate(Column e)","Unary minus.negate the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","negative","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "negative"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T negative(T a)","Returns?-a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sign","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sign"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T sign(T a)","Returns the sign of?a?as '1.0' (if?a?is positive) or '-1.0' (if?a?is negative), '0.0' otherwise. The decimal version returns INT instead of DOUBLE. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","e","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "e"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE e()","Returns the value of?e.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pi","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pi"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE pi()","Returns the value of?pi.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","factorial","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "factorial"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT factorial(INT a)","Returns the factorial of?a?. Valid?a?is [0..20].",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cbrt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cbrt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE cbrt(DOUBLE a)","Returns the cube root of?a?double value?.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftleft","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftleft"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftleft(T a, INT b)","Bitwise left shift. Shifts a b positions to the left. Returns int for tinyint, smallint and int a. 
Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftright","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftright"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftright(T a, INT b)","Bitwise right shift. Shifts a b positions to the right. Returns int for tinyint, smallint and int a. Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftrightunsigned","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftrightunsigned"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftrightunsigned(T a, INT b)","Bitwise unsigned right shift. Shifts a b positions to the right. Returns int for tinyint, smallint and int a. Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","greatest","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "greatest"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T greatest(T v1, T v2, ...)","Returns the greatest value of the list of values. 
Fixed to return NULL when one or more arguments are NULL, and strict type restriction relaxed, consistent with > operator.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","least","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "least"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T least(T v1, T v2, ...)","Returns the least value of the list of values. Fixed to return NULL when one or more arguments are NULL, and strict type restriction relaxed, consistent with < operator.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","width_bucket","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "width_bucket"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","INT width_bucket(NUMERIC expr, NUMERIC min_value, NUMERIC max_value, INT num_buckets)","Returns an integer between 0 and num_buckets+1 by mapping expr into the ith equally sized bucket. Buckets are made by dividing [min_value, max_value] into equally sized regions. If expr < min_value, return 1, if expr > max_value return num_buckets+1.",now(),"",now());
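+-- Note: every UDF in this seed file is registered with the same three
+-- statements: insert the linkis_ps_udf_baseinfo row, capture its generated id
+-- into the MySQL session variable @ps_udf_baseinfo_id (select
+-- @ps_udf_baseinfo_id := id ...), then insert the matching
+-- linkis_ps_udf_version row keyed by that id. Category tree ids are resolved
+-- the same way through @ps_udf_tree_id.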
+-- Date functions (日期函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","日期函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="日期函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column last_day(Column e)","Given a date column, returns the last day of the month which the given date belongs to.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofyear","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofyear"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column dayofyear(Column e)","Extracts the day of the year as an integer from a given date/timestamp/string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_unixtime","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_unixtime"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string from_unixtime(bigint unixtime[, string format])","Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string representing the timestamp of that moment in the current system time zone in the format of 1970-01-01 00:00:00.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp()","Gets current Unix timestamp in seconds. This function is not deterministic and its value is not fixed for the scope of a query execution, therefore prevents proper optimization of queries - this has been deprecated since 2.0 in favour of CURRENT_TIMESTAMP",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp(string date)","Converts time string in format yyyy-MM-dd HH:mm:ss to Unix timestamp (in seconds), using the default timezone and the default locale, return 0 if fail: unix_timestamp('2009-03-20 11:30:01') = 1237573801",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp(string date, string pattern)","Convert time string with given pattern to Unix time stamp (in seconds), return 0 if fail: unix_timestamp('2009-03-20', 'yyyy-MM-dd') = 1237532400.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_date","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_date"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string to_date(string timestamp)","Returns
the date part of a timestamp string: to_date(1970-01-01 00:00:00) = 1970-01-01. Returns a date object.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","year","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "year"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int year(string date)","Returns the year part of a date or a timestamp string: year(1970-01-01 00:00:00) = 1970, year(1970-01-01) = 1970.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","quarter","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "quarter"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int quarter(date/timestamp/string)","Returns the quarter of the year for a date, timestamp, or string in the range 1 to 4. Example: quarter('2015-04-08') = 2.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","month","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "month"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int month(string date)","Returns the month part of a date or a timestamp string: month(1970-11-01 00:00:00) = 11, month(1970-11-01) = 11.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int day(string date)","Returns the day part of a date or a timestamp string: day('1970-11-01 00:00:00') = 1, day('1970-11-01') = 1",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofmonth","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofmonth"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int dayofmonth(date)","Returns the day part of a date or a timestamp string: dayofmonth('1970-11-01 00:00:00') = 1, dayofmonth('1970-11-01') = 1.",now(),"",now()); +INSERT INTO
linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hour","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hour"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int hour(string date)","Returns the hour of the timestamp: hour('2009-07-30 12:58:59') = 12, hour('12:58:59') = 12.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","minute","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "minute"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int minute(string date)","Returns the minute of the timestamp.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","second","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "second"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int second(string date)","Returns the second of the timestamp.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","weekofyear","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "weekofyear"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int weekofyear(string date)","Returns the week number of a timestamp string: weekofyear(1970-11-01 00:00:00) = 44, weekofyear(1970-11-01) = 44.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","extract","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "extract"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int extract(field FROM source)","Retrieve fields such as days or hours from source. Source must be a date, timestamp, interval or a string that can be converted into either a date or timestamp. 
Supported fields include: day, dayofweek, hour, minute, month, quarter, second, week and year.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","datediff","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "datediff"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int datediff(string enddate, string startdate)","Returns the number of days from startdate to enddate: datediff('2009-03-01', '2009-02-27') = 2.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_add","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_add"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_add(date/timestamp/string startdate, tinyint/smallint/int days)","Adds a number of days to startdate: date_add('2008-12-31', 1) = '2009-01-01'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_sub","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_sub"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_sub(date/timestamp/string startdate, tinyint/smallint/int days)","Subtracts a number of days from startdate: date_sub('2008-12-31', 1) = '2008-12-30'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_utc_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_utc_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp from_utc_timestamp({any primitive type} ts, string timezone)","Converts a timestamp* in UTC to a given timezone.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_utc_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_utc_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp to_utc_timestamp({any primitive type} ts, string timezone)","Converts a timestamp* in a given timezone to
UTC.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_date","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_date"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","date current_date","Returns the current date at the start of query evaluation . All calls of current_date within the same query return the same value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp current_timestamp","Returns the current timestamp at the start of query evaluation. All calls of current_timestamp within the same query return the same value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","add_months","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "add_months"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string add_months(string start_date, int num_months,?output_date_format)","Returns the date that is num_months after start_date. start_date is a string, date or timestamp. num_months is an integer.?If start_date is the last day of the month or if the resulting month has fewer days than the day component of start_date, then the r",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string last_day(string date)","Returns the last day of the month which the date belongs to. 
date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","next_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "next_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string next_day(string start_date, string day_of_week)","Returns the first date which is later than start_date and named as day_of_week. start_date is a string/date/timestamp. day_of_week is 2 letters, 3 letters or full name of the day of the week (e.g. Mo, tue, FRIDAY). The time part of start_date is ignored.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","trunc","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "trunc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string trunc(string date, string format)","Returns date truncated to the unit specified by the format. Supported formats: MONTH/MON/MM, YEAR/YYYY/YY. Example: trunc('2015-03-17', 'MM') = 2015-03-01.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","months_between","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "months_between"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double months_between(date1, date2)","Returns number of months between dates date1 and date2. If date1 is later than date2, then the result is positive. If date1 is earlier than date2, then the result is negative. If date1 and date2 are either the same days of the month or both last days of ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_format","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_format"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_format(date/timestamp/string ts, string fmt)","Converts a date/timestamp/string to a value of string in the format specified by the date format fmt. Supported formats are Java SimpleDateFormat formats. The second argument fmt should be constant. Example: date_format('2015-04-08', 'y') = '2015'.",now(),"",now());
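+-- Observed convention in this seed data: udf_type "0" rows carry Hive-style
+-- signatures (e.g. DOUBLE sum(col), sum(DISTINCT col)), while udf_type "1"
+-- rows carry Spark Column function signatures (e.g. Column skewness(Column e)).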
+-- Aggregate functions (聚合函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","聚合函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="聚合函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sum","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column sum(Column e)","returns the sum of all values in the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","skewness","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "skewness"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column skewness(Column e)","returns the skewness of the values in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column stddev(Column e)","alias for stddev_samp.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sumDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sumDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column sumDistinct(Column e)","returns the sum of distinct values in the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","countDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "countDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column countDistinct(String columnName,String...
columnNames)","returns the number of distinct items in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column first(Column e)","returns the first value in a group. The function by default returns the first values it sees. It will return the first non-null value it sees when ignoreNulls is set to true. If all values are null, then null is returned.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","grouping_id","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "grouping_id"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column grouping_id(String colName,scala.collection.Seq colNames)","returns the level of grouping.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","grouping","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "grouping"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column grouping(String columnName)","indicates whether a specified column in a GROUP BY list is aggregated or not, returns 1 for aggregated or 0 for not aggregated in the result set.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","kurtosis","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "kurtosis"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column kurtosis(Column e)","returns the kurtosis of the values in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column last(Column e,boolean ignoreNulls)","returns the last value in a group. The function by default returns the last values it sees.
It will return the last non-null value it sees when ignoreNulls is set to true. If all values are null, then null is returned.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mean","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mean"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column mean(String columnName)","returns the average of the values in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","count","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "count"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT count([DISTINCT] col)","count(*) - Returns the total number of retrieved rows, including rows containing NULL values. count(expr) - Returns the number of rows for which the supplied expression is non-NULL. count(DISTINCT expr[, expr]) - Returns the number of rows for which the s",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sum","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sum(col), sum(DISTINCT col)","Returns the sum of the elements in the group or the sum of the distinct values of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","avg","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "avg"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE avg(col), avg(DISTINCT col)","Returns the average of the elements in the group or the average of the distinct values of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","min","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "min"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE min(col)","Returns the minimum of the column in the group.",now(),"",now()); +INSERT INTO 
linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","max","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "max"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE max(col)","Returns the maximum value of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","variance","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "variance"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE variance(col)","Returns the variance of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","var_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "var_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE var_pop(col)","Returns the variance of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","var_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "var_samp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE var_samp(col)","Returns the unbiased sample variance of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE stddev_pop(col)","Returns the standard deviation of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev_samp"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE stddev_samp(col)","Returns the unbiased sample standard deviation of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","covar_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "covar_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE covar_pop(col1, col2)","Returns the population covariance of a pair of numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","covar_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "covar_samp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE covar_samp(col1, col2)","Returns the sample covariance of a pair of a numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","corr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "corr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE corr(col1, col2)","Returns the Pearson coefficient of correlation of a pair of a numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE percentile(BIGINT col, p)","Returns the exact pth?percentile of a column in the group (does not work with floating point types). p must be between 0 and 1. NOTE: A true percentile can only be computed for integer values. 
Use PERCENTILE_APPROX if your input is non-integral.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array percentile(BIGINT col, array(p1?[, p2]...))","Returns the exact percentiles p1, p2, ... of a column in the group (does not work with floating point types). pi?must be between 0 and 1. NOTE: A true percentile can only be computed for integer values. Use PERCENTILE_APPROX if your input is non-integral.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile_approx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile_approx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE percentile_approx(DOUBLE col, p [, B])","Returns an approximate pth?percentile of a numeric column (including floating point types) in the group. The B parameter controls approximation accuracy at the cost of memory. Higher values yield better approximations, and the default is 10,000. When the ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile_approx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile_approx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array percentile_approx(DOUBLE col, array(p1?[, p2]...) 
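+-- Illustrative usage only (hypothetical table `logs` with an integer `latency_ms` column):
+-- percentile requires integer input, while percentile_approx also accepts floating-point columns.
+--   SELECT percentile(latency_ms, 0.5), percentile_approx(latency_ms, array(0.5, 0.95), 10000) FROM logs;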
[, B])","Same as above, but accepts and returns an array of percentile values instead of a single one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_avgx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_avgx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_avgx(independent, dependent)","Equivalent to avg(dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_avgy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_avgy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_avgy(independent, dependent)","Equivalent to avg(independent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_count","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_count"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_count(independent, dependent)","Returns the number of non-null pairs used to fit the linear regression line.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_intercept","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_intercept"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_intercept(independent, dependent)","Returns the y-intercept of the?linear regression line, i.e. 
the value of b in the equation dependent = a * independent + b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_r2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_r2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_r2(independent, dependent)","Returns the?coefficient of determination?for the regression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_slope","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_slope"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_slope(independent, dependent)","Returns the slope of the?linear regression line, i.e. the value of a in the equation dependent = a * independent + b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_sxx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_sxx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_sxx(independent, dependent)","Equivalent to regr_count(independent, dependent) * var_pop(dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_sxy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_sxy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_sxy(independent, dependent)","Equivalent to regr_count(independent, dependent) * covar_pop(independent, dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_syy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_syy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_syy(independent, dependent)","Equivalent to regr_count(independent, dependent) * var_pop(independent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","histogram_numeric","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "histogram_numeric"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array histogram_numeric(col, b)","Computes a histogram of a numeric column in the group using b non-uniformly spaced bins. The output is an array of size b of double-valued (x,y) coordinates that represent the bin centers and heights",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","collect_set","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "collect_set"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array collect_set(col)","Returns a set of objects with duplicate elements eliminated.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","collect_list","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "collect_list"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array collect_list(col)","Returns a list of objects with duplicates. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ntile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ntile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","INTEGER ntile(INTEGER x)","Divides an ordered partition into?x?groups called buckets and assigns a bucket number to each row in the partition. This?allows easy calculation of tertiles, quartiles, deciles, percentiles and other?common summary statistics. 
",now(),"",now()); +-- 条件判断函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","条件判断函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="条件判断函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","not","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "not"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column not(Column e)","Inversion of boolean expression,",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","when","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "when"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column when(Column condition,Object value)","Evaluates a list of conditions and returns one of multiple possible result expressions. If otherwise is not defined at the end, null is returned for unmatched conditions.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnan","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column isnan(Column e)","Return true iff the column is NaN.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nanvl","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nanvl"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column nanvl(Column col1, Column col2)","Returns col1 if it is not NaN, or col2 if col1 is NaN.Both inputs should be floating point columns (DoubleType or FloatType).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","point","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "point"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","columns (DoubleType or FloatType)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","if","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "if"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T if(boolean testCondition, T valueTrue, T valueFalseOrNull)","Returns valueTrue when testCondition is true, returns valueFalseOrNull otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnull","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnull"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean isnull( a )","Returns true if a is NULL and false otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnotnull ","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnotnull "; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean isnotnull ( a )","Returns true if a is not NULL and false otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nvl","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nvl"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T nvl(T value, T default_value)","Returns default value if value is null else returns value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","coalesce","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "coalesce"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T coalesce(T v1, T v2, ...)","Returns the first v that is not NULL, or NULL if all v's are NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nullif","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nullif"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T nullif( a, b )","Returns NULL if a=b; otherwise returns a?.Shorthand for: CASE?WHEN a = b then NULL else a",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","assert_true","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "assert_true"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","void assert_true(boolean condition)","Throw an exception if 'condition' is not true, otherwise return null . For example, select assert_true (2<1).",now(),"",now()); +-- 类型转换函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","类型转换函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="类型转换函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","binary","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "binary"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary binary(string|binary)","Casts the parameter into a binary.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cast","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cast"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Expected = to follow type cast(expr as )","Converts the results of the expression expr to . For example, cast('1' as BIGINT) will convert the string '1' to its integral representation. A null is returned if the conversion does not succeed. 
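+-- Illustrative usage only (hypothetical table `users`): the null-handling helpers compose freely:
+--   SELECT nvl(nickname, 'anonymous'), coalesce(mobile, email, 'n/a'), if(age >= 18, 'adult', 'minor') FROM users;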
+-- Type conversion functions (类型转换函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","类型转换函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="类型转换函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","binary","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "binary";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary binary(string|binary)","Casts the parameter into a binary.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cast","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cast";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","cast(expr as <type>)","Converts the results of the expression expr to <type>. For example, cast('1' as BIGINT) will convert the string '1' to its integral representation. A null is returned if the conversion does not succeed. If cast(expr as boolean) Hive returns true for a non",now(),"",now());
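+-- Illustrative usage only: a failed cast yields NULL instead of raising an error:
+--   SELECT cast('1' AS BIGINT), cast('abc' AS INT);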
Alias of col.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc_nulls_first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc_nulls_first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc_nulls_first(String columnName)","Returns a sort expression based on the descending order of the column, and null values appear before non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc_nulls_last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc_nulls_last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc_nulls_last(String columnName)","Returns a sort expression based on the descending order of the column, and null values appear after non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc(String columnName)","Returns a sort expression based on the descending order of the column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","array","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "array"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array(val1, val2, ...)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","map(key1, value1, ...)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","size","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "size"; +INSERT INTO 
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int size(Map|Array a)","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map_keys","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map_keys";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array map_keys(Map)","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map_values","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map_values";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array map_values(Map)","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","array_contains","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "array_contains";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean array_contains(Array, value)","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sort_array","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sort_array";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array sort_array(Array)","",now(),"",now());
+-- Data encryption functions (数据加密函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","数据加密函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="数据加密函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","base64","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "base64";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column base64(Column e)","Computes the BASE64 encoding of a binary column and returns it as a string column. This is the reverse of unbase64.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask(string str[, string upper[, string lower[, string number]]])","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_first_n","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_first_n";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_first_n(string str[, int n])","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_last_n","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_last_n";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_last_n(string str[, int n])","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_show_first_n","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_show_first_n";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_show_first_n(string str[, int n])","",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_show_last_n","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_show_last_n";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_show_last_n(string str[, int n])","",now(),"",now());
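+-- Illustrative usage only (hypothetical `payments.card_no` column): keep the last 4 characters visible,
+-- or replace the value with a consistent hash:
+--   SELECT mask_show_last_n(card_no, 4), mask_hash(card_no) FROM payments;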
(@ps_udf_baseinfo_id,"file://","","","","None","string mask_hash(string|char|varchar str)","",now(),"",now()); +-- 生成表函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","生成表函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="生成表函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","explode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "explode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T explode(Array|Array|Map a)","Explodes an array or map to multiple rows. Returns a row-set with a single column (col), one row for each element from the array or ?a row-set with a two columns (key,value)?,?one row for each key-value pair from the input map",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","posexplode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "posexplode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int,T posexplode(ARRAY a)","Explodes an array to multiple rows with additional positional column of?int?type (position of items in the original array, starting with 0). Returns a row-set with two columns (pos,val), one row for each element from the array.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","inline","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "inline"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn inline(ARRAY> a)","Explodes an array of structs to multiple rows.?Returns?a row-set with N columns (N = number of top level elements in the struct), one row per struct from the array. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stack","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stack"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn/r stack(int r,T1?V1,...,Tn/r?Vn)","Breaks up?n?values V1,...,Vn?into?r?rows. 
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stack","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stack";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn/r stack(int r, T1 V1, ..., Tn/r Vn)","Breaks up n values V1,...,Vn into r rows. Each row will have n/r columns. r must be constant.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","json_tuple","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "json_tuple";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string1,...,stringn json_tuple(string jsonStr, string k1, ..., string kn)","Takes a JSON string and a set of n keys, and returns a tuple of n values. This is a more efficient version of the get_json_object UDF because it can get multiple keys with just one call.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","parse_url_tuple","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "parse_url_tuple";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string1,...,stringn parse_url_tuple(string urlStr, string p1, ..., string pn)","Takes a URL string and a set of n URL parts, and returns a tuple of n values. This is similar to the parse_url() UDF but can extract multiple parts at once out of a URL. Valid part names are: HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, USERINFO, QUER",now(),"",now());
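+-- Illustrative usage only (hypothetical table `events` with an array column `tags` and a JSON string `payload`):
+--   SELECT explode(tags) FROM events;
+--   SELECT j.uid, j.action FROM events LATERAL VIEW json_tuple(payload, 'uid', 'action') j AS uid, action;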
+-- Analytic window functions (分析窗口函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","分析窗口函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="分析窗口函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dense_rank","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dense_rank";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column dense_rank()","returns the rank of rows within a window partition, without any gaps.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dense_rank ","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dense_rank ";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","dense_rank ( ) OVER ( [query_partition_clause] order_by_clause )","Returns an ascending sequence of integers, starting with 1. The output sequence produces duplicate integers for duplicate values of the ORDER BY expressions.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","first_value","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "first_value";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," first_value(expr) OVER([partition_by_clause] order_by_clause [window_clause])","Returns the expression value from the first row in the window. The return value is NULL if the input expression is NULL.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lag","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lag";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," lag(expr [, offset] [, default]) OVER ([partition_by_clause] order_by_clause)","This function returns the value of an expression using column values from a preceding row. You specify an integer offset, which designates a row position some number of rows previous to the current row. Any column references in the expression argument ref",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_value","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_value";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," last_value(expr) OVER([partition_by_clause] order_by_clause [window_clause])","Returns the expression value from the last row in the window. The return value is NULL if the input expression is NULL.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lead","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lead";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," lead(expr [, offset] [, default]) OVER([partition_by_clause] order_by_clause)","This function returns the value of an expression using column values from a following row. You specify an integer offset, which designates a row position some number of rows after the current row. Any column references in the expression argument refer ",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ntile","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ntile";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," ntile(int n) OVER([partition_by_clause] order_by_clause)","Splits the ordered rows of each partition into n slices and returns the slice number of the current row",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percent_rank","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percent_rank";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," percent_rank() OVER([partition_by_clause] order_by_clause)","(RANK of the current row within the partition - 1) / (total rows in the partition - 1)",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cume_dist","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cume_dist";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," cume_dist() OVER([partition_by_clause] order_by_clause)","Number of rows with a value less than or equal to the current value / total rows in the partition",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rank","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rank";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," rank() OVER([partition_by_clause] order_by_clause)","Returns an ascending sequence of integers, starting with 1. The output sequence produces duplicate integers for duplicate values of the ORDER BY expressions. After generating duplicate output values for the tied input values, the function increments the s",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","row_number","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "row_number";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," row_number() OVER([partition_by_clause] order_by_clause)","Returns an ascending sequence of integers, starting with 1. Starts the sequence over for each group produced by the PARTITIONED BY clause. The output sequence includes different values for duplicate input values. Therefore, the sequence never contains any",now(),"",now());
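+-- Illustrative usage only (hypothetical table `staff`): number rows within each department by salary
+-- and compare each salary to the previous one in that order:
+--   SELECT name, row_number() OVER (PARTITION BY dept ORDER BY salary DESC),
+--          lag(salary, 1) OVER (PARTITION BY dept ORDER BY salary DESC) FROM staff;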
The output sequence includes different values for duplicate input values. Therefore, the sequence never contains any",now(),"",now()); +-- 其它函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","其它函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="其它函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","spark_partition_id","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "spark_partition_id"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column spark_partition_id()","returns partition ID.This is indeterministic because it depends on data partitioning and task scheduling.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_json","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_json"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column to_json(Column e,scala.collection.immutable.Map options)","(Scala-specific) Converts a column containing a StructType into a JSON string with the specified schema. Throws an exception, in the case of an unsupported type.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","window","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "window"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column window(Column timeColumn, String windowDuration, String slideDuration)","Bucketize rows into one or more time windows given a timestamp specifying column. Window starts are inclusive but the window ends are exclusive, e.g. 12:05 will be in the window [12:05,12:10) but not in [12:00,12:05). Windows can support microsecond preci",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","approxCountDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "approxCountDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column approxCountDistinct(Column e)"," Use approx_count_distinct. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc_nulls_first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc_nulls_first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc_nulls_first(String columnName)","Returns a sort expression based on ascending order of the column, and null values return before non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc_nulls_last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc_nulls_last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc_nulls_last(String columnName)","Returns a sort expression based on ascending order of the column, and null values appear after non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc(String columnName)","Returns a sort expression based on ascending order of the column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bitwiseNOT","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bitwiseNOT"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expr(String expr)","Parses the expression string into the column that it represents, similar to DataFrame.selectExpr",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","broadcast","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "broadcast"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Dataset broadcast(Dataset df)","Marks a DataFrame as small enough for use in broadcast joins.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","callUDF","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "callUDF"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column callUDF(String udfName, scala.collection.Seq cols)","Call an user-defined function.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_json","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_json"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column from_json(Column e,StructType schema,scala.collection.immutable.Map options)","(Scala-specific) Parses a column containing a JSON string into a StructType with the specified schema. Returns null, in the case of an unparseable string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lit","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lit"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column lit(Object literal)","Creates a Column of literal value.The passed in object is returned directly if it is already a Column. If the object is a Scala Symbol, it is converted into a Column also. Otherwise, a new Column is created to represent the literal value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","md5","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "md5"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string md5(string/binary)","Calculates an MD5 128-bit checksum for the string or binary . The value is returned as a string of 32 hex digits, or NULL if the argument was NULL. Example: md5('ABC') = '902fbdd2b1df0c4f70b4a5d23525e932'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha1","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha1"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha1(string/binary)","Calculates the SHA-1 digest for string or binary and returns the value as a hex string . 
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha(string/binary)","Calculates the SHA-1 digest for string or binary and returns the value as a hex string . Example: sha1('ABC') = '3c01bdbb26f358bab27f267924aa2c9a03fcfdb8'.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha2","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha2";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha2(string/binary, int)","Calculates the SHA-2 family of hash functions (SHA-224, SHA-256, SHA-384, and SHA-512) . The first argument is the string or binary to be hashed. The second argument indicates the desired bit length of the result, which must have a value of 224, 256, 384,",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","reflect","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "reflect";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","varies reflect(class, method[, arg1[, arg2..]])","Calls a Java method by matching the argument signature, using reflection. ",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","crc32","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "crc32";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint crc32(string/binary)","Computes a cyclic redundancy check value for string or binary argument and returns bigint value . Example: crc32('ABC') = 2743272264.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","aes_decrypt","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "aes_decrypt";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary aes_decrypt(input binary, key string/binary)","Decrypt input using AES . Key lengths of 128, 192 or 256 bits can be used. 192 and 256 bits keys can be used if Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files are installed. If either argument is NULL or the key length is n",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","aes_encrypt","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "aes_encrypt";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary aes_encrypt(input string/binary, key string/binary)","Encrypt input using AES . Key lengths of 128, 192 or 256 bits can be used. 192 and 256 bits keys can be used if Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files are installed. If either argument is NULL or the key length is n",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hash","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hash";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int hash(a1[, a2...])","Returns a hash value of the arguments.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_database","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_database";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string current_database()","Returns current database name .",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_user","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_user";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string current_user()","Returns current user name from the configured authenticator manager. Could be the same as the user provided when connecting, but with some authentication managers (for example HadoopDefaultAuthenticator) it could be different.",now(),"",now());
current_user()","Returns current user name from the configured authenticator manager?. Could be the same as the user provided when connecting, but with some authentication managers (for example HadoopDefaultAuthenticator) it could be different.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","logged_in_user","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "logged_in_user"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string logged_in_user()","Returns current user name from the session state. This is the username provided when connecting to Hive.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","version","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "version"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string version()","Returns the Hive version. The string contains 2 fields, the first being a build number and the second being a build hash. Example: select version(); might return 2.1.0.2.5.0.0-1245 r027527b9c5ce1a3d7d0b6d2e6de2378fb0c39232. Actual results will depend on y",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","java_method","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "java_method"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","varies java_method(class, method[, arg1[, arg2..]])","Synonym for?reflect. ",now(),"",now()); + + + + + + + diff --git a/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql b/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql index 6d78b7b670..19c23b116c 100644 --- a/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql +++ b/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql @@ -15,4 +15,72 @@ * limitations under the License. 
diff --git a/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql b/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql
index 6d78b7b670..19c23b116c 100644
--- a/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql
+++ b/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql
@@ -15,4 +15,72 @@
  * limitations under the License.
  */
-ALTER TABLE `linkis_cg_manager_label` MODIFY COLUMN label_key varchar(50);
\ No newline at end of file
+ALTER TABLE `linkis_cg_manager_label` MODIFY COLUMN label_key varchar(50);
+ALTER TABLE linkis_ps_udf_user_load ADD CONSTRAINT uniq_uid_uname UNIQUE (`udf_id`, `user_name`);
+ALTER TABLE linkis_ps_bml_resources ADD CONSTRAINT uniq_rid_eflag UNIQUE (`resource_id`, `enable_flag`);
+
+
+ALTER TABLE linkis_ps_configuration_config_key ADD UNIQUE uniq_key_ectype (`key`,`engine_conn_type`);
+
+ALTER TABLE linkis_ps_configuration_config_key modify column engine_conn_type varchar(50) DEFAULT '' COMMENT 'engine type, such as spark, hive etc';
+
+ALTER TABLE linkis_ps_common_lock ADD COLUMN locker VARCHAR(255) NOT NULL COMMENT 'locker';
+
+ALTER TABLE linkis_ps_configuration_config_key ADD column template_required tinyint(1) DEFAULT 0 COMMENT 'template required: 0 optional / 1 required';
+ALTER TABLE linkis_ps_configuration_config_key ADD column `boundary_type` int(2) NOT NULL COMMENT '0 none / 1 with min / 2 with max / 3 min and max both';
+ALTER TABLE linkis_ps_configuration_config_value modify COLUMN config_value varchar(500);
+
+DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`;
+CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_template_config_key` (
+  `id` BIGINT(20) NOT NULL AUTO_INCREMENT,
+  `template_name` VARCHAR(200) NOT NULL COMMENT 'Configuration template name (redundant storage)',
+  `template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party',
+  `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key',
+  `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value',
+  `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value',
+  `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)',
+  `validate_range` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Validation regex (reserved)',
+  `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? Reserved Y/N',
+  `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator',
+  `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+  `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by',
+  `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
+  PRIMARY KEY (`id`),
+  UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`),
+  UNIQUE INDEX `uniq_tname_kid` (`template_name`, `key_id`)
+  )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
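Rows in this table bind a template value to an existing key in linkis_ps_configuration_config_key via key_id, uniquely per template uuid and per template name. A hedged usage sketch; the template name, uuid, and config key below are invented for illustration and are not part of this upgrade script:

-- Hypothetical example only.
INSERT INTO linkis_ps_configuration_template_config_key
  (template_name, template_uuid, key_id, config_value, max_value, create_by)
SELECT 'spark-default-template', '00000000-0000-0000-0000-000000000001', k.id, '4g', '8g', 'sys'
FROM linkis_ps_configuration_config_key k
WHERE k.`key` = 'spark.executor.memory' AND k.engine_conn_type = 'spark';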
+
+DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`;
+CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_limit_for_user` (
+  `id` BIGINT(20) NOT NULL AUTO_INCREMENT,
+  `user_name` VARCHAR(50) NOT NULL COMMENT 'username',
+  `combined_label_value` VARCHAR(128) NOT NULL COMMENT 'Combined label combined_userCreator_engineType such as hadoop-IDE,spark-2.4.3',
+  `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key',
+  `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value',
+  `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value',
+  `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)',
+  `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party',
+  `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? Reserved Y/N',
+  `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator',
+  `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+  `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by',
+  `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
+  PRIMARY KEY (`id`),
+  UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`)
+  )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
+
+DROP TABLE IF EXISTS `linkis_ps_configutation_lm_across_cluster_rule`;
+CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule (
+  id INT AUTO_INCREMENT COMMENT 'Rule ID, auto-increment primary key',
+  cluster_name char(32) NOT NULL COMMENT 'Cluster name, cannot be empty',
+  creator char(32) NOT NULL COMMENT 'Creator, cannot be empty',
+  username char(32) NOT NULL COMMENT 'User, cannot be empty',
+  create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time, cannot be empty',
+  create_by char(32) NOT NULL COMMENT 'Creator, cannot be empty',
+  update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Modification time, cannot be empty',
+  update_by char(32) NOT NULL COMMENT 'Updater, cannot be empty',
+  rules varchar(256) NOT NULL COMMENT 'Rule content, cannot be empty',
+  is_valid VARCHAR(2) DEFAULT 'N' COMMENT 'Is it valid Y/N',
+  PRIMARY KEY (id),
+  UNIQUE KEY idx_creator_username (creator, username)
+  ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;
\ No newline at end of file
diff --git a/linkis-dist/package/sbin/common.sh b/linkis-dist/package/sbin/common.sh
index 6a9acb207f..f3f0555933 100644
--- a/linkis-dist/package/sbin/common.sh
+++ b/linkis-dist/package/sbin/common.sh
@@ -27,6 +27,8 @@ NC='\033[0m' # No Color
 GREEN='\033[0;32m'
 #used as: echo -e "Apache ${RED}Linkis ${NC} Test \n"
+export DISCOVERY=EUREKA
+
 function isLocal(){
    if [ "$1" == "127.0.0.1" ];then
      return 0
diff --git a/linkis-dist/package/sbin/ext/linkis-et-monitor b/linkis-dist/package/sbin/ext/linkis-et-monitor
new file mode 100644
index 0000000000..e0c78c487d
--- /dev/null
+++ b/linkis-dist/package/sbin/ext/linkis-et-monitor
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# description: monitor start cmd
+#
+
+
+source $LINKIS_CONF_DIR/linkis-env.sh
+export SERVER_SUFFIX="linkis-extensions/linkis-et-monitor"
+
+export SERVER_CLASS=org.apache.linkis.monitor.LinksMonitorApplication
+
+if test -z "$MONITOR_HEAP_SIZE"
+  then
+    if test -z "$SERVER_HEAP_SIZE"
+    then
+      export SERVER_HEAP_SIZE="512M"
+    fi
+else
+  export SERVER_HEAP_SIZE=$MONITOR_HEAP_SIZE
+fi
+
+#export DEBUG_PORT=
+
+export COMMON_START_BIN=$LINKIS_HOME/sbin/ext/linkis-common-start
+if [[ ! -f "${COMMON_START_BIN}" ]]; then
+    echo "The $COMMON_START_BIN does not exist!"
+ exit 1 +else + sh $COMMON_START_BIN +fi \ No newline at end of file diff --git a/linkis-dist/package/sbin/linkis-start-all.sh b/linkis-dist/package/sbin/linkis-start-all.sh index c9bd380619..55c1ba684e 100644 --- a/linkis-dist/package/sbin/linkis-start-all.sh +++ b/linkis-dist/package/sbin/linkis-start-all.sh @@ -131,9 +131,11 @@ sleep 3 } #linkis-mg-eureka -export SERVER_NAME="mg-eureka" -SERVER_IP=$EUREKA_INSTALL_IP -checkServer +if [ "$DISCOVERY" == "EUREKA" ]; then + export SERVER_NAME="mg-eureka" + SERVER_IP=$EUREKA_INSTALL_IP + checkServer +fi #linkis-mg-gateway diff --git a/linkis-dist/package/sbin/linkis-stop-all.sh b/linkis-dist/package/sbin/linkis-stop-all.sh index a560075a95..4302cf7af1 100644 --- a/linkis-dist/package/sbin/linkis-stop-all.sh +++ b/linkis-dist/package/sbin/linkis-stop-all.sh @@ -103,8 +103,10 @@ SERVER_IP=$MANAGER_INSTALL_IP stopApp #linkis-mg-eureka -export SERVER_NAME="mg-eureka" -SERVER_IP=$EUREKA_INSTALL_IP -stopApp +if [ "$DISCOVERY" == "EUREKA" ]; then + export SERVER_NAME="mg-eureka" + SERVER_IP=$EUREKA_INSTALL_IP + stopApp +fi echo "stop-all shell script executed completely" diff --git a/linkis-dist/release-docs/licenses/LICENSE-client.txt b/linkis-dist/release-docs/licenses/LICENSE-client.txt new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/linkis-dist/release-docs/licenses/LICENSE-client.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/linkis-dist/release-docs/licenses/LICENSE-jts-core.txt b/linkis-dist/release-docs/licenses/LICENSE-jts-core.txt new file mode 100644 index 0000000000..e55f34467e --- /dev/null +++ b/linkis-dist/release-docs/licenses/LICENSE-jts-core.txt @@ -0,0 +1,277 @@ +Eclipse Public License - v 2.0 + + THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE + PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION + OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + + a) in the case of the initial Contributor, the initial content + Distributed under this Agreement, and + + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + where such changes and/or additions to the Program originate from + and are Distributed by that particular Contributor. 
A Contribution + "originates" from a Contributor if it was added to the Program by + such Contributor itself or anyone acting on such Contributor's behalf. + Contributions do not include changes or additions to the Program that + are not Modified Works. + +"Contributor" means any person or entity that Distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which +are necessarily infringed by the use or sale of its Contribution alone +or when combined with the Program. + +"Program" means the Contributions Distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement +or any Secondary License (as applicable), including Contributors. + +"Derivative Works" shall mean any work, whether in Source Code or other +form, that is based on (or derived from) the Program and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. + +"Modified Works" shall mean any work in Source Code or other form that +results from an addition to, deletion from, or modification of the +contents of the Program, including, for purposes of clarity any new file +in Source Code form that contains any contents of the Program. Modified +Works shall not include works that contain only declarations, +interfaces, types, classes, structures, or files of the Program solely +in each case in order to link to, bind by name, or subclass the Program +or Modified Works thereof. + +"Distribute" means the acts of a) distributing or b) making available +in any manner that enables the transfer of a copy. + +"Source Code" means the form of a Program preferred for making +modifications, including but not limited to software source code, +documentation source, and configuration files. + +"Secondary License" means either the GNU General Public License, +Version 2.0, or any later versions of that license, including any +exceptions or additional permissions as identified by the initial +Contributor. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free copyright + license to reproduce, prepare Derivative Works of, publicly display, + publicly perform, Distribute and sublicense the Contribution of such + Contributor, if any, and such Derivative Works. + + b) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free patent + license under Licensed Patents to make, use, sell, offer to sell, + import and otherwise transfer the Contribution of such Contributor, + if any, in Source Code or other form. This patent license shall + apply to the combination of the Contribution and the Program if, at + the time the Contribution is added by the Contributor, such addition + of the Contribution causes such combination to be covered by the + Licensed Patents. The patent license shall not apply to any other + combinations which include the Contribution. No hardware per se is + licensed hereunder. + + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the + patent or other intellectual property rights of any other entity. 
+ Each Contributor disclaims any liability to Recipient for claims + brought by any other entity based on infringement of intellectual + property rights or otherwise. As a condition to exercising the + rights and licenses granted hereunder, each Recipient hereby + assumes sole responsibility to secure any other intellectual + property rights needed, if any. For example, if a third party + patent license is required to allow Recipient to Distribute the + Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + + d) Each Contributor represents that to its knowledge it has + sufficient copyright rights in its Contribution, if any, to grant + the copyright license set forth in this Agreement. + + e) Notwithstanding the terms of any Secondary License, no + Contributor makes additional grants to any Recipient (other than + those set forth in this Agreement) as a result of such Recipient's + receipt of the Program under the terms of a Secondary License + (if permitted under the terms of Section 3). + +3. REQUIREMENTS + +3.1 If a Contributor Distributes the Program in any form, then: + + a) the Program must also be made available as Source Code, in + accordance with section 3.2, and the Contributor must accompany + the Program with a statement that the Source Code for the Program + is available under this Agreement, and informs Recipients how to + obtain it in a reasonable manner on or through a medium customarily + used for software exchange; and + + b) the Contributor may Distribute the Program under a license + different than this Agreement, provided that such license: + i) effectively disclaims on behalf of all other Contributors all + warranties and conditions, express and implied, including + warranties or conditions of title and non-infringement, and + implied warranties or conditions of merchantability and fitness + for a particular purpose; + + ii) effectively excludes on behalf of all other Contributors all + liability for damages, including direct, indirect, special, + incidental and consequential damages, such as lost profits; + + iii) does not attempt to limit or alter the recipients' rights + in the Source Code under section 3.2; and + + iv) requires any subsequent distribution of the Program by any + party to be under a license that satisfies the requirements + of this section 3. + +3.2 When the Program is Distributed as Source Code: + + a) it must be made available under this Agreement, or if the + Program (i) is combined with other material in a separate file or + files made available under a Secondary License, and (ii) the initial + Contributor attached to the Source Code the notice described in + Exhibit A of this Agreement, then the Program may be made available + under the terms of such Secondary Licenses, and + + b) a copy of this Agreement must be included with each copy of + the Program. + +3.3 Contributors may not remove or alter any copyright, patent, +trademark, attribution notices, disclaimers of warranty, or limitations +of liability ("notices") contained within the Program from any copy of +the Program which they Distribute, provided that Contributors may add +their own appropriate notices. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities +with respect to end users, business partners and the like. 
While this +license is intended to facilitate the commercial use of the Program, +the Contributor who includes the Program in a commercial product +offering should do so in a manner which does not create potential +liability for other Contributors. Therefore, if a Contributor includes +the Program in a commercial product offering, such Contributor +("Commercial Contributor") hereby agrees to defend and indemnify every +other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits +and other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such +Commercial Contributor in connection with its distribution of the Program +in a commercial product offering. The obligations in this section do not +apply to any claims or Losses relating to any actual or alleged +intellectual property infringement. In order to qualify, an Indemnified +Contributor must: a) promptly notify the Commercial Contributor in +writing of such claim, and b) allow the Commercial Contributor to control, +and cooperate with the Commercial Contributor in, the defense and any +related settlement negotiations. The Indemnified Contributor may +participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial +product offering, Product X. That Contributor is then a Commercial +Contributor. If that Commercial Contributor then makes performance +claims, or offers warranties related to Product X, those performance +claims and warranties are such Commercial Contributor's responsibility +alone. Under this section, the Commercial Contributor would have to +defend claims against the other Contributors related to those performance +claims and warranties, and if a court requires any other Contributor to +pay any damages as a result, the Commercial Contributor must pay +those damages. + +5. NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF +TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR +PURPOSE. Each Recipient is solely responsible for determining the +appropriateness of using and distributing the Program and assumes all +risks associated with its exercise of rights under this Agreement, +including but not limited to the risks and costs of program errors, +compliance with applicable laws, damage to or loss of data, programs +or equipment, and unavailability or interruption of operations. + +6. DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS +SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST +PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + +7. 
GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of +the remainder of the terms of this Agreement, and without further +action by the parties hereto, such provision shall be reformed to the +minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity +(including a cross-claim or counterclaim in a lawsuit) alleging that the +Program itself (excluding combinations of the Program with other software +or hardware) infringes such Recipient's patent(s), then such Recipient's +rights granted under Section 2(b) shall terminate as of the date such +litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it +fails to comply with any of the material terms or conditions of this +Agreement and does not cure such failure in a reasonable period of +time after becoming aware of such noncompliance. If all Recipient's +rights under this Agreement terminate, Recipient agrees to cease use +and distribution of the Program as soon as reasonably practicable. +However, Recipient's obligations under this Agreement and any licenses +granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, +but in order to avoid inconsistency the Agreement is copyrighted and +may only be modified in the following manner. The Agreement Steward +reserves the right to publish new versions (including revisions) of +this Agreement from time to time. No one other than the Agreement +Steward has the right to modify this Agreement. The Eclipse Foundation +is the initial Agreement Steward. The Eclipse Foundation may assign the +responsibility to serve as the Agreement Steward to a suitable separate +entity. Each new version of the Agreement will be given a distinguishing +version number. The Program (including Contributions) may always be +Distributed subject to the version of the Agreement under which it was +received. In addition, after a new version of the Agreement is published, +Contributor may elect to Distribute the Program (including its +Contributions) under the new version. + +Except as expressly stated in Sections 2(a) and 2(b) above, Recipient +receives no rights or licenses to the intellectual property of any +Contributor under this Agreement, whether expressly, by implication, +estoppel or otherwise. All rights in the Program not expressly granted +under this Agreement are reserved. Nothing in this Agreement is intended +to be enforceable by any entity that is not a Contributor or Recipient. +No third-party beneficiary rights are created under this Agreement. + +Exhibit A - Form of Secondary Licenses Notice + +"This Source Code may also be made available under the following +Secondary Licenses when the conditions for such availability set forth +in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), +version(s), and exceptions or additional permissions here}." + + Simply including a copy of this Agreement, including this Exhibit A + is not sufficient to license the Source Code under Secondary Licenses. + + If it is not possible or desirable to put the notice in a particular + file, then You may include the notice in a location (such as a LICENSE + file in a relevant directory) where a recipient would be likely to + look for such a notice. + + You may add additional accurate notices of copyright ownership. 
\ No newline at end of file diff --git a/linkis-dist/src/main/assembly/distribution.xml b/linkis-dist/src/main/assembly/distribution.xml index 57fd07d4d6..a506c99b18 100644 --- a/linkis-dist/src/main/assembly/distribution.xml +++ b/linkis-dist/src/main/assembly/distribution.xml @@ -261,6 +261,21 @@ + + + + + + ../linkis-extensions/linkis-et-monitor/target/out/lib + + + linkis-package/lib/linkis-extensions/linkis-et-monitor + + + **/* + + + diff --git a/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala b/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala index 6b521dceed..bcd721c162 100644 --- a/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala +++ b/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala @@ -38,7 +38,7 @@ object FlinkEnvConfiguration { val FLINK_DIST_JAR_PATH = CommonVars( "flink.dist.jar.path", - FLINK_HOME.getValue + s"/lib/flink-dist_2.11-${FLINK_VERSION.getValue}.jar" + FLINK_HOME.getValue + s"/lib/flink-dist-${FLINK_VERSION.getValue}.jar" ) val FLINK_PROVIDED_LIB_PATH = CommonVars("flink.lib.path", "") @@ -58,7 +58,9 @@ object FlinkEnvConfiguration { "The local lib path of each user in Flink EngineConn." ) - val FLINK_SHIP_DIRECTORIES = CommonVars("flink.yarn.ship-directories", "") + val FLINK_SHIP_DIRECTORIES = + CommonVars("flink.yarn.ship-directories", FLINK_HOME.getValue + "/lib") + val FLINK_SHIP_REMOTE_DIRECTORIES = CommonVars("flink.yarn.remote.ship-directories", "") val FLINK_CHECK_POINT_ENABLE = CommonVars("flink.app.checkpoint.enable", false) diff --git a/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala b/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala index 1c6db3bba9..1b9759d847 100644 --- a/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala +++ b/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala @@ -108,7 +108,13 @@ class FlinkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging val flinkHome = FLINK_HOME.getValue(options) val flinkConfDir = FLINK_CONF_DIR.getValue(options) val flinkProvidedLibPath = FLINK_PROVIDED_LIB_PATH.getValue(options) - val flinkDistJarPath = FLINK_DIST_JAR_PATH.getValue(options) + val flinkVersion = FlinkEnvConfiguration.FLINK_VERSION.getValue(options) + var flinkDistJarPath = FLINK_DIST_JAR_PATH.getValue(options) + if ( + StringUtils.isNotBlank(flinkVersion) && flinkVersion.equalsIgnoreCase(FLINK_1_12_2_VERSION) + ) { + flinkDistJarPath = flinkDistJarPath.replaceFirst("flink-dist", "flink-dist_2.11") + } // Local lib path val providedLibDirsArray = FLINK_LIB_LOCAL_PATH.getValue(options).split(",") // Ship directories @@ -126,7 +132,6 @@ class FlinkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging ) } otherParams.put(GovernanceCommonConf.EC_APP_MANAGE_MODE.key, flinkClientType.toLowerCase()) - val flinkVersion = FlinkEnvConfiguration.FLINK_VERSION.getValue(options) FlinkVersionThreadLocal.setFlinkVersion(flinkVersion) val context = 
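      // Note on the flink-dist handling above: the new FLINK_DIST_JAR_PATH default assumes
      // the unsuffixed flink-dist-<version>.jar name used by newer Flink distributions
      // (Flink dropped the Scala suffix as of 1.15), while the FLINK_1_12_2_VERSION branch
      // swaps back to the Scala-suffixed flink-dist_2.11-<version>.jar layout that
      // Flink 1.12.x ships.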
new EnvironmentContext( defaultEnv, diff --git a/linkis-engineconn-plugins/hbase/hbase-core/pom.xml b/linkis-engineconn-plugins/hbase/hbase-core/pom.xml index cabc4cde1e..28e79f0a79 100644 --- a/linkis-engineconn-plugins/hbase/hbase-core/pom.xml +++ b/linkis-engineconn-plugins/hbase/hbase-core/pom.xml @@ -31,6 +31,198 @@ org.apache.linkis hbase-shims-${hbase.version} ${project.version} + + + com.google.inject + guice + + + + commons-codec + commons-codec + + + commons-io + commons-io + + + org.apache.commons + commons-lang3 + + + org.apache.commons + commons-math3 + + + commons-lang + commons-lang + + + commons-cli + commons-cli + + + commons-collections + commons-collections + + + commons-httpclient + commons-httpclient + + + org.apache.commons + commons-math + + + commons-logging + commons-logging + + + commons-lang + commons-lang + + + commons-beanutils + commons-beanutils-core + + + commons-configuration + commons-configuration + + + commons-fileupload + commons-fileupload + + + org.apache.commons + commons-compress + + + org.apache.commons + commons-crypto + + + org.apache.hadoop + hadoop-common + + + org.apache.hadoop + hadoop-auth + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + javax.ws.rs + jsr311-api + + + org.mortbay.jetty + jetty + + + org.mortbay.jetty + jetty-util + + + javax.servlet + javax.servlet-api + + + org.glassfish.web + javax.servlet.jsp + + + javax.servlet.jsp + javax.servlet.jsp-api + + + + org.jruby.jcodings + jcodings + + + org.jruby.joni + joni + + + org.mortbay.jetty + jsp-api-2.1 + + + org.mortbay.jetty + servlet-api-2.5 + + + xml-apis + xml-apis + + + xml-apis + xml-apis-ext + + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-server + + + org.glassfish.jersey.containers + jersey-container-servlet-core + + + + org.mortbay.jetty + jsp-2.1 + + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider + + + com.fasterxml.jackson.module + jackson-module-jaxb-annotations + + + com.fasterxml.jackson.core + jackson-annotations + + + org.codehaus.jackson + jackson-mapper-asl + + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-jaxrs + + + + log4j + log4j + + + org.slf4j + slf4j-api + + diff --git a/linkis-engineconn-plugins/hbase/hbase-core/src/main/scala/org/apache/linkis/manager/engineplugin/hbase/exception/ExecutorInitException.java b/linkis-engineconn-plugins/hbase/hbase-core/src/main/java/org/apache/linkis/manager/engineplugin/hbase/exception/ExecutorInitException.java similarity index 100% rename from linkis-engineconn-plugins/hbase/hbase-core/src/main/scala/org/apache/linkis/manager/engineplugin/hbase/exception/ExecutorInitException.java rename to linkis-engineconn-plugins/hbase/hbase-core/src/main/java/org/apache/linkis/manager/engineplugin/hbase/exception/ExecutorInitException.java diff --git a/linkis-engineconn-plugins/hbase/hbase-shims-1.2.0/pom.xml b/linkis-engineconn-plugins/hbase/hbase-shims-1.2.0/pom.xml index e05665786f..46bcdd279f 100644 --- a/linkis-engineconn-plugins/hbase/hbase-shims-1.2.0/pom.xml +++ b/linkis-engineconn-plugins/hbase/hbase-shims-1.2.0/pom.xml @@ -43,22 +43,6 @@ hbase-server ${hbase.version} - - commons-codec - commons-codec - - - commons-io - commons-io - - - org.apache.commons - commons-lang3 - - - org.apache.commons - commons-math3 - org.apache.hadoop hadoop-common @@ -72,29 +56,13 @@ hadoop-mapreduce-client-core - javax.ws.rs - jsr311-api + log4j + log4j 
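<!-- Presumed structure: each groupId/artifactId pair listed in these dependency sections
     is an <exclusion> entry, whose full Maven form is:
     <exclusion>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
     </exclusion>
-->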
org.mortbay.jetty jetty - - org.mortbay.jetty - jetty-util - - - javax.servlet - javax.servlet-api - - - org.glassfish.web - javax.servlet.jsp - - - javax.servlet.jsp - javax.servlet.jsp-api - org.jruby.jcodings jcodings diff --git a/linkis-engineconn-plugins/hbase/hbase-shims-1.4.3/pom.xml b/linkis-engineconn-plugins/hbase/hbase-shims-1.4.3/pom.xml index a687caeecd..218b77d813 100644 --- a/linkis-engineconn-plugins/hbase/hbase-shims-1.4.3/pom.xml +++ b/linkis-engineconn-plugins/hbase/hbase-shims-1.4.3/pom.xml @@ -43,22 +43,6 @@ hbase-server ${hbase.version} - - commons-codec - commons-codec - - - commons-io - commons-io - - - org.apache.commons - commons-lang3 - - - org.apache.commons - commons-math3 - org.apache.hadoop hadoop-common @@ -71,30 +55,6 @@ org.apache.hadoop hadoop-mapreduce-client-core - - javax.ws.rs - jsr311-api - - - org.mortbay.jetty - jetty - - - org.mortbay.jetty - jetty-util - - - javax.servlet - javax.servlet-api - - - org.glassfish.web - javax.servlet.jsp - - - javax.servlet.jsp - javax.servlet.jsp-api - org.jruby.jcodings jcodings @@ -103,6 +63,10 @@ org.jruby.joni joni + + org.mortbay.jetty + jetty + diff --git a/linkis-engineconn-plugins/hbase/hbase-shims-2.2.6/pom.xml b/linkis-engineconn-plugins/hbase/hbase-shims-2.2.6/pom.xml index 8e14b438d6..c2a3cf6a7b 100644 --- a/linkis-engineconn-plugins/hbase/hbase-shims-2.2.6/pom.xml +++ b/linkis-engineconn-plugins/hbase/hbase-shims-2.2.6/pom.xml @@ -43,22 +43,6 @@ hbase-server ${hbase.version} - - commons-codec - commons-codec - - - commons-io - commons-io - - - org.apache.commons - commons-lang3 - - - org.apache.commons - commons-math3 - org.apache.hadoop hadoop-common @@ -72,40 +56,39 @@ hadoop-mapreduce-client-core - javax.ws.rs - jsr311-api + org.apache.zookeeper + zookeeper - org.mortbay.jetty - jetty + org.jruby.jcodings + jcodings - org.mortbay.jetty - jetty-util + org.jruby.joni + joni - javax.servlet - javax.servlet-api + org.glassfish + javax.el org.glassfish.web javax.servlet.jsp - - javax.servlet.jsp - javax.servlet.jsp-api - - - org.jruby.jcodings - jcodings - - - org.jruby.joni - joni - + + org.apache.zookeeper + zookeeper + + + + org.apache.zookeeper + zookeeper-jute + ${zookeeper.version} + + org.apache.hbase hbase-testing-util diff --git a/linkis-engineconn-plugins/hbase/hbase-shims-2.5.3/pom.xml b/linkis-engineconn-plugins/hbase/hbase-shims-2.5.3/pom.xml index e1509b16b5..48c8356084 100644 --- a/linkis-engineconn-plugins/hbase/hbase-shims-2.5.3/pom.xml +++ b/linkis-engineconn-plugins/hbase/hbase-shims-2.5.3/pom.xml @@ -42,22 +42,6 @@ org.apache.hbase hbase-server - - commons-codec - commons-codec - - - commons-io - commons-io - - - org.apache.commons - commons-lang3 - - - org.apache.commons - commons-math3 - org.apache.hadoop hadoop-common @@ -71,33 +55,17 @@ hadoop-mapreduce-client-core - javax.ws.rs - jsr311-api - - - org.mortbay.jetty - jetty - - - org.mortbay.jetty - jetty-util + org.apache.zookeeper + zookeeper - javax.servlet - javax.servlet-api + org.glassfish + javax.el org.glassfish.web javax.servlet.jsp - - javax.servlet.jsp - javax.servlet.jsp-api - - - org.apache.zookeeper - zookeeper - org.jruby.jcodings jcodings diff --git a/linkis-engineconn-plugins/impala/src/main/scala/org/apache/linkis/engineplugin/impala/executor/ImpalaEngineConnExecutor.scala b/linkis-engineconn-plugins/impala/src/main/scala/org/apache/linkis/engineplugin/impala/executor/ImpalaEngineConnExecutor.scala index 23cd1a0e6f..97613f3f94 100644 --- 
a/linkis-engineconn-plugins/impala/src/main/scala/org/apache/linkis/engineplugin/impala/executor/ImpalaEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/impala/src/main/scala/org/apache/linkis/engineplugin/impala/executor/ImpalaEngineConnExecutor.scala @@ -17,43 +17,72 @@ package org.apache.linkis.engineplugin.impala.executor -import org.apache.commons.collections.MapUtils -import org.apache.commons.io.IOUtils -import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.exception.ExceptionUtils import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{OverloadUtils, Utils} -import org.apache.linkis.engineconn.common.password.{CommandPasswordCallback, StaticPasswordCallback} -import org.apache.linkis.engineconn.computation.executor.execute.{ConcurrentComputationExecutor, EngineExecutionContext} +import org.apache.linkis.engineconn.common.password.{ + CommandPasswordCallback, + StaticPasswordCallback +} +import org.apache.linkis.engineconn.computation.executor.execute.{ + ConcurrentComputationExecutor, + EngineExecutionContext +} import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineplugin.impala.client.{ + ExecutionListener, + ImpalaClient, + ImpalaResultSet +} import org.apache.linkis.engineplugin.impala.client.ImpalaResultSet.Row -import org.apache.linkis.engineplugin.impala.client.exception.{ImpalaEngineException, ImpalaErrorCodeSummary} +import org.apache.linkis.engineplugin.impala.client.exception.{ + ImpalaEngineException, + ImpalaErrorCodeSummary +} import org.apache.linkis.engineplugin.impala.client.protocol.{ExecProgress, ExecStatus} -import org.apache.linkis.engineplugin.impala.client.thrift.{ImpalaThriftClient, ImpalaThriftSessionFactory} -import org.apache.linkis.engineplugin.impala.client.{ExecutionListener, ImpalaClient, ImpalaResultSet} +import org.apache.linkis.engineplugin.impala.client.thrift.{ + ImpalaThriftClient, + ImpalaThriftSessionFactory +} import org.apache.linkis.engineplugin.impala.conf.ImpalaConfiguration._ import org.apache.linkis.engineplugin.impala.conf.ImpalaEngineConfig import org.apache.linkis.governance.common.paser.SQLCodeParser -import org.apache.linkis.manager.common.entity.resource.{CommonNodeResource, LoadResource, NodeResource} +import org.apache.linkis.manager.common.entity.resource.{ + CommonNodeResource, + LoadResource, + NodeResource +} import org.apache.linkis.manager.engineplugin.common.util.NodeResourceUtils import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.protocol.engine.JobProgressInfo import org.apache.linkis.rpc.Sender -import org.apache.linkis.scheduler.executer.{CompletedExecuteResponse, ErrorExecuteResponse, ExecuteResponse, SuccessExecuteResponse} +import org.apache.linkis.scheduler.executer.{ + CompletedExecuteResponse, + ErrorExecuteResponse, + ExecuteResponse, + SuccessExecuteResponse +} import org.apache.linkis.storage.domain.Column import org.apache.linkis.storage.resultset.ResultSetFactory import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord} + +import org.apache.commons.collections.MapUtils +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + import org.springframework.util.CollectionUtils +import javax.net.SocketFactory +import javax.net.ssl._ +import javax.security.auth.callback.{Callback, CallbackHandler, 
NameCallback, PasswordCallback} + import java.io.FileInputStream import java.security.KeyStore import java.util import java.util.concurrent.ConcurrentHashMap import java.util.function.Consumer -import javax.net.SocketFactory -import javax.net.ssl._ -import javax.security.auth.callback.{Callback, CallbackHandler, NameCallback, PasswordCallback} + import scala.collection.JavaConverters._ class ImpalaEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) diff --git a/linkis-engineconn-plugins/io_file/src/test/java/executor/IoEngineConnExecutorTest.java b/linkis-engineconn-plugins/io_file/src/test/java/executor/IoEngineConnExecutorTest.java new file mode 100644 index 0000000000..252ec95ab5 --- /dev/null +++ b/linkis-engineconn-plugins/io_file/src/test/java/executor/IoEngineConnExecutorTest.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package executor; + +import org.apache.linkis.common.io.FsPath; +import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext; +import org.apache.linkis.manager.engineplugin.io.executor.IoEngineConnExecutor; +import org.apache.linkis.scheduler.executer.AliasOutputExecuteResponse; +import org.apache.linkis.scheduler.executer.ExecuteResponse; +import org.apache.linkis.storage.domain.MethodEntity; +import org.apache.linkis.storage.domain.MethodEntitySerializer; + +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class IoEngineConnExecutorTest { + + @Test + public void testExecuteLine() { + // test init + IoEngineConnExecutor ioEngineConnExecutor = new IoEngineConnExecutor(1, Integer.MAX_VALUE); + EngineExecutionContext engineExecutionContext = + new EngineExecutionContext(ioEngineConnExecutor, "hadoop"); + engineExecutionContext.setJobId("jobId-1"); + Object[] objects = new Object[10]; + MethodEntity methodEntity = + new MethodEntity(0L, "file", "hadoop", "hadoop", "localhost", "init", objects); + AliasOutputExecuteResponse executeResponse = + (AliasOutputExecuteResponse) + ioEngineConnExecutor.executeLine( + engineExecutionContext, MethodEntitySerializer.serializer(methodEntity)); + Assertions.assertThat(executeResponse).isNotNull(); + Assertions.assertThat(executeResponse.alias()).isEqualTo("0"); + + // test write + String filePath = this.getClass().getResource("/testIoResult.dolphin").getFile().toString(); + FsPath fsPath = new FsPath(filePath); + String fsPathStr = MethodEntitySerializer.serializerJavaObject(fsPath); + objects = new Object[3]; + objects[0] = fsPathStr; + objects[1] = true; + objects[2] = "dolphin000000000300000000040,110000000016aGVsbG8gd29ybGQ="; + methodEntity = new MethodEntity(0L, "file", "hadoop", "hadoop", "localhost", "write", objects); + ExecuteResponse writeResponse = + 
ioEngineConnExecutor.executeLine( + engineExecutionContext, MethodEntitySerializer.serializer(methodEntity)); + System.out.println(writeResponse); + Assertions.assertThat(executeResponse).isNotNull(); + + // test read + objects = new Object[1]; + objects[0] = fsPathStr; + methodEntity = new MethodEntity(0L, "file", "hadoop", "hadoop", "localhost", "read", objects); + AliasOutputExecuteResponse readResponse = + (AliasOutputExecuteResponse) + ioEngineConnExecutor.executeLine( + engineExecutionContext, MethodEntitySerializer.serializer(methodEntity)); + Assertions.assertThat(readResponse).isNotNull(); + Assertions.assertThat(readResponse.output()) + .isEqualTo("dolphin000000000300000000040,110000000016aGVsbG8gd29ybGQ="); + } +} diff --git a/linkis-engineconn-plugins/io_file/src/test/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutorTest.java b/linkis-engineconn-plugins/io_file/src/test/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutorTest.java deleted file mode 100644 index 5dcc883114..0000000000 --- a/linkis-engineconn-plugins/io_file/src/test/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutorTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
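Annotation: the payload string exercised by the new IoEngineConnExecutorTest above ends in a base64 chunk; a quick standalone check (illustrative only, not part of the patch; class name hypothetical) suggests the trailing segment is simply "hello world":

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class DolphinPayloadCheck {
  public static void main(String[] args) {
    String payload = "dolphin000000000300000000040,110000000016aGVsbG8gd29ybGQ=";
    // The trailing 16 characters appear to be the base64-encoded cell value.
    String b64 = payload.substring(payload.length() - 16);
    System.out.println(new String(Base64.getDecoder().decode(b64), StandardCharsets.UTF_8));
    // prints: hello world
  }
}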
- */ - -package org.apache.linkis.manager.engineplugin.io.executor; - -import org.apache.linkis.common.io.FsPath; -import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor; -import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext; -import org.apache.linkis.manager.engineplugin.io.conf.IOEngineConnConfiguration; -import org.apache.linkis.manager.engineplugin.io.factory.IoEngineConnFactory; -import org.apache.linkis.scheduler.executer.AliasOutputExecuteResponse; -import org.apache.linkis.scheduler.executer.ExecuteResponse; -import org.apache.linkis.storage.domain.MethodEntity; -import org.apache.linkis.storage.domain.MethodEntitySerializer; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - - - -class IoEngineConnExecutorTest { - - @Test - public void testExecuteLine() { - //test init - IoEngineConnExecutor ioEngineConnExecutor = new IoEngineConnExecutor(1, Integer.MAX_VALUE); - EngineExecutionContext engineExecutionContext = new EngineExecutionContext(ioEngineConnExecutor, "hadoop"); - engineExecutionContext.setJobId("jobId-1"); - Object[] objects = new Object[10]; - MethodEntity methodEntity = new MethodEntity(0L, "file", "hadoop", "hadoop", - "localhost", "init", objects); - AliasOutputExecuteResponse executeResponse = (AliasOutputExecuteResponse)ioEngineConnExecutor.executeLine(engineExecutionContext, MethodEntitySerializer.serializer(methodEntity)); - Assertions.assertThat(executeResponse).isNotNull(); - Assertions.assertThat(executeResponse.alias()).isEqualTo("0"); - - //test write - String filePath = this.getClass().getResource("/testIoResult.dolphin").getFile().toString(); - FsPath fsPath = new FsPath(filePath); - String fsPathStr = MethodEntitySerializer.serializerJavaObject(fsPath); - objects = new Object[3]; - objects[0] = fsPathStr; - objects[1] = true; - objects[2] = "dolphin000000000300000000040,110000000016aGVsbG8gd29ybGQ="; - methodEntity = new MethodEntity(0L, "file", "hadoop", "hadoop", - "localhost", "write", objects); - ExecuteResponse writeResponse = ioEngineConnExecutor.executeLine(engineExecutionContext, MethodEntitySerializer.serializer(methodEntity)); - System.out.println(writeResponse); - Assertions.assertThat(executeResponse).isNotNull(); - - //test read - objects = new Object[1]; - objects[0] = fsPathStr; - methodEntity = new MethodEntity(0L, "file", "hadoop", "hadoop", - "localhost", "read", objects); - AliasOutputExecuteResponse readResponse = (AliasOutputExecuteResponse)ioEngineConnExecutor.executeLine(engineExecutionContext, MethodEntitySerializer.serializer(methodEntity)); - Assertions.assertThat(readResponse).isNotNull(); - Assertions.assertThat(readResponse.output()).isEqualTo("dolphin000000000300000000040,110000000016aGVsbG8gd29ybGQ="); - } - -} \ No newline at end of file diff --git a/linkis-engineconn-plugins/jdbc/src/main/java/org/apache/linkis/manager/engineplugin/jdbc/ConnectionManager.java b/linkis-engineconn-plugins/jdbc/src/main/java/org/apache/linkis/manager/engineplugin/jdbc/ConnectionManager.java index b9cd479457..a49613f8d1 100644 --- a/linkis-engineconn-plugins/jdbc/src/main/java/org/apache/linkis/manager/engineplugin/jdbc/ConnectionManager.java +++ b/linkis-engineconn-plugins/jdbc/src/main/java/org/apache/linkis/manager/engineplugin/jdbc/ConnectionManager.java @@ -23,16 +23,19 @@ import org.apache.linkis.manager.engineplugin.jdbc.exception.JDBCParamsIllegalException; import org.apache.linkis.manager.engineplugin.jdbc.utils.JdbcParamUtils; -import 
org.apache.commons.dbcp.BasicDataSource; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.security.UserGroupInformation; import javax.sql.DataSource; +import java.io.Closeable; import java.security.PrivilegedExceptionAction; -import java.sql.*; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; import java.text.MessageFormat; -import java.util.*; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -42,7 +45,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.linkis.manager.engineplugin.jdbc.JdbcAuthType.*; +import static org.apache.linkis.manager.engineplugin.jdbc.JdbcAuthType.USERNAME; +import static org.apache.linkis.manager.engineplugin.jdbc.JdbcAuthType.of; import static org.apache.linkis.manager.engineplugin.jdbc.errorcode.JDBCErrorCodeSummary.*; public class ConnectionManager { @@ -103,8 +107,10 @@ public void close() { } for (DataSource dataSource : this.dataSourceFactories.values()) { try { - ((BasicDataSource) dataSource).close(); - } catch (SQLException e) { + if (dataSource instanceof Closeable) { + ((Closeable) dataSource).close(); + } + } catch (Exception e) { LOG.error("Error while closing datasource...", e); } } diff --git a/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala b/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala index 336d1197f7..8a2d64fa76 100644 --- a/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/jdbc/src/main/scala/org/apache/linkis/manager/engineplugin/jdbc/executor/JDBCEngineConnExecutor.scala @@ -19,6 +19,7 @@ package org.apache.linkis.manager.engineplugin.jdbc.executor import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{OverloadUtils, Utils} +import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.{ ConcurrentComputationExecutor, EngineExecutionContext @@ -78,6 +79,8 @@ class JDBCEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) private val progressMonitors: util.Map[String, ProgressMonitor[_]] = new ConcurrentHashMap[String, ProgressMonitor[_]]() + private val connectionCache: util.Map[String, Connection] = new util.HashMap[String, Connection]() + override def init(): Unit = { logger.info("jdbc executor start init.") setCodeParser(new SQLCodeParser) @@ -87,49 +90,59 @@ class JDBCEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) } } - override def executeLine( - engineExecutorContext: EngineExecutionContext, - code: String - ): ExecuteResponse = { - val realCode = code.trim() - val taskId = engineExecutorContext.getJobId.get + override def execute(engineConnTask: EngineConnTask): ExecuteResponse = { + val executeResponse = super.execute(engineConnTask) + if (StringUtils.isNotBlank(engineConnTask.getTaskId)) { + val connection = connectionCache.remove(engineConnTask.getTaskId) + logger.info(s"remove task ${engineConnTask.getTaskId} connection") + Utils.tryAndWarn(connection.close()) + } + executeResponse + } - var properties: util.Map[String, String] = Collections.emptyMap() + private def 
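Annotation: the ConnectionManager change above replaces the hard cast to DBCP's BasicDataSource with a Closeable check, so any pool implementation that supports closing gets shut down and the rest are skipped. A minimal sketch of the same pattern (class name hypothetical):

import java.io.Closeable;
import java.util.Collection;
import javax.sql.DataSource;

public final class DataSourceCloser {

  private DataSourceCloser() {}

  // Close every DataSource that supports it; a failure on one pool
  // must not prevent the remaining pools from being closed.
  public static void closeAll(Collection<DataSource> dataSources) {
    for (DataSource dataSource : dataSources) {
      try {
        if (dataSource instanceof Closeable) {
          ((Closeable) dataSource).close();
        }
      } catch (Exception e) {
        System.err.println("Error while closing datasource: " + e.getMessage());
      }
    }
  }
}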
getConnection(engineExecutorContext: EngineExecutionContext): Connection = { - Utils.tryCatch({ - properties = getJDBCRuntimeParams(engineExecutorContext) - }) { e: Throwable => - logger.error(s"try to build JDBC runtime params error! $e") - return ErrorExecuteResponse(e.getMessage, e) + val taskId = engineExecutorContext.getJobId.orNull + if (StringUtils.isNotBlank(taskId) && connectionCache.containsKey(taskId)) { + logger.info( + s"Task ${taskId} paragraph ${engineExecutorContext.getCurrentParagraph} from cache get connection" + ) + return connectionCache.get(taskId) } - + val properties: util.Map[String, String] = getJDBCRuntimeParams(engineExecutorContext) logger.info(s"The jdbc properties is: $properties") val dataSourceName = properties.get(JDBCEngineConnConstant.JDBC_ENGINE_RUN_TIME_DS) val dataSourceMaxVersionId = properties.get(JDBCEngineConnConstant.JDBC_ENGINE_RUN_TIME_DS_MAX_VERSION_ID) logger.info( - s"The data source name is [$dataSourceName], and the jdbc client begins to run jdbc code:\n ${realCode.trim}" + s"The data source name is [$dataSourceName], and the jdbc client begins to run task ${taskId}" ) - var connection: Connection = null - var statement: Statement = null - var resultSet: ResultSet = null logger.info(s"The data source properties is $properties") - Utils.tryCatch({ - /* url + user as the cache key */ - val jdbcUrl: String = properties.get(JDBCEngineConnConstant.JDBC_URL) - val execUser: String = properties.get(JDBCEngineConnConstant.JDBC_SCRIPTS_EXEC_USER) - val proxyUser: String = properties.get(JDBCEngineConnConstant.JDBC_PROXY_USER_PROPERTY) - var dataSourceIdentifier = s"$jdbcUrl-$execUser-$proxyUser" - /* If datasource is used, use datasource name as the cache key */ - if (StringUtils.isNotBlank(dataSourceName)) { - dataSourceIdentifier = s"$dataSourceName-$dataSourceMaxVersionId" - } - connection = connectionManager.getConnection(dataSourceIdentifier, properties) - logger.info("The jdbc connection has created successfully!") - }) { e: Throwable => - logger.error(s"created data source connection error! 
$e") - return ErrorExecuteResponse("created data source connection error!", e) + /* url + user as the cache key */ + val jdbcUrl: String = properties.get(JDBCEngineConnConstant.JDBC_URL) + val execUser: String = properties.get(JDBCEngineConnConstant.JDBC_SCRIPTS_EXEC_USER) + val proxyUser: String = properties.get(JDBCEngineConnConstant.JDBC_PROXY_USER_PROPERTY) + var dataSourceIdentifier = s"$jdbcUrl-$execUser-$proxyUser" + /* If datasource is used, use datasource name as the cache key */ + if (StringUtils.isNotBlank(dataSourceName)) { + dataSourceIdentifier = s"$dataSourceName-$dataSourceMaxVersionId" + } + val connection = connectionManager.getConnection(dataSourceIdentifier, properties) + if (StringUtils.isNotBlank(taskId)) { + connectionCache.put(taskId, connection) } + connection + } + + override def executeLine( + engineExecutorContext: EngineExecutionContext, + code: String + ): ExecuteResponse = { + + val taskId = engineExecutorContext.getJobId.get + val connection: Connection = getConnection(engineExecutorContext) + var statement: Statement = null + var resultSet: ResultSet = null try { statement = connection.createStatement() @@ -167,14 +180,10 @@ class JDBCEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) } } finally { if (resultSet != null) { - Utils.tryCatch({ resultSet.close() }) { case e: SQLException => - logger.warn(e.getMessage) - } + Utils.tryAndWarn(resultSet.close()) } if (statement != null) { - Utils.tryCatch({ statement.close() }) { case e: SQLException => - logger.warn(e.getMessage) - } + Utils.tryAndWarn(statement.close()) } } } catch { @@ -182,14 +191,6 @@ class JDBCEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) logger.error(s"Cannot run $code", e) return ErrorExecuteResponse(e.getMessage, e) } finally { - if (connection != null) { - try { - if (!connection.getAutoCommit) connection.commit() - connection.close() - } catch { - case e: SQLException => logger.warn("close connection error.", e) - } - } connectionManager.removeStatement(taskId) } SuccessExecuteResponse() diff --git a/linkis-engineconn-plugins/pom.xml b/linkis-engineconn-plugins/pom.xml index 5df6526748..cbee4fa0fe 100644 --- a/linkis-engineconn-plugins/pom.xml +++ b/linkis-engineconn-plugins/pom.xml @@ -42,6 +42,7 @@ elasticsearch seatunnel hbase + nebula repl diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/KubernetesApplicationClusterDescriptorAdapter.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/KubernetesApplicationClusterDescriptorAdapter.java index ce709b2e7a..73892117ad 100644 --- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/KubernetesApplicationClusterDescriptorAdapter.java +++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/KubernetesApplicationClusterDescriptorAdapter.java @@ -40,7 +40,7 @@ public class KubernetesApplicationClusterDescriptorAdapter extends ClusterDescriptorAdapter { private static final Logger logger = - LoggerFactory.getLogger(KubernetesOperatorClusterDescriptorAdapter.class); + LoggerFactory.getLogger(KubernetesApplicationClusterDescriptorAdapter.class); protected SparkConfig sparkConfig; protected KubernetesClient client; @@ -66,7 +66,7 @@ public void deployCluster(String mainClass, String args, Map con .setJavaHome(sparkConfig.getJavaHome()) .setSparkHome(sparkConfig.getSparkHome()) 
.setMaster(sparkConfig.getK8sMasterUrl()) - .setDeployMode(sparkConfig.getDeployMode()) + .setDeployMode("cluster") .setAppName(sparkConfig.getAppName()) .setVerbose(true); this.driverPodName = generateDriverPodName(sparkConfig.getAppName()); @@ -196,12 +196,16 @@ public SparkAppHandle.State getJobState() { @Override public void close() { logger.info("Start to close job {}.", getApplicationId()); + client.close(); + if (isDisposed()) { + logger.info("Job has finished, close action return."); + return; + } PodResource sparkDriverPodResource = client.pods().inNamespace(namespace).withName(driverPodName); if (null != sparkDriverPodResource.get()) { sparkDriverPodResource.delete(); } - client.close(); } @Override diff --git a/linkis-engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py b/linkis-engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py index adbf5ecad6..9426ed867a 100644 --- a/linkis-engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py +++ b/linkis-engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py @@ -118,6 +118,7 @@ def isImportAllPackageUnderSparkSql(self): java_import(gateway.jvm, "org.apache.spark.api.java.*") java_import(gateway.jvm, "org.apache.spark.api.python.*") java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*") +java_import(gateway.jvm, "org.apache.spark.sql.api.python.*") intp = gateway.entry_point diff --git a/linkis-extensions/linkis-et-monitor/pom.xml b/linkis-extensions/linkis-et-monitor/pom.xml new file mode 100644 index 0000000000..0e5f44c076 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/pom.xml @@ -0,0 +1,112 @@ + + + + 4.0.0 + + org.apache.linkis + linkis + ${revision} + ../../pom.xml + + + linkis-et-monitor + + + + org.apache.linkis + linkis-httpclient + ${project.version} + + + org.apache.linkis + linkis-mybatis + ${project.version} + + + + org.apache.linkis + linkis-storage + ${project.version} + provided + + + org.apache.linkis + linkis-rpc + ${project.version} + provided + + + + org.apache.linkis + linkis-gateway-httpclient-support + ${project.version} + + + + org.apache.linkis + linkis-common + ${project.version} + + + + org.springframework + spring-test + test + + + + junit + junit + test + + + org.apache.linkis + linkis-computation-client + ${project.version} + + + + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + false + + false + out + false + false + + src/main/assembly/distribution.xml + + + + + make-assembly + + single + + package + + + + + + + diff --git a/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml b/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml new file mode 100644 index 0000000000..e606ed79f2 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml @@ -0,0 +1,296 @@ + + + + + linkis-et-monitor + + dir + + false + linkis-et-monitor + + + + + + lib + true + true + false + false + true + + + antlr:antlr:jar + aopalliance:aopalliance:jar + asm:asm:jar + cglib:cglib:jar + com.amazonaws:aws-java-sdk-autoscaling:jar + com.amazonaws:aws-java-sdk-core:jar + com.amazonaws:aws-java-sdk-ec2:jar + com.amazonaws:aws-java-sdk-route53:jar + com.amazonaws:aws-java-sdk-sts:jar + com.amazonaws:jmespath-java:jar + com.fasterxml.jackson.core:jackson-annotations:jar + com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar + 
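Annotation: the KubernetesApplicationClusterDescriptorAdapter hunk earlier in this patch pins the deploy mode to "cluster", meaning the Spark driver itself runs in a Kubernetes pod. The builder calls in that hunk mirror Spark's own org.apache.spark.launcher.SparkLauncher API, which can be driven as in this sketch (all values are placeholders; the patch reads them from SparkConfig):

import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

public class K8sClusterSubmitSketch {
  public static void main(String[] args) throws Exception {
    SparkAppHandle handle =
        new SparkLauncher()
            .setSparkHome("/opt/spark")
            .setMaster("k8s://https://kubernetes.example.com:6443")
            .setDeployMode("cluster") // driver runs in a pod, as the hardcoded mode enforces
            .setAppName("linkis-spark-app")
            .setAppResource("local:///opt/app/app.jar")
            .setMainClass("org.example.Main")
            .setVerbose(true)
            .startApplication();
    System.out.println("submitted, current state: " + handle.getState());
  }
}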
com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar + com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar + com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-paranamer:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + com.github.andrewoma.dexx:dexx-collections:jar + com.github.vlsi.compactmap:compactmap:jar + com.google.code.findbugs:annotations:jar + com.google.code.findbugs:jsr305:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + com.google.inject:guice:jar + com.google.protobuf:protobuf-java:jar + com.netflix.archaius:archaius-core:jar + com.netflix.eureka:eureka-client:jar + com.netflix.eureka:eureka-core:jar + com.netflix.hystrix:hystrix-core:jar + com.netflix.netflix-commons:netflix-commons-util:jar + com.netflix.netflix-commons:netflix-eventbus:jar + com.netflix.netflix-commons:netflix-infix:jar + com.netflix.netflix-commons:netflix-statistics:jar + com.netflix.ribbon:ribbon:jar + com.netflix.ribbon:ribbon-core:jar + com.netflix.ribbon:ribbon-eureka:jar + com.netflix.ribbon:ribbon-httpclient:jar + com.netflix.ribbon:ribbon-loadbalancer:jar + com.netflix.ribbon:ribbon-transport:jar + com.netflix.servo:servo-core:jar + com.ning:async-http-client:jar + com.sun.jersey.contribs:jersey-apache-client4:jar + com.sun.jersey:jersey-client:jar + com.sun.jersey:jersey-core:jar + com.sun.jersey:jersey-json:jar + com.sun.jersey:jersey-server:jar + com.sun.jersey:jersey-servlet:jar + com.sun.xml.bind:jaxb-impl:jar + com.thoughtworks.paranamer:paranamer:jar + com.thoughtworks.xstream:xstream:jar + org.apache.linkis:linkis-common:jar + org.apache.linkis:linkis-module:jar + commons-beanutils:commons-beanutils:jar + commons-beanutils:commons-beanutils-core:jar + commons-cli:commons-cli:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-configuration:commons-configuration:jar + commons-daemon:commons-daemon:jar + commons-dbcp:commons-dbcp:jar + commons-digester:commons-digester:jar + commons-httpclient:commons-httpclient:jar + commons-io:commons-io:jar + commons-jxpath:commons-jxpath:jar + commons-lang:commons-lang:jar + commons-logging:commons-logging:jar + commons-net:commons-net:jar + commons-pool:commons-pool:jar + io.micrometer:micrometer-core:jar + io.netty:netty:jar + io.netty:netty-all:jar + io.netty:netty-buffer:jar + io.netty:netty-codec:jar + io.netty:netty-codec-http:jar + io.netty:netty-common:jar + io.netty:netty-handler:jar + io.netty:netty-transport:jar + io.netty:netty-transport-native-epoll:jar + io.reactivex:rxjava:jar + io.reactivex:rxnetty:jar + io.reactivex:rxnetty-contexts:jar + io.reactivex:rxnetty-servo:jar + javax.activation:activation:jar + javax.annotation:javax.annotation-api:jar + javax.inject:javax.inject:jar + javax.servlet:javax.servlet-api:jar + javax.servlet.jsp:jsp-api:jar + javax.validation:validation-api:jar + javax.websocket:javax.websocket-api:jar + javax.ws.rs:javax.ws.rs-api:jar + javax.xml.bind:jaxb-api:jar + javax.xml.stream:stax-api:jar + joda-time:joda-time:jar + log4j:log4j:jar + mysql:mysql-connector-java:jar + net.databinder.dispatch:dispatch-core_2.11:jar + net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar + org.antlr:antlr-runtime:jar + org.antlr:stringtemplate:jar + org.apache.commons:commons-compress:jar + 
org.apache.commons:commons-math:jar + org.apache.commons:commons-math3:jar + org.apache.curator:curator-client:jar + org.apache.curator:curator-framework:jar + org.apache.curator:curator-recipes:jar + org.apache.directory.api:api-asn1-api:jar + org.apache.directory.api:api-util:jar + org.apache.directory.server:apacheds-i18n:jar + org.apache.directory.server:apacheds-kerberos-codec:jar + org.apache.hadoop:hadoop-annotations:jar + org.apache.hadoop:hadoop-auth:jar + org.apache.hadoop:hadoop-common:jar + org.apache.hadoop:hadoop-hdfs:jar + org.apache.htrace:htrace-core:jar + org.apache.httpcomponents:httpclient:jar + org.apache.httpcomponents:httpcore:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + org.apache.zookeeper:zookeeper:jar + org.aspectj:aspectjweaver:jar + org.bouncycastle:bcpkix-jdk15on:jar + org.bouncycastle:bcprov-jdk15on:jar + org.codehaus.jackson:jackson-jaxrs:jar + org.codehaus.jackson:jackson-xc:jar + org.codehaus.jettison:jettison:jar + org.codehaus.woodstox:stax2-api:jar + org.codehaus.woodstox:woodstox-core-asl:jar + org.eclipse.jetty:jetty-annotations:jar + org.eclipse.jetty:jetty-client:jar + org.eclipse.jetty:jetty-continuation:jar + org.eclipse.jetty:jetty-http:jar + org.eclipse.jetty:jetty-io:jar + org.eclipse.jetty:jetty-jndi:jar + org.eclipse.jetty:jetty-plus:jar + org.eclipse.jetty:jetty-security:jar + org.eclipse.jetty:jetty-server:jar + org.eclipse.jetty:jetty-servlet:jar + org.eclipse.jetty:jetty-servlets:jar + org.eclipse.jetty:jetty-util:jar + org.eclipse.jetty:jetty-webapp:jar + org.eclipse.jetty:jetty-xml:jar + org.eclipse.jetty.websocket:javax-websocket-client-impl:jar + org.eclipse.jetty.websocket:javax-websocket-server-impl:jar + org.eclipse.jetty.websocket:websocket-api:jar + org.eclipse.jetty.websocket:websocket-client:jar + org.eclipse.jetty.websocket:websocket-common:jar + org.eclipse.jetty.websocket:websocket-server:jar + org.eclipse.jetty.websocket:websocket-servlet:jar + org.fusesource.leveldbjni:leveldbjni-all:jar + org.glassfish.hk2:class-model:jar + org.glassfish.hk2:config-types:jar + org.glassfish.hk2.external:aopalliance-repackaged:jar + org.glassfish.hk2.external:asm-all-repackaged:jar + org.glassfish.hk2.external:bean-validator:jar + org.glassfish.hk2.external:javax.inject:jar + org.glassfish.hk2:hk2:jar + org.glassfish.hk2:hk2-api:jar + org.glassfish.hk2:hk2-config:jar + org.glassfish.hk2:hk2-core:jar + org.glassfish.hk2:hk2-locator:jar + org.glassfish.hk2:hk2-runlevel:jar + org.glassfish.hk2:hk2-utils:jar + org.glassfish.hk2:osgi-resource-locator:jar + org.glassfish.hk2:spring-bridge:jar + org.glassfish.jersey.bundles:jaxrs-ri:jar + org.glassfish.jersey.bundles.repackaged:jersey-guava:jar + org.glassfish.jersey.containers:jersey-container-servlet:jar + org.glassfish.jersey.containers:jersey-container-servlet-core:jar + org.glassfish.jersey.core:jersey-client:jar + org.glassfish.jersey.core:jersey-common:jar + org.glassfish.jersey.core:jersey-server:jar + org.glassfish.jersey.ext:jersey-entity-filtering:jar + org.glassfish.jersey.ext:jersey-spring3:jar + org.glassfish.jersey.media:jersey-media-jaxb:jar + org.glassfish.jersey.media:jersey-media-json-jackson:jar + org.glassfish.jersey.media:jersey-media-multipart:jar + org.hdrhistogram:HdrHistogram:jar + org.javassist:javassist:jar + org.json4s:json4s-ast_2.11:jar + org.json4s:json4s-core_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.jsoup:jsoup:jar + 
org.jvnet.mimepull:mimepull:jar + org.jvnet:tiger-types:jar + org.latencyutils:LatencyUtils:jar + org.mortbay.jasper:apache-el:jar + org.mortbay.jetty:jetty:jar + org.mortbay.jetty:jetty-util:jar + org.ow2.asm:asm-analysis:jar + org.ow2.asm:asm-commons:jar + org.ow2.asm:asm-tree:jar + org.reflections:reflections:jar + org.scala-lang.modules:scala-parser-combinators_2.11:jar + org.scala-lang.modules:scala-xml_2.11:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:jul-to-slf4j:jar + org.slf4j:slf4j-api:jar + org.springframework.boot:spring-boot:jar + org.springframework.boot:spring-boot-actuator:jar + org.springframework.boot:spring-boot-actuator-autoconfigure:jar + org.springframework.boot:spring-boot-autoconfigure:jar + org.springframework.boot:spring-boot-starter:jar + org.springframework.boot:spring-boot-starter-actuator:jar + org.springframework.boot:spring-boot-starter-aop:jar + org.springframework.boot:spring-boot-starter-jetty:jar + org.springframework.boot:spring-boot-starter-json:jar + org.springframework.boot:spring-boot-starter-log4j2:jar + org.springframework.boot:spring-boot-starter-web:jar + org.springframework.cloud:spring-cloud-commons:jar + org.springframework.cloud:spring-cloud-config-client:jar + org.springframework.cloud:spring-cloud-context:jar + org.springframework.cloud:spring-cloud-netflix-archaius:jar + org.springframework.cloud:spring-cloud-netflix-core:jar + org.springframework.cloud:spring-cloud-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-netflix-ribbon:jar + org.springframework.cloud:spring-cloud-starter:jar + org.springframework.cloud:spring-cloud-starter-config:jar + org.springframework.cloud:spring-cloud-starter-eureka:jar + org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar + org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar + org.springframework.security:spring-security-crypto:jar + org.springframework.security:spring-security-rsa:jar + org.springframework:spring-aop:jar + org.springframework:spring-beans:jar + org.springframework:spring-context:jar + org.springframework:spring-core:jar + org.springframework:spring-expression:jar + org.springframework:spring-jcl:jar + org.springframework:spring-web:jar + org.springframework:spring-webmvc:jar + org.tukaani:xz:jar + org.yaml:snakeyaml:jar + software.amazon.ion:ion-java:jar + xerces:xercesImpl:jar + xmlenc:xmlenc:jar + xmlpull:xmlpull:jar + xpp3:xpp3_min:jar + + + + + + + + + diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java new file mode 100644 index 0000000000..8f503dcf65 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor; + +import org.apache.linkis.LinkisBaseServerApp; + +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.scheduling.annotation.EnableScheduling; + +@EnableScheduling +@SpringBootApplication +public class LinksMonitorApplication { + + public static void main(String[] args) throws ReflectiveOperationException { + LinkisBaseServerApp.main(args); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java new file mode 100644 index 0000000000..b4492c95ac --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.bml.cleaner.dao;
+
+import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion;
+import org.apache.linkis.monitor.bml.cleaner.entity.ResourceVersion;
+import org.apache.linkis.monitor.bml.cleaner.vo.CleanResourceVo;
+
+import org.apache.ibatis.annotations.*;
+
+import java.util.Date;
+import java.util.List;
+
+public interface VersionDao {
+
+  @Select(
+      "select resource_id, count(resource_id) as version_count, max(version) as max_version from "
+          + "linkis_ps_bml_resources_version lpbrv where start_time < #{startTime} GROUP BY resource_id HAVING count(resource_id) > #{maxVersionNum} limit #{limitNum}")
+  List<CleanResourceVo> getAllNeedCleanResource(
+      @Param("maxVersionNum") Integer maxVersionNum,
+      @Param("startTime") Date startTime,
+      @Param("limitNum") int num);
+
+  @Select(
+      "select * from linkis_ps_bml_resources_version where resource_id = #{resourceId} and version < #{minKeepVersion} and version <> 'v000001'")
+  List<ResourceVersion> getCleanVersionsByResourceId(
+      @Param("resourceId") String resourceId, @Param("minKeepVersion") String minKeepVersion);
+
+  @Insert({
+    "insert into linkis_ps_bml_cleaned_resources_version(`resource_id`,`file_md5`,`version`,`size`,`start_byte`, `end_byte`,`resource`,`description`,"
+        + "`start_time`,`end_time`,`client_ip`,`updator`,`enable_flag`,`old_resource`) values(#{resourceId},#{fileMd5},#{version},#{size},#{startByte},#{endByte}"
+        + ",#{resource},#{description},#{startTime},#{endTime},#{clientIp},#{updator},#{enableFlag},#{oldResource})"
+  })
+  @Options(useGeneratedKeys = true, keyProperty = "id")
+  void insertCleanResourceVersion(CleanedResourceVersion cleanedResourceVersion);
+
+  @Delete("delete from linkis_ps_bml_resources_version where id=#{id}")
+  void deleteResourceVersionById(@Param("id") long id);
+
+  @Select(
+      "select version from linkis_ps_bml_resources_version where resource_id =#{resourceId} and version <= #{maxVersion} order by version desc limit #{keepNum},1")
+  String getMinKeepVersion(
+      @Param("resourceId") String resourceId,
+      @Param("maxVersion") String maxVersion,
+      @Param("keepNum") int keepNum);
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java
new file mode 100644
index 0000000000..5a0bfcc487
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
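Annotation: VersionDao above compares and orders version values as strings (max(version), version < #{minKeepVersion}, order by version desc). That only works because versions are fixed-width: CleanerServiceImpl later in the patch builds them from VERSION_PREFIX "v" and VERSION_FORMAT "%06d", so lexicographic order coincides with numeric order. A standalone check (class name hypothetical):

import java.util.Arrays;

public class VersionOrderCheck {
  public static void main(String[] args) {
    String[] versions = {
      "v" + String.format("%06d", 12),
      "v" + String.format("%06d", 2),
      "v" + String.format("%06d", 100)
    };
    // Zero padding makes string order equal numeric order.
    Arrays.sort(versions);
    System.out.println(Arrays.toString(versions)); // [v000002, v000012, v000100]
    System.out.println("v000002".compareTo("v000012") < 0); // true
  }
}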
+ */ + +package org.apache.linkis.monitor.bml.cleaner.entity; + +import java.util.Date; + +public class CleanedResourceVersion { + + private long id; + + private String resourceId; + + private String fileMd5; + + private String version; + + private long size; + + private String resource; + + private String oldResource; + + private String description; + + private String clientIp; + + private boolean enableFlag; + + private String user; + + private String system; + + private Date startTime; + + private Date endTime; + + private long startByte; + + private long endByte; + + private String updator; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public String getSystem() { + return system; + } + + public void setSystem(String system) { + this.system = system; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getResource() { + return resource; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public String getOldResource() { + return oldResource; + } + + public void setOldResource(String oldResource) { + this.oldResource = oldResource; + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getFileMd5() { + return fileMd5; + } + + public void setFileMd5(String fileMd5) { + this.fileMd5 = fileMd5; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getClientIp() { + return clientIp; + } + + public void setClientIp(String clientIp) { + this.clientIp = clientIp; + } + + public boolean isEnableFlag() { + return enableFlag; + } + + public void setEnableFlag(boolean enableFlag) { + this.enableFlag = enableFlag; + } + + public long getStartByte() { + return startByte; + } + + public void setStartByte(long startByte) { + this.startByte = startByte; + } + + public long getEndByte() { + return endByte; + } + + public void setEndByte(long endByte) { + this.endByte = endByte; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getUpdator() { + return updator; + } + + public void setUpdator(String updator) { + this.updator = updator; + } + + public static CleanedResourceVersion copyFromResourceVersion(ResourceVersion resourceVersion) { + CleanedResourceVersion cleanedResourceVersion = new CleanedResourceVersion(); + cleanedResourceVersion.setResourceId(resourceVersion.getResourceId()); + cleanedResourceVersion.setOldResource(resourceVersion.getResource()); + cleanedResourceVersion.setFileMd5(resourceVersion.getFileMd5()); + cleanedResourceVersion.setClientIp(resourceVersion.getClientIp()); + cleanedResourceVersion.setSize(resourceVersion.getSize()); + cleanedResourceVersion.setEnableFlag(resourceVersion.getEnableFlag()); + cleanedResourceVersion.setVersion(resourceVersion.getVersion()); + 
cleanedResourceVersion.setStartByte(resourceVersion.getStartByte()); + cleanedResourceVersion.setEndByte(resourceVersion.getEndByte()); + cleanedResourceVersion.setStartTime(resourceVersion.getStartTime()); + cleanedResourceVersion.setEndTime(resourceVersion.getEndTime()); + return cleanedResourceVersion; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java new file mode 100644 index 0000000000..5d297cf180 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java @@ -0,0 +1,206 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.bml.cleaner.entity; + +import java.util.Date; + +public class ResourceVersion { + + private long id; + + private String resourceId; + + private String fileMd5; + + private String version; + + private long size; + + private String resource; + + private String description; + + private String clientIp; + + private boolean enableFlag; + + private String user; + + private String system; + + private Date startTime; + + private Date endTime; + + private long startByte; + + private long endByte; + + private String updator; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public String getSystem() { + return system; + } + + public void setSystem(String system) { + this.system = system; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getResource() { + return resource; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getFileMd5() { + return fileMd5; + } + + public void setFileMd5(String fileMd5) { + this.fileMd5 = fileMd5; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getClientIp() { + return clientIp; + } + + public void setClientIp(String clientIp) { + this.clientIp = clientIp; + } + + public boolean getEnableFlag() { + return enableFlag; + } + + public void setEnableFlag(boolean enableFlag) { + this.enableFlag = enableFlag; 
+ } + + public long getStartByte() { + return startByte; + } + + public void setStartByte(long startByte) { + this.startByte = startByte; + } + + public long getEndByte() { + return endByte; + } + + public void setEndByte(long endByte) { + this.endByte = endByte; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getUpdator() { + return updator; + } + + public void setUpdator(String updator) { + this.updator = updator; + } + + public static ResourceVersion createNewResourceVersion( + String resourceId, + String resourcePath, + String fileMd5, + String clientIp, + long size, + String version, + long startByte) { + ResourceVersion resourceVersion = new ResourceVersion(); + resourceVersion.setResourceId(resourceId); + resourceVersion.setResource(resourcePath); + resourceVersion.setFileMd5(fileMd5); + resourceVersion.setClientIp(clientIp); + resourceVersion.setSize(size); + resourceVersion.setEnableFlag(true); + resourceVersion.setVersion(version); + resourceVersion.setStartByte(startByte); + resourceVersion.setEndByte(startByte + size - 1); + resourceVersion.setStartTime(new Date(System.currentTimeMillis())); + resourceVersion.setEndTime(new Date(System.currentTimeMillis())); + return resourceVersion; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java similarity index 75% rename from linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala rename to linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java index 47a6ce9e9e..0dfa15f396 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java @@ -15,10 +15,9 @@ * limitations under the License. */ -package org.apache.linkis.entrance.scheduler.cache +package org.apache.linkis.monitor.bml.cleaner.service; -import org.apache.linkis.scheduler.executer.OutputExecuteResponse +public interface CleanerService { -case class CacheOutputExecuteResponse(alias: String, output: String) extends OutputExecuteResponse { - override def getOutput: String = output + public void run(); } diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java new file mode 100644 index 0000000000..cc1a45c9e6 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.bml.cleaner.service; + +import org.apache.linkis.common.io.FsPath; +import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion; +import org.apache.linkis.storage.fs.FileSystem; + +import java.io.IOException; + +public interface VersionService { + + void doMove( + FileSystem fs, + FsPath srcPath, + FsPath destPath, + CleanedResourceVersion insertVersion, + long delVersionId) + throws IOException; + + void moveOnDb(CleanedResourceVersion insertVersion, long delVersionId); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java new file mode 100644 index 0000000000..ec2ed2d758 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java @@ -0,0 +1,167 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.bml.cleaner.service.impl;
+
+import org.apache.linkis.common.io.FsPath;
+import org.apache.linkis.monitor.bml.cleaner.dao.VersionDao;
+import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion;
+import org.apache.linkis.monitor.bml.cleaner.entity.ResourceVersion;
+import org.apache.linkis.monitor.bml.cleaner.service.CleanerService;
+import org.apache.linkis.monitor.bml.cleaner.service.VersionService;
+import org.apache.linkis.monitor.bml.cleaner.vo.CleanResourceVo;
+import org.apache.linkis.monitor.constants.Constants;
+import org.apache.linkis.storage.FSFactory;
+import org.apache.linkis.storage.fs.FileSystem;
+import org.apache.linkis.storage.utils.StorageConfiguration;
+import org.apache.linkis.storage.utils.StorageUtils;
+
+import org.apache.commons.io.IOUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Service
+public class CleanerServiceImpl implements CleanerService {
+
+  private final Logger logger = LoggerFactory.getLogger("CleanerServiceImpl");
+
+  private final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
+
+  public static final String VERSION_FORMAT = "%06d";
+  public static final String VERSION_PREFIX = "v";
+  public static final String TRASH_DIR = "/trash";
+
+  private FileSystem fs = null;
+
+  @Autowired private VersionDao versionDao;
+
+  public void setVersionDao(VersionDao versionDao) {
+    this.versionDao = versionDao;
+  }
+
+  private Set<String> cleanedResourceIds = new HashSet<>();
+
+  Date previous;
+
+  @Autowired VersionService versionService;
+
+  public void clean() {
+    previous =
+        new Date(
+            System.currentTimeMillis()
+                - (Long) Constants.BML_PREVIOUS_INTERVAL_TIME_DAYS().getValue() * 86400 * 1000);
+
+    if ((Integer) Constants.BML_VERSION_MAX_NUM().getValue()
+            - (Integer) Constants.BML_VERSION_KEEP_NUM().getValue()
+        <= 1) {
+      logger.error("config error: the version max num must exceed the version keep num by more than 1");
+      return;
+    }
+    List<CleanResourceVo> needCleanResources = getCleanResources();
+    while (needCleanResources != null && needCleanResources.size() > 0) {
+      logger.info("need cleaned resource count:{}", needCleanResources.size());
+      fs =
+          (FileSystem)
+              FSFactory.getFs(StorageUtils.HDFS, StorageConfiguration.HDFS_ROOT_USER.getValue());
+      for (CleanResourceVo resourceVo : needCleanResources) {
+        String minVersion =
+            versionDao.getMinKeepVersion(
+                resourceVo.getResourceId(),
+                resourceVo.getMaxVersion(),
+                (Integer) Constants.BML_VERSION_KEEP_NUM().getValue() - 1);
+        List<ResourceVersion> cleanVersionList =
+            versionDao.getCleanVersionsByResourceId(resourceVo.getResourceId(), minVersion);
+        // move on hdfs
+        for (ResourceVersion version : cleanVersionList) {
+          FsPath srcPath = new FsPath(version.getResource());
+          // note: the fs handle should be initialized in the outer scope
+          try {
+            fs.init(null);
+            if (!fs.exists(srcPath)) {
+              logger.error("try to move but bml source file:{} does not exist!", version.getResource());
+              CleanedResourceVersion cleanedResourceVersion =
+                  CleanedResourceVersion.copyFromResourceVersion(version);
+              cleanedResourceVersion.setResource("");
+              versionService.moveOnDb(cleanedResourceVersion, version.getId());
+              continue;
+            }
+            String destPrefix =
+                version.getResource().substring(0, version.getResource().indexOf("/bml/") + 4);
+            String destPath =
+                destPrefix
TRASH_DIR + + File.separator + + sdf.format(new Date()) + + File.separator + + version.getResourceId() + + "_" + + version.getVersion(); + FsPath dest = new FsPath(destPath); + if (!fs.exists(dest.getParent())) { + fs.mkdirs(dest.getParent()); + } + logger.info("begin to mv bml resource:{} to dest:{}", version.getResource(), destPath); + CleanedResourceVersion cleanedResourceVersion = + CleanedResourceVersion.copyFromResourceVersion(version); + cleanedResourceVersion.setResource(destPath); + versionService.doMove(fs, srcPath, dest, cleanedResourceVersion, version.getId()); + } catch (Exception e) { + logger.error("failed to mv bml resource:{}", e.getMessage(), e); + } + } + + cleanedResourceIds.add(resourceVo.getResourceId()); + } + needCleanResources = getCleanResources(); + } + } + + public void run() { + logger.info("start to clean."); + clean(); + logger.info("start to shutdown."); + shutdown(); + } + + void shutdown() { + IOUtils.closeQuietly(fs); + } + + List getCleanResources() { + List cleanResourceVoList = + versionDao.getAllNeedCleanResource( + (Integer) Constants.BML_VERSION_MAX_NUM().getValue(), + previous, + (Integer) Constants.BML_CLEAN_ONCE_RESOURCE_LIMIT_NUM().getValue()); + + return cleanResourceVoList.stream() + .filter(cleanResourceVo -> !cleanedResourceIds.contains(cleanResourceVo.getResourceId())) + .collect(Collectors.toList()); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java new file mode 100644 index 0000000000..be9e5b70ea --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
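The constants above pin down the on-disk version naming: `VERSION_PREFIX` plus a zero-padded number in `VERSION_FORMAT`. A minimal sketch of that convention (the class and helper names here are illustrative, not part of this patch):

    // Version strings look like "v000012": VERSION_PREFIX + String.format(VERSION_FORMAT, n).
    class VersionStringSketch {
      static String toVersion(int n) {
        return "v" + String.format("%06d", n);
      }

      static int toNumber(String version) {
        return Integer.parseInt(version.substring(1));
      }

      public static void main(String[] args) {
        System.out.println(toVersion(12)); // v000012
        System.out.println(toNumber("v000012")); // 12
      }
    }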
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java
new file mode 100644
index 0000000000..be9e5b70ea
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.bml.cleaner.service.impl;
+
+import org.apache.linkis.common.io.FsPath;
+import org.apache.linkis.monitor.bml.cleaner.dao.VersionDao;
+import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion;
+import org.apache.linkis.monitor.bml.cleaner.service.VersionService;
+import org.apache.linkis.storage.fs.FileSystem;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.io.IOException;
+
+@Service
+public class VersionServiceImpl implements VersionService {
+
+  @Autowired VersionDao versionDao;
+
+  public void setVersionDao(VersionDao versionDao) {
+    this.versionDao = versionDao;
+  }
+
+  @Transactional(rollbackFor = Throwable.class)
+  public void doMove(
+      FileSystem fs,
+      FsPath srcPath,
+      FsPath destPath,
+      CleanedResourceVersion insertVersion,
+      long delVersionId)
+      throws IOException {
+    versionDao.insertCleanResourceVersion(insertVersion);
+    versionDao.deleteResourceVersionById(delVersionId);
+    fs.renameTo(srcPath, destPath);
+  }
+
+  @Transactional
+  public void moveOnDb(CleanedResourceVersion insertVersion, long delVersionId) {
+    versionDao.insertCleanResourceVersion(insertVersion);
+    versionDao.deleteResourceVersionById(delVersionId);
+  }
+
+  public String test() {
+    return "this is a test string";
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java
new file mode 100644
index 0000000000..4ef205effd
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.bml.cleaner.vo;
+
+public class CleanResourceVo {
+  private String resourceId;
+  private int versionCount;
+  private String maxVersion;
+
+  public String getResourceId() {
+    return resourceId;
+  }
+
+  public void setResourceId(String resourceId) {
+    this.resourceId = resourceId;
+  }
+
+  public int getVersionCount() {
+    return versionCount;
+  }
+
+  public void setVersionCount(int versionCount) {
+    this.versionCount = versionCount;
+  }
+
+  public String getMaxVersion() {
+    return maxVersion;
+  }
+
+  public void setMaxVersion(String maxVersion) {
+    this.maxVersion = maxVersion;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java
new file mode 100644
index 0000000000..eb5c11af87
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.config;
+
+import org.apache.linkis.monitor.until.HttpsUntils;
+import org.apache.linkis.monitor.until.ThreadUtils;
+import org.apache.linkis.monitor.utils.log.LogUtils;
+
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.event.ContextClosedEvent;
+import org.springframework.context.event.EventListener;
+
+import java.io.IOException;
+
+import org.slf4j.Logger;
+
+@Configuration
+public class ListenerConfig {
+
+  private static final Logger logger = LogUtils.stdOutLogger();
+
+  @EventListener
+  private void shutdownEntrance(ContextClosedEvent event) {
+    try {
+      ThreadUtils.executors.shutdown();
+      HttpsUntils.client.close();
+    } catch (IOException e) {
+      logger.error("ListenerConfig shutdown error, msg: {}", e.getMessage());
+    }
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java
new file mode 100644
index 0000000000..5b4c2e269a
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.config;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.Configuration;
+
+public class MonitorConfig {
+
+  public static final String shellPath = Configuration.getLinkisHome() + "/admin/";
+
+  public static final CommonVars<Integer> USER_MODE_TIMEOUT =
+      CommonVars.apply("linkis.monitor.user.timeOut", 300);
+  public static final CommonVars<String> USER_MODE_ENGINE =
+      CommonVars.apply("linkis.monitor.user.enginelist", "[]");
+
+  public static final CommonVars<Double> ECM_TASK_MAJOR =
+      CommonVars.apply("linkis.monitor.ecmResourceTask.major", 0.03);
+  public static final CommonVars<Double> ECM_TASK_MINOR =
+      CommonVars.apply("linkis.monitor.ecmResourceTask.minor", 0.1);
+  public static final CommonVars<String> ECM_TASK_IMURL =
+      CommonVars.apply("linkis.monitor.metrics.imsUrl");
+  public static final CommonVars<String> ECM_TASK_USER_AUTHKEY =
+      CommonVars.apply("linkis.monitor.metrics.userAuthKey");
+
+  public static final CommonVars<Long> JOB_HISTORY_TIME_EXCEED =
+      CommonVars.apply("linkis.monitor.jobhistory.id.timeExceed", 0L);
+
+  public static final CommonVars<Integer> ENTRANCE_TASK_USERTOTAL =
+      CommonVars.apply("linkis.monitor.entranceTask.userTotalTask", 1000);
+  public static final CommonVars<Integer> ENTRANCE_TASK_TOTAL_MAJOR =
+      CommonVars.apply("linkis.monitor.entranceTask.linkisTotalTaskMajor", 50000);
+  public static final CommonVars<Integer> ENTRANCE_TASK_TOTAL_MINOR =
+      CommonVars.apply("linkis.monitor.entranceTask.linkisTotalTaskMinor", 10000);
+  public static final CommonVars<String> ENTRANCE_TASK_USERLIST =
+      CommonVars.apply("linkis.monitor.entranceTask.userlist", "[]");
+
+  public static final CommonVars<Integer> SCHEDULED_CONFIG_NUM =
+      CommonVars.apply("linkis.monitor.scheduled.pool.cores.num", 10);
+
+  public static final CommonVars<Integer> SHELL_TIMEOUT =
+      CommonVars.apply("linkis.monitor.shell.time.out.minute", 30);
+
+  public static final CommonVars<Integer> USER_MODE_INTERFACE_TIMEOUT =
+      CommonVars.apply("linkis.monitor.user.mode.time.out", 30 * 1000);
+
+  public static final CommonVars<String> SOLUTION_URL =
+      CommonVars.apply(
+          "linkis.monitor.jobhistory.solution.url",
+          "https://linkis.apache.org/docs/latest/tuning-and-troubleshooting/error-guide/error-code");
+
+  // Default alert template (Chinese). Roughly: "[Linkis task info] Hello, the task you submitted
+  // on Linkis/DSS (task ID: {0}) has been running for more than {1}h; please check whether it is
+  // normal. If not, you can kill it on the Linkis/DSS console. Cluster: BDAP({2}). See Q47 for the
+  // detailed solution: {3}"
+  public static final CommonVars<String> TASK_RUNTIME_TIMEOUT_DESC =
+      CommonVars.apply(
+          "linkis.monitor.jobhistory.task.timeout.desc",
+          "[Linkis任务信息]您好,您在Linkis/DSS提交的任务(任务ID:{0}),已经运行超过{1}h,"
+              + "请关注是否任务正常,如果不正常您可以到Linkis/DSS管理台进行任务的kill,集群信息为BDAP({2})。详细解决方案见Q47:{3} ");
+}
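Every entry above follows the same pattern used throughout Linkis: `CommonVars.apply(key, default)` declares a typed configuration item, and `getValue()` resolves it at read time, falling back to the default when the key is absent from linkis-et-monitor.properties. A minimal consumption sketch:

    import org.apache.linkis.monitor.config.MonitorConfig;

    class PoolSizeSketch {
      public static void main(String[] args) {
        // Resolves linkis.monitor.scheduled.pool.cores.num, defaulting to 10.
        int poolSize = MonitorConfig.SCHEDULED_CONFIG_NUM.getValue();
        System.out.println("scheduler pool size: " + poolSize);
      }
    }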
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java
new file mode 100644
index 0000000000..e954122671
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.config;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.SchedulingConfigurer;
+import org.springframework.scheduling.config.ScheduledTaskRegistrar;
+
+import java.util.concurrent.Executor;
+import java.util.concurrent.Executors;
+
+@Configuration
+public class ScheduledConfig implements SchedulingConfigurer {
+  @Bean
+  public Executor taskExecutor() {
+    return Executors.newScheduledThreadPool(MonitorConfig.SCHEDULED_CONFIG_NUM.getValue());
+  }
+
+  @Override
+  public void configureTasks(ScheduledTaskRegistrar scheduledTaskRegistrar) {
+    scheduledTaskRegistrar.setScheduler(taskExecutor());
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java
new file mode 100644
index 0000000000..760c06ba1b
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.entity;
+
+import java.io.Serializable;
+
+public class EngineEntity implements Serializable {
+
+  private String engineType;
+
+  private String code;
+
+  private String runType;
+
+  public String getEngineType() {
+    return engineType;
+  }
+
+  public void setEngineType(String engineType) {
+    this.engineType = engineType;
+  }
+
+  public String getCode() {
+    return code;
+  }
+
+  public void setCode(String code) {
+    this.code = code;
+  }
+
+  public String getRunType() {
+    return runType;
+  }
+
+  public void setRunType(String runType) {
+    this.runType = runType;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java
new file mode 100644
index 0000000000..241439b499
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.entity;
+
+import java.io.Serializable;
+
+public class EntranceEntity implements Serializable {
+
+  private Integer runningtasks;
+
+  private Integer queuedtasks;
+
+  private String alteruser;
+
+  private String username;
+
+  public Integer getQueuedtasks() {
+    return queuedtasks;
+  }
+
+  public void setQueuedtasks(Integer queuedtasks) {
+    this.queuedtasks = queuedtasks;
+  }
+
+  public String getAlteruser() {
+    return alteruser;
+  }
+
+  public void setAlteruser(String alteruser) {
+    this.alteruser = alteruser;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
+
+  public Integer getRunningtasks() {
+    return runningtasks;
+  }
+
+  public void setRunningtasks(Integer runningtasks) {
+    this.runningtasks = runningtasks;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java
new file mode 100644
index 0000000000..2905f8aced
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.entity;
+
+import org.apache.linkis.monitor.constants.Constants;
+
+public class IndexEntity {
+
+  private final String subsystemId = Constants.ALERT_SUB_SYSTEM_ID();
+  private String interfaceName;
+  private String attrGroup;
+  private String attrName;
+  private String hostIp;
+  private String metricValue;
+
+  public String getSubsystemId() {
+    return subsystemId;
+  }
+
+  public String getInterfaceName() {
+    return interfaceName;
+  }
+
+  public void setInterfaceName(String interfaceName) {
+    this.interfaceName = interfaceName;
+  }
+
+  public String getAttrGroup() {
+    return attrGroup;
+  }
+
+  public void setAttrGroup(String attrGroup) {
+    this.attrGroup = attrGroup;
+  }
+
+  public String getAttrName() {
+    return attrName;
+  }
+
+  public void setAttrName(String attrName) {
+    this.attrName = attrName;
+  }
+
+  public String getHostIp() {
+    return hostIp;
+  }
+
+  public void setHostIp(String hostIp) {
+    this.hostIp = hostIp;
+  }
+
+  public String getMetricValue() {
+    return metricValue;
+  }
+
+  public void setMetricValue(String metricValue) {
+    this.metricValue = metricValue;
+  }
+
+  public IndexEntity() {}
+
+  public IndexEntity(
+      String interfaceName, String attrGroup, String attrName, String hostIp, String metricValue) {
+    this.interfaceName = interfaceName;
+    this.attrGroup = attrGroup;
+    this.attrName = attrName;
+    this.hostIp = hostIp;
+    this.metricValue = metricValue;
+  }
+}
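`IndexEntity` is the unit the monitor reports to IMS: one metric sample with its group, name, host, and value, with `subsystemId` filled in from `Constants.ALERT_SUB_SYSTEM_ID()` by the entity itself. A hedged sketch of how the schedulers later in this patch build and send one (the values are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.linkis.monitor.entity.IndexEntity;
    import org.apache.linkis.monitor.until.HttpsUntils;

    class IndexReportSketch {
      public static void main(String[] args) throws Exception {
        List<IndexEntity> list = new ArrayList<>();
        list.add(
            new IndexEntity(
                "entrance-host:9104", // interfaceName: reporting instance (illustrative)
                "entrance", // attrGroup
                "entrance_running_task", // attrName
                "127.0.0.1", // hostIp (illustrative)
                "42")); // metricValue
        HttpsUntils.sendIndex(list); // the same call EntranceTaskMonitor uses
      }
    }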
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java
new file mode 100644
index 0000000000..d805c1a99b
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.instance.dao;
+
+import org.apache.linkis.monitor.instance.entity.InsPersistenceLabel;
+
+import java.util.List;
+
+public interface InsLabelRelationDao {
+
+  /**
+   * Search labels
+   *
+   * @param instance instance value (http:port)
+   * @return labels bound to the instance
+   */
+  List<InsPersistenceLabel> searchLabelsByInstance(String instance);
+
+  void dropRelationsByInstance(String instance);
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java
new file mode 100644
index 0000000000..973801a37a
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.instance.dao;
+
+import org.apache.linkis.common.ServiceInstance;
+import org.apache.linkis.monitor.instance.entity.InstanceInfo;
+
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+public interface InstanceInfoDao {
+
+  void removeInstance(ServiceInstance instance);
+
+  List<InstanceInfo> getInstanceInfoByApplicationNameAndHostnameFuzzy(
+      @Param("hostname") String hostname, @Param("applicationName") String applicationName);
+
+  List<InstanceInfo> getInstanceInfoByApplicationNameAndInstanceName(
+      @Param("instanceName") String instanceName, @Param("applicationName") String applicationName);
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java
new file mode 100644
index 0000000000..361bebf8ba
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.instance.dao;
+
+import org.apache.linkis.monitor.instance.entity.InsPersistenceLabel;
+
+import java.util.List;
+
+public interface InstanceLabelDao {
+  /**
+   * Remove label
+   *
+   * @param label label entity
+   */
+  void remove(InsPersistenceLabel label);
+
+  /**
+   * Remove key -> value map from label id
+   *
+   * @param labelId label id
+   */
+  void doRemoveKeyValues(Integer labelId);
+
+  void doRemoveKeyValuesBatch(List<Integer> labelIds);
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java
new file mode 100644
index 0000000000..0959c27988
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.instance.entity;
+
+import java.util.Date;
+
+public class InsPersistenceLabel {
+  private Integer id;
+  private int labelValueSize = -1;
+  private String stringValue;
+  private Boolean modifiable = false;
+
+  private String labelKey;
+
+  private String feature;
+
+  private Date updateTime;
+  private Date createTime;
+
+  public Boolean getModifiable() {
+    return modifiable;
+  }
+
+  public void setModifiable(Boolean modifiable) {
+    this.modifiable = modifiable;
+  }
+
+  public Integer getId() {
+    return id;
+  }
+
+  public void setId(Integer id) {
+    this.id = id;
+  }
+
+  public int getLabelValueSize() {
+    return labelValueSize;
+  }
+
+  public void setLabelValueSize(int labelValueSize) {
+    this.labelValueSize = labelValueSize;
+  }
+
+  public String getStringValue() {
+    return stringValue;
+  }
+
+  public void setStringValue(String stringValue) {
+    this.stringValue = stringValue;
+  }
+
+  public Date getUpdateTime() {
+    return updateTime;
+  }
+
+  public void setUpdateTime(Date updateTime) {
+    this.updateTime = updateTime;
+  }
+
+  public Date getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(Date createTime) {
+    this.createTime = createTime;
+  }
+
+  public String getLabelKey() {
+    return labelKey;
+  }
+
+  public void setLabelKey(String labelKey) {
+    this.labelKey = labelKey;
+  }
+
+  public String getFeature() {
+    return feature;
+  }
+
+  public void setFeature(String feature) {
+    this.feature = feature;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java
new file mode 100644
index 0000000000..029f552608 100644
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.instance.entity;
+
+public class InsPersistenceLabelValue {
+
+  private Integer labelId;
+
+  private String valueKey;
+
+  private String valueContent;
+
+  public InsPersistenceLabelValue() {}
+
+  public InsPersistenceLabelValue(Integer labelId, String key, String content) {
+    this.labelId = labelId;
+    this.valueKey = key;
+    this.valueContent = content;
+  }
+
+  public String getValueKey() {
+    return valueKey;
+  }
+
+  public void setValueKey(String valueKey) {
+    this.valueKey = valueKey;
+  }
+
+  public String getValueContent() {
+    return valueContent;
+  }
+
+  public void setValueContent(String valueContent) {
+    this.valueContent = valueContent;
+  }
+
+  public Integer getLabelId() {
+    return labelId;
+  }
+
+  public void setLabelId(Integer labelId) {
+    this.labelId = labelId;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java
new file mode 100644
index 0000000000..5360ffd323
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.instance.entity;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+public class InstanceInfo {
+  /** Automatic increment */
+  private Integer id;
+
+  private String applicationName;
+
+  private String instance;
+
+  private Date updateTime;
+
+  private Date createTime;
+  /** Labels related */
+  private List<InsPersistenceLabel> labels = new ArrayList<>();
+
+  public InstanceInfo() {}
+
+  public InstanceInfo(String applicationName, String instance) {
+    this.applicationName = applicationName;
+    this.instance = instance;
+  }
+
+  public List<InsPersistenceLabel> getLabels() {
+    return labels;
+  }
+
+  public void setLabels(List<InsPersistenceLabel> labels) {
+    this.labels = labels;
+  }
+
+  public Integer getId() {
+    return id;
+  }
+
+  public void setId(Integer id) {
+    this.id = id;
+  }
+
+  public String getApplicationName() {
+    return applicationName;
+  }
+
+  public void setApplicationName(String applicationName) {
+    this.applicationName = applicationName;
+  }
+
+  public String getInstance() {
+    return instance;
+  }
+
+  public void setInstance(String instance) {
+    this.instance = instance;
+  }
+
+  public Date getUpdateTime() {
+    return updateTime;
+  }
+
+  public void setUpdateTime(Date updateTime) {
+    this.updateTime = updateTime;
+  }
+
+  public Date getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(Date createTime) {
+    this.createTime = createTime;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java
new file mode 100644
index 0000000000..aa73471c49
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class QueryUtils {
+
+  // note: SimpleDateFormat is not thread-safe; this shared instance assumes single-threaded use
+  private static DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
+
+  public static String dateToString(Date date) {
+    return dateFormat.format(date);
+  }
+}
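`QueryUtils` shares a single static `SimpleDateFormat`, which is not thread-safe; if `dateToString` is ever called from concurrent scan threads, formatting can be silently corrupted. A thread-safe java.time equivalent with the same output pattern, offered as a sketch rather than part of the patch:

    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;
    import java.util.Date;

    class SafeQueryUtilsSketch {
      // DateTimeFormatter is immutable and safe to share across threads.
      private static final DateTimeFormatter FORMAT =
          DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS").withZone(ZoneId.systemDefault());

      public static String dateToString(Date date) {
        return FORMAT.format(date.toInstant());
      }

      public static void main(String[] args) {
        System.out.println(dateToString(new Date()));
      }
    }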
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java
new file mode 100644
index 0000000000..ebd213cfb3
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.dao;
+
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory;
+
+import org.apache.ibatis.annotations.Param;
+
+import java.util.Date;
+import java.util.List;
+
+public interface JobHistoryMapper {
+
+  List<JobHistory> selectJobHistory(JobHistory jobReq);
+
+  List<JobHistory> search(
+      @Param("id") Long id,
+      @Param("umUser") String username,
+      @Param("status") List<String> status,
+      @Param("startDate") Date startDate,
+      @Param("endDate") Date endDate,
+      @Param("engineType") String engineType);
+
+  void updateIncompleteJobStatusGivenIDList(
+      @Param("idList") List<Long> idList, @Param("targetStatus") String targetStatus);
+
+  void updateJobStatusForInstanceGivenStatusList(
+      @Param("instanceName") String instanceName,
+      @Param("statusList") List<String> statusList,
+      @Param("targetStatus") String targetStatus,
+      @Param("startDate") Date startDate);
+
+  List<JobHistory> searchByCache(
+      @Param("id") Long id,
+      @Param("umUser") String username,
+      @Param("status") List<String> status,
+      @Param("startDate") Date startDate,
+      @Param("endDate") Date endDate,
+      @Param("engineType") String engineType);
+
+  List<JobHistory> searchByCacheAndUpdateTime(
+      @Param("id") Long id,
+      @Param("umUser") String username,
+      @Param("status") List<String> status,
+      @Param("startDate") Date startDate,
+      @Param("endDate") Date endDate,
+      @Param("engineType") String engineType);
+
+  Long selectIdByHalfDay(@Param("id") long beginId);
+
+  Long selectMaxId();
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java
new file mode 100644
index 0000000000..e2499d328d
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.entity;
+
+import org.apache.linkis.monitor.jobhistory.QueryUtils;
+
+import java.util.Date;
+
+public class JobHistory {
+
+  private Long id;
+
+  private String jobReqId;
+
+  private String submitUser;
+
+  private String executeUser;
+
+  private String source;
+
+  private String labels;
+
+  private String params;
+
+  private String progress;
+
+  private String status;
+
+  private String logPath;
+
+  private Integer errorCode;
+
+  private String errorDesc;
+
+  private Date createdTime;
+
+  private Date updatedTime;
+
+  private String updateTimeMills;
+
+  private String instances;
+
+  private String metrics;
+
+  private String engineType;
+
+  private String executionCode;
+
+  private String observeInfo;
+
+  public Long getId() {
+    return id;
+  }
+
+  public void setId(Long id) {
+    this.id = id;
+  }
+
+  public String getJobReqId() {
+    return jobReqId;
+  }
+
+  public void setJobReqId(String jobReqId) {
+    this.jobReqId = jobReqId;
+  }
+
+  public String getSubmitUser() {
+    return submitUser;
+  }
+
+  public void setSubmitUser(String submitUser) {
+    this.submitUser = submitUser;
+  }
+
+  public String getExecuteUser() {
+    return executeUser;
+  }
+
+  public void setExecuteUser(String executeUser) {
+    this.executeUser = executeUser;
+  }
+
+  public String getSource() {
+    return source;
+  }
+
+  public void setSource(String source) {
+    this.source = source;
+  }
+
+  public String getLabels() {
+    return labels;
+  }
+
+  public void setLabels(String labels) {
+    this.labels = labels;
+  }
+
+  public String getParams() {
+    return params;
+  }
+
+  public void setParams(String params) {
+    this.params = params;
+  }
+
+  public String getProgress() {
+    return progress;
+  }
+
+  public void setProgress(String progress) {
+    this.progress = progress;
+  }
+
+  public String getStatus() {
+    return status;
+  }
+
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
+  public String getLogPath() {
+    return logPath;
+  }
+
+  public void setLogPath(String logPath) {
+    this.logPath = logPath;
+  }
+
+  public Integer getErrorCode() {
+    return errorCode;
+  }
+
+  public void setErrorCode(Integer errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public String getErrorDesc() {
+    return errorDesc;
+  }
+
+  public void setErrorDesc(String errorDesc) {
+    this.errorDesc = errorDesc;
+  }
+
+  public Date getCreatedTime() {
+    return createdTime;
+  }
+
+  public void setCreatedTime(Date createdTime) {
+    this.createdTime = createdTime;
+  }
+
+  public Date getUpdatedTime() {
+    return updatedTime;
+  }
+
+  public void setUpdatedTime(Date updatedTime) {
+    this.updatedTime = updatedTime;
+  }
+
+  public String getInstances() {
+    return instances;
+  }
+
+  public void setInstances(String instances) {
+    this.instances = instances;
+  }
+
+  public String getMetrics() {
+    return metrics;
+  }
+
+  public void setMetrics(String metrics) {
+    this.metrics = metrics;
+  }
+
+  public String getEngineType() {
+    return engineType;
+  }
+
+  public void setEngineType(String engineType) {
+    this.engineType = engineType;
+  }
+
+  public String getExecutionCode() {
+    return executionCode;
+  }
+
+  public void setExecutionCode(String executionCode) {
+    this.executionCode = executionCode;
+  }
+
+  public String getUpdateTimeMills() {
+    return QueryUtils.dateToString(getUpdatedTime());
+  }
+
+  public String getObserveInfo() {
+    return observeInfo;
+  }
+
+  public void setObserveInfo(String observeInfo) {
+    this.observeInfo = observeInfo;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java
new file mode 100644
index 0000000000..b2c83be013
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.common.exception.ExceptionLevel;
+
+public class AnomalyScannerException extends ErrorException {
+  public AnomalyScannerException(int errCode, String desc) {
+    super(errCode, desc);
+  }
+
+  public AnomalyScannerException(
+      int errCode, String desc, String ip, int port, String serviceKind) {
+    super(errCode, desc, ip, port, serviceKind);
+  }
+
+  @Override
+  public ExceptionLevel getLevel() {
+    return super.getLevel();
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java
new file mode 100644
index 0000000000..b1c5e64743
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.common.exception.ExceptionLevel;
+
+public class DirtyDataCleanException extends ErrorException {
+  public DirtyDataCleanException(int errCode, String desc) {
+    super(errCode, desc);
+  }
+
+  public DirtyDataCleanException(
+      int errCode, String desc, String ip, int port, String serviceKind) {
+    super(errCode, desc, ip, port, serviceKind);
+  }
+
+  @Override
+  public ExceptionLevel getLevel() {
+    return super.getLevel();
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java
new file mode 100644
index 0000000000..cb7d37197f
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.scheduled;
+
+import org.apache.linkis.monitor.bml.cleaner.service.CleanerService;
+import org.apache.linkis.monitor.utils.log.LogUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import org.slf4j.Logger;
+
+@Component
+@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8")
+public class BmlClear {
+
+  private static final Logger logger = LogUtils.stdOutLogger();
+
+  @Autowired private CleanerService cleanerServices;
+
+  @Scheduled(cron = "${linkis.monitor.bml.clear.history.version.cron}")
+  public void bmlClearTask() {
+    logger.info("start to clear bml history version");
+    cleanerServices.run();
+    logger.info("end to clear bml history version");
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java
new file mode 100644
index 0000000000..ddb3d3e8c8
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.scheduled;
+
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.until.ThreadUtils;
+import org.apache.linkis.monitor.utils.log.LogUtils;
+
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.slf4j.Logger;
+
+/** Task: clean up linkis_cg_ec_resource_info_record data */
+@Component
+@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8")
+public class EcRecordClear {
+
+  private static final Logger logger = LogUtils.stdOutLogger();
+
+  @Scheduled(cron = "${linkis.monitor.clear.ecRecord.cron}")
+  public void ecRecordClear() {
+    logger.info("Start clear_ec_record shell");
+    List<String> cmdlist = new ArrayList<>();
+    cmdlist.add("sh");
+    cmdlist.add(MonitorConfig.shellPath + "clear_ec_record.sh");
+    logger.info("clear_ec_record shell command {}", cmdlist);
+    String exec = ThreadUtils.run(cmdlist, "clear_ec_record.sh");
+    logger.info("shell log {}", exec);
+    logger.info("Finish clear_ec_record shell");
+  }
+}
+ */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.entity.IndexEntity; +import org.apache.linkis.monitor.until.HttpsUntils; +import org.apache.linkis.monitor.utils.alert.AlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang3.StringUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.math.BigDecimal; +import java.util.*; + +import com.google.gson.internal.LinkedTreeMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * * Entrance monitors the number of tasks for specified users and systems. If the configured + * threshold is exceeded, an alarm will be triggered. + */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class EntranceTaskMonitor { + + private static final Logger logger = LoggerFactory.getLogger(EntranceTaskMonitor.class); + + private static final String ENTRANCE_RUNNING_TASK = "entrance_running_task"; + private static final String ENTRANCE_QUEUED_TASK = "entrance_queued_task"; + + @Scheduled(cron = "${linkis.monitor.entrance.task.cron}") + public void entranceTask() throws IOException { + List> userlist = new ArrayList<>(); + String value = MonitorConfig.ENTRANCE_TASK_USERLIST.getValue(); + if (StringUtils.isNotBlank(value)) { + userlist = BDPJettyServerHelper.gson().fromJson(value, ArrayList.class); + } + + userlist.forEach( + entranceEntity -> { + Map data = new HashMap<>(); + try { + data = + MapUtils.getMap( + HttpsUntils.getEntranceTask(null, entranceEntity.get("username"), null), + "data"); + logger.info("TaskMonitor userlist response {}:", data); + } catch (IOException e) { + logger.warn("failed to get EntranceTask data"); + } + + int runningNumber = + null != entranceEntity.get("runningtasks") + ? Integer.parseInt(entranceEntity.get("runningtasks")) + : 0; + int queuedNumber = + null != entranceEntity.get("queuedtasks") + ? 
Integer.parseInt(entranceEntity.get("queuedtasks")) + : 0; + + BigDecimal runningtotal = new BigDecimal((int) data.get("runningNumber")); + BigDecimal queuedtotal = new BigDecimal((int) data.get("queuedNumber")); + BigDecimal total = runningtotal.add(queuedtotal); + HashMap parms = new HashMap<>(); + parms.put("$username", entranceEntity.get("username")); + parms.put("$alteruser", entranceEntity.get("alteruser")); + parms.put("$url", Configuration.GATEWAY_URL().getValue()); + // 获取标准阈值 + if (runningtotal.intValue() > runningNumber) { + // 触发告警 用户运行任务满 + parms.put("$runningtask", String.valueOf(runningNumber)); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12005")); + } + if (queuedtotal.intValue() > queuedNumber) { + // 触发告警 用户排队任务满 + parms.put("$queuedtask", String.valueOf(queuedNumber)); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12006")); + } + int usertotalTask = MonitorConfig.ENTRANCE_TASK_USERTOTAL.getValue(); + if (total.intValue() > usertotalTask) { + parms.put("$tasktotal", String.valueOf(usertotalTask)); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12007")); + } + }); + Map likisData = null; + try { + likisData = MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", null), "data"); + logger.info("TaskMonitor hadoop response {}:", likisData); + } catch (IOException e) { + logger.warn("failed to get EntranceTask data"); + } + // 系统监控 + BigDecimal runningNumber = new BigDecimal((int) likisData.get("runningNumber")); + BigDecimal queuedNumber = new BigDecimal((int) likisData.get("queuedNumber")); + BigDecimal total = runningNumber.add(queuedNumber); + + HashMap parms = new HashMap<>(); + parms.put("$url", Configuration.GATEWAY_URL().getValue()); + int linkisTotalMajor = MonitorConfig.ENTRANCE_TASK_TOTAL_MAJOR.getValue(); + int linkisTotalMinor = MonitorConfig.ENTRANCE_TASK_TOTAL_MINOR.getValue(); + if (total.intValue() >= linkisTotalMajor) { + // 触发告警Major + parms.put("$taskmajor", String.valueOf(linkisTotalMajor)); + logger.info("TaskMonitor parms {}:", parms); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12009")); + + } else if (total.intValue() >= linkisTotalMinor) { + parms.put("$taskminor", String.valueOf(linkisTotalMinor)); + logger.info("TaskMonitor parms {}:", parms); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12008")); + } + resourceSendToIms(); + } + + public static void resourceSendToIms() { + ServiceInstance[] instances = + Sender.getInstances(GovernanceCommonConf.ENTRANCE_SERVICE_NAME().getValue()); + if (null != instances) { + for (ServiceInstance instance : instances) { + String serviceInstance = instance.getInstance(); + try { + Map instanceData = + MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", serviceInstance), "data"); + int runningNumber = 0; + int queuedNumber = 0; + if (instanceData.containsKey("runningNumber")) { + runningNumber = (int) instanceData.get("runningNumber"); + } + if (instanceData.containsKey("queuedNumber")) { + queuedNumber = (int) instanceData.get("queuedNumber"); + } + 
logger.info("ResourceMonitor send index "); + List list = new ArrayList<>(); + list.add( + new IndexEntity( + serviceInstance, + "entrance", + ENTRANCE_RUNNING_TASK, + HttpsUntils.localHost, + String.valueOf(runningNumber))); + list.add( + new IndexEntity( + serviceInstance, + "entrance", + ENTRANCE_QUEUED_TASK, + HttpsUntils.localHost, + String.valueOf(queuedNumber))); + HttpsUntils.sendIndex(list); + } catch (IOException e) { + logger.warn("failed to send EcmResource index :" + e); + } + } + } + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java new file mode 100644 index 0000000000..346ca9cb06 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.until.ThreadUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; + +/** * Task: clean up linkis_ps_job_history_group_history data */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class JobHistoryClear { + + private static final Logger logger = LogUtils.stdOutLogger(); + + @Scheduled(cron = "${linkis.monitor.clear.history.task.cron}") + public void historyTaskClear() { + logger.info("Start to clear_history_task shell"); + List cmdlist = new ArrayList<>(); + cmdlist.add("sh"); + cmdlist.add(MonitorConfig.shellPath + "clear_history_task.sh"); + logger.info("clear_history_task shell command {}", cmdlist); + String exec = ThreadUtils.run(cmdlist, "clear_history_task.sh"); + logger.info("shell log {}", exec); + logger.info("End to clear_history_task shell "); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java new file mode 100644 index 0000000000..c4bd65a90b --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java @@ -0,0 +1,265 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.scheduled;
+
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.constants.Constants;
+import org.apache.linkis.monitor.core.pac.DataFetcher;
+import org.apache.linkis.monitor.core.scanner.AnomalyScanner;
+import org.apache.linkis.monitor.core.scanner.DefaultScanner;
+import org.apache.linkis.monitor.factory.MapperFactory;
+import org.apache.linkis.monitor.jobhistory.JobHistoryDataFetcher;
+import org.apache.linkis.monitor.jobhistory.errorcode.JobHistoryErrCodeRule;
+import org.apache.linkis.monitor.jobhistory.errorcode.JobHistoryErrorCodeAlertSender;
+import org.apache.linkis.monitor.jobhistory.jobtime.JobTimeExceedAlertSender;
+import org.apache.linkis.monitor.jobhistory.jobtime.JobTimeExceedRule;
+import org.apache.linkis.monitor.jobhistory.labels.JobHistoryLabelsAlertSender;
+import org.apache.linkis.monitor.jobhistory.labels.JobHistoryLabelsRule;
+import org.apache.linkis.monitor.jobhistory.runtime.CommonJobRunTimeRule;
+import org.apache.linkis.monitor.jobhistory.runtime.CommonRunTimeAlertSender;
+import org.apache.linkis.monitor.jobhistory.runtime.JobHistoryRunTimeAlertSender;
+import org.apache.linkis.monitor.jobhistory.runtime.JobHistoryRunTimeRule;
+import org.apache.linkis.monitor.until.CacheUtils;
+import org.apache.linkis.monitor.utils.alert.AlertDesc;
+import org.apache.linkis.monitor.utils.alert.ims.ImsAlertDesc;
+import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils;
+import org.apache.linkis.monitor.utils.alert.ims.UserLabelAlertUtils;
+import org.apache.linkis.monitor.utils.log.LogUtils;
+
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import java.util.*;
+
+import org.slf4j.Logger;
+
+/**
+ * JobHistory monitor:
+ *
+ * <p>1. errorCode: monitor the error codes of finished tasks.
+ *
+ * <p>2. userLabel: tenant label monitoring; scan the execution data of the most recent 20 minutes
+ * and check the labels field of each record.
+ *
+ * <p>3. jobResultRunTime: scan the execution data of the most recent 20 minutes and check the
+ * completed tasks. An alarm is triggered only when all three conditions hold: the params field of
+ * the job history contains task.notification.conditions, the task result is one of (Succeed,
+ * Failed, Canceled, Timeout, ALL), and the job has ended.
+ *
+ * <p>4. jobResultRunTimeForDSS: scan the execution data of the most recent 20 minutes for tasks
+ * that have been marked for notification; if such a task has ended, a notification is sent.
+ *
+ * <p>5. jobHistoryUnfinishedScan: monitor the status of running tasks, scanning data older than 12
+ * hours and newer than 24 hours.
+ */
+@Component
+@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8")
+public class JobHistoryMonitor {
+
+  private static final Logger logger = LogUtils.stdOutLogger();
+  private static final long backtrackNum = 1000000L;
+
+  @Scheduled(cron = "${linkis.monitor.jobHistory.finished.cron}")
+  public void jobHistoryFinishedScan() {
+    long intervalMs = 20 * 60 * 1000;
+    long maxIntervalMs = Constants.ERRORCODE_MAX_INTERVALS_SECONDS() * 1000;
+    long endTime = System.currentTimeMillis();
+    long startTime = endTime - intervalMs;
+    long realIntervals = Math.min(endTime - startTime, maxIntervalMs);
+    AnomalyScanner scanner = new DefaultScanner();
+    boolean shouldStart = false;
+    long id;
+    if (null == CacheUtils.cacheBuilder.getIfPresent("jobHistoryId")) {
+      long maxId = MapperFactory.getJobHistoryMapper().selectMaxId();
+      long beginId = 0L;
+      if (maxId > backtrackNum) {
+        beginId = maxId - backtrackNum;
+      }
+      id = MapperFactory.getJobHistoryMapper().selectIdByHalfDay(beginId);
+      CacheUtils.cacheBuilder.put("jobHistoryId", id);
+    } else {
+      id = CacheUtils.cacheBuilder.getIfPresent("jobHistoryId");
+    }
+    List<DataFetcher> fetchers = generateFetchersForTime(startTime, endTime, id, "updated_time");
+    if (fetchers.isEmpty()) {
+      logger.warn("generated 0 dataFetchers, please check the input");
+      return;
+    }
+    // errorCode
+    try {
+      Map<String, AlertDesc> errorCodeAlerts =
+          MonitorAlertUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE(), null);
+
+      if (errorCodeAlerts == null || errorCodeAlerts.size() == 0) {
+        logger.info("Loaded 0 errorcode alerts from the alert-rule properties file.");
+      } else {
+        logger.info(
+            "Loaded {} errorcode alerts from the alert-rule properties file.",
+            errorCodeAlerts.size());
+        shouldStart = true;
+        addIntervalToImsAlerts(errorCodeAlerts, realIntervals);
+        JobHistoryErrCodeRule jobHistoryErrCodeRule =
+            new JobHistoryErrCodeRule(
+                errorCodeAlerts.keySet(), new JobHistoryErrorCodeAlertSender(errorCodeAlerts));
+        scanner.addScanRule(jobHistoryErrCodeRule);
+      }
+    } catch (Exception e) {
+      logger.warn("Jobhistory Monitor ErrorCode scan failed: " + e.getMessage());
+    }
+    // userLabel
+    try {
+      Map<String, AlertDesc> userLabelAlerts =
+          UserLabelAlertUtils.getAlerts(Constants.USER_LABEL_MONITOR(), "");
+      if (userLabelAlerts == null || userLabelAlerts.size() == 0) {
+        logger.info("Loaded 0 userLabel alerts from the alert-rule properties file.");
+      } else {
+        logger.info(
+            "Loaded {} userLabel alerts from the alert-rule properties file.",
+            userLabelAlerts.size());
+        shouldStart = true;
+        JobHistoryLabelsRule jobHistoryLabelsRule =
+            new JobHistoryLabelsRule(new JobHistoryLabelsAlertSender());
+        scanner.addScanRule(jobHistoryLabelsRule);
+      }
+    } catch (Exception e) {
+      logger.warn("Jobhistory Monitor UserLabel scan failed: " + e.getMessage());
+    }
+    // jobResultRunTime
+    try {
+      Map<String, AlertDesc> jobResultAlerts =
+          MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null);
+      if (jobResultAlerts == null || jobResultAlerts.size() == 0) {
+        logger.info("Loaded 0 jobResult alerts from the alert-rule properties file.");
+      } else {
+        logger.info(
+            "Loaded {} jobResult alerts from the alert-rule properties file.",
+            jobResultAlerts.size());
+        shouldStart = true;
+        JobHistoryRunTimeRule jobHistoryRunTimeRule =
+            new JobHistoryRunTimeRule(new JobHistoryRunTimeAlertSender());
+        scanner.addScanRule(jobHistoryRunTimeRule);
+      }
+    } catch (Exception e) {
+      logger.warn("Jobhistory Monitor JobResultRunTime scan failed: " + e.getMessage());
+    }
+    // jobResultRunTimeForDSS
+    try {
+      Map<String, AlertDesc> dssJobResultAlerts =
+          MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null);
+      if (dssJobResultAlerts == null || dssJobResultAlerts.size() == 0) {
+        logger.info("Loaded 0 jobResult alerts from the alert-rule properties file.");
+      } else {
+        logger.info(
+            "Loaded {} jobResult alerts from the alert-rule properties file.",
+            dssJobResultAlerts.size());
+        shouldStart = true;
+
+        CommonJobRunTimeRule commonJobRunTimeRule =
+            new CommonJobRunTimeRule(new CommonRunTimeAlertSender());
+        scanner.addScanRule(commonJobRunTimeRule);
+      }
+    } catch (Exception e) {
+      logger.warn("Jobhistory Monitor JobResultRunTimeForDSS scan failed: " + e.getMessage());
+    }
+    run(scanner, fetchers, shouldStart);
+  }
+
+  @Scheduled(cron = "${linkis.monitor.jobHistory.timeout.cron}")
+  public void jobHistoryUnfinishedScan() {
+    long id =
+        Optional.ofNullable(CacheUtils.cacheBuilder.getIfPresent("jobhistoryScan"))
+            .orElse(MonitorConfig.JOB_HISTORY_TIME_EXCEED.getValue());
+    long intervalMs = Constants.TIMEOUT_INTERVALS_SECONDS() * 1000;
+    long maxIntervalMs = Constants.ERRORCODE_MAX_INTERVALS_SECONDS() * 1000;
+    long endTime = System.currentTimeMillis();
+    long startTime = endTime - intervalMs;
+    long realIntervals = Math.min(endTime - startTime, maxIntervalMs);
+    AnomalyScanner scanner = new DefaultScanner();
+    boolean shouldStart = false;
+    List<DataFetcher> fetchers =
+        generateFetchers(startTime, endTime, maxIntervalMs, id, "created_time");
+    if (fetchers.isEmpty()) {
+      logger.warn("generated 0 dataFetchers, please check the input");
+      return;
+    }
+    Map<String, AlertDesc> jobTimeAlerts =
+        MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC()), null);
+    if (jobTimeAlerts == null || jobTimeAlerts.size() == 0) {
+      logger.info("Loaded 0 jobtime alerts from the alert-rule properties file.");
+    } else {
+      logger.info(
+          "Loaded {} jobtime alerts from the alert-rule properties file.", jobTimeAlerts.size());
+      shouldStart = true;
+      addIntervalToImsAlerts(jobTimeAlerts, realIntervals);
+      JobTimeExceedRule jobTimeExceedRule =
+          new JobTimeExceedRule(
+              jobTimeAlerts.keySet(), new JobTimeExceedAlertSender(jobTimeAlerts));
+      scanner.addScanRule(jobTimeExceedRule);
+    }
+    run(scanner, fetchers, shouldStart);
+  }
+
+  public static void run(AnomalyScanner scanner, List<DataFetcher> fetchers, Boolean shouldStart) {
+    if (shouldStart) {
+      scanner.addDataFetchers(fetchers);
+      scanner.run();
+    }
+  }
+
+  private static List<DataFetcher> generateFetchers(
+      long startTime, long endTime, long maxIntervalMs, long id, String timeType) {
+    List<DataFetcher> ret = new ArrayList<>();
+    long pe = endTime;
+    long ps;
+    while (pe > startTime) {
+      ps = Math.max(pe - maxIntervalMs, startTime);
+      String[] fetcherArgs =
+          new String[] {String.valueOf(ps), String.valueOf(pe), String.valueOf(id), timeType};
+      ret.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper()));
+      logger.info(
+          "Generated dataFetcher for startTime: " + new Date(ps) + ", endTime: " + new Date(pe));
+      pe = pe - maxIntervalMs;
+    }
+    return ret;
+  }
+
+  private static List<DataFetcher> generateFetchersForTime(
+      long startTime, long endTime, long id, String timeType) {
+    List<DataFetcher> fetchers = new ArrayList<>();
+    String[] fetcherArgs =
+        new String[] {
+          String.valueOf(startTime), String.valueOf(endTime), String.valueOf(id), timeType
+        };
+    fetchers.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper()));
+    logger.info(
+        "Generated dataFetcher for startTime: "
+            + new Date(startTime)
+            + ", endTime: "
+            + new Date(endTime));
+    return fetchers;
+  }
+
+  private static void addIntervalToImsAlerts(Map<String, AlertDesc> alerts, long realIntervals) {
+    for (AlertDesc alert : alerts.values()) {
+      if (!(alert instanceof ImsAlertDesc)) {
+        logger.warn("Ignoring alert of unexpected type: " + alert);
+      } else {
+        ((ImsAlertDesc) alert).hitIntervalMs_$eq(realIntervals);
+      }
+    }
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java
new file mode 100644
index 0000000000..ac3046501c
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java
@@ -0,0 +1,162 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.scheduled;
+
+import org.apache.linkis.common.utils.ByteTimeUtils;
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.constants.Constants;
+import org.apache.linkis.monitor.entity.IndexEntity;
+import org.apache.linkis.monitor.until.HttpsUntils;
+import org.apache.linkis.monitor.utils.alert.AlertDesc;
+import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils;
+import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils;
+
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Monitors ECM resource usage and reports the metrics to IMS. */
+@Component
+@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8")
+public class ResourceMonitor {
+
+  private static final Logger logger = LoggerFactory.getLogger(ResourceMonitor.class);
+
+  @Scheduled(cron = "${linkis.monitor.ecm.resource.cron}")
+  public void ecmResourceTask() {
+    Map<String, Object> resultmap = null;
+    AtomicReference<String> tenant = new AtomicReference<>("tenant label: public resource");
+    AtomicReference<Double> totalMemory = new AtomicReference<>(0.0);
+    AtomicReference<Double> totalInstance = new AtomicReference<>(0.0);
+    AtomicReference<Double> totalCores = new AtomicReference<>(0.0);
+    try {
+      resultmap = HttpsUntils.sendHttp(null, null);
+      logger.info("ResourceMonitor response: {}", resultmap);
+    } catch (IOException e) {
+      logger.warn("failed to get EcmResource data");
+    }
+    // got interface data
+    Map<String, List<Map<String, Object>>> data = MapUtils.getMap(resultmap, "data");
+    List<Map<String, Object>> emNodeVoList = data.getOrDefault("EMs", new ArrayList<>());
+    StringJoiner minor = new StringJoiner(",");
+    StringJoiner major = new StringJoiner(",");
+    // deal with each ECM's resource
+    emNodeVoList.forEach(
+        emNodeVo -> {
+          Map<String, Object> leftResource = MapUtils.getMap(emNodeVo, "leftResource");
+          Map<String, Object> maxResource = MapUtils.getMap(emNodeVo, "maxResource");
+          // For an ECM resource alarm, also report the tenant this ECM belongs to
+          List<Map<String, Object>> labels = (List<Map<String, Object>>) emNodeVo.get("labels");
+          labels.stream()
+              .filter(labelmap -> labelmap.containsKey("tenant"))
+              .forEach(map -> tenant.set("tenant label: " + map.get("stringValue").toString()));
+          String leftmemory =
+              ByteTimeUtils.bytesToString((long) leftResource.getOrDefault("memory", 0));
+          String maxmemory =
+              ByteTimeUtils.bytesToString((long) maxResource.getOrDefault("memory", 0));
+
+          String leftmemoryStr = leftmemory.split(" ")[0];
+          String maxmemoryStr = maxmemory.split(" ")[0];
+
+          BigDecimal leftMemory = new BigDecimal(leftmemoryStr);
+          BigDecimal leftCores = new BigDecimal((int) leftResource.get("cores"));
+          BigDecimal leftInstance = new BigDecimal((int) leftResource.get("instance"));
+          totalMemory.set(totalMemory.get() + leftMemory.doubleValue());
+          totalInstance.set(totalInstance.get() + leftInstance.doubleValue());
+          totalCores.set(totalCores.get() + leftCores.doubleValue());
+
+          BigDecimal maxMemory = new BigDecimal(maxmemoryStr);
+          BigDecimal maxCores = new BigDecimal((int) maxResource.get("cores"));
+          BigDecimal maxInstance = new BigDecimal((int) maxResource.get("instance"));
+          double memorydouble =
+              leftMemory.divide(maxMemory, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue();
+          double coresdouble =
+              leftCores.divide(maxCores, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue();
+          double instancedouble =
+              leftInstance.divide(maxInstance, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue();
+          Double majorValue = MonitorConfig.ECM_TASK_MAJOR.getValue();
+          Double minorValue = MonitorConfig.ECM_TASK_MINOR.getValue();
+          if (((memorydouble) <= majorValue)
+              || ((coresdouble) <= majorValue)
+              || ((instancedouble) <= majorValue)) {
+            major.add(emNodeVo.get("instance").toString());
+          } else if (((memorydouble) < minorValue)
+              || ((coresdouble) < minorValue)
+              || ((instancedouble) < minorValue)) {
+            minor.add(emNodeVo.get("instance").toString());
+          }
+          HashMap<String, String> replaceParm = new HashMap<>();
+          replaceParm.put("$tenant", tenant.get());
+          if (StringUtils.isNotBlank(major.toString())) {
+            replaceParm.put("$instance", major.toString());
+            replaceParm.put("$ratio", majorValue.toString());
+            Map<String, AlertDesc> ecmResourceAlerts =
+                MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), replaceParm);
+            PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12004"));
+          }
+          if (StringUtils.isNotBlank(minor.toString())) {
+            replaceParm.put("$instance", minor.toString());
+            replaceParm.put("$ratio", minorValue.toString());
+            Map<String, AlertDesc> ecmResourceAlerts =
+                MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), replaceParm);
+            PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12003"));
+          }
+          resourceSendToIms(
+              coresdouble, memorydouble, instancedouble, HttpsUntils.localHost, "USED");
+        });
+    resourceSendToIms(
+        totalCores.get(), totalMemory.get(), totalInstance.get(), HttpsUntils.localHost, "TOTAL");
+  }
+
+  private void resourceSendToIms(
+      Double coresdouble,
+      Double memorydouble,
+      Double instancedouble,
+      String localhost,
+      String name) {
+    List<IndexEntity> list = new ArrayList<>();
+    logger.info("ResourceMonitor send index");
+    String core = "ECM_CPU_";
+    String memory = "ECM_MEMORY_";
+    String instance = "ECM_INSTANCE_";
+    list.add(
+        new IndexEntity(core.concat(name), "CPU", "INDEX", localhost, String.valueOf(coresdouble)));
+    list.add(
+        new IndexEntity(
+            memory.concat(name), "MEMORY", "INDEX", localhost, String.valueOf(memorydouble)));
+    list.add(
+        new IndexEntity(
+            instance.concat(name), "INSTANCE", "INDEX", localhost, String.valueOf(instancedouble)));
+    try {
+      HttpsUntils.sendIndex(list);
+    } catch (IOException e) {
+      logger.warn("failed to send EcmResource index");
+    }
+  }
+}
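A quick worked example of the major/minor classification above, with invented numbers: a node with 2 GB free out of 10 GB total has a ratio of 0.20, so with a hypothetical ECM_TASK_MAJOR of 0.1 and ECM_TASK_MINOR of 0.3 it would be reported as minor (0.1 < 0.20 < 0.3):

    // Invented numbers; the real thresholds come from MonitorConfig
    java.math.BigDecimal left = new java.math.BigDecimal("2"); // 2 GB free
    java.math.BigDecimal max = new java.math.BigDecimal("10"); // 10 GB total
    double ratio = left.divide(max, 2, java.math.BigDecimal.ROUND_HALF_DOWN).doubleValue(); // 0.20
    // ratio <= 0.1 -> major alarm (12004); 0.1 < ratio < 0.3 -> minor alarm (12003)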
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java
new file mode 100644
index 0000000000..6def756982
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.scheduled;
+
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.until.ThreadUtils;
+import org.apache.linkis.monitor.utils.log.LogUtils;
+
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.slf4j.Logger;
+
+/** Task: clean up logs and file data of EC materials */
+@Component
+@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8")
+public class TaskLogClear {
+
+  private static final Logger logger = LogUtils.stdOutLogger();
+
+  @Scheduled(cron = "${linkis.monitor.clear.taskLog.cron}")
+  public void taskLogClear() {
+    logger.info("Start to execute linkis_task_res_log_clear.sh");
+    List<String> cmdlist = new ArrayList<>();
+    cmdlist.add("sh");
+    cmdlist.add(MonitorConfig.shellPath + "linkis_task_res_log_clear.sh");
+    logger.info("linkis_task_res_log_clear shell command {}", cmdlist);
+    String exec = ThreadUtils.run(cmdlist, "linkis_task_res_log_clear.sh");
+    logger.info("shell log {}", exec);
+    logger.info("Finished executing linkis_task_res_log_clear.sh");
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java
new file mode 100644
index 0000000000..ad6f861479
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.scheduled;
+
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.governance.common.entity.task.RequestPersistTask;
+import org.apache.linkis.httpclient.dws.config.DWSClientConfig;
+import org.apache.linkis.manager.label.constant.LabelKeyConstant;
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.constants.Constants;
+import org.apache.linkis.monitor.until.HttpsUntils;
+import org.apache.linkis.monitor.utils.alert.AlertDesc;
+import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils;
+import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils;
+import org.apache.linkis.server.BDPJettyServerHelper;
+import org.apache.linkis.ujes.client.UJESClient;
+import org.apache.linkis.ujes.client.UJESClientImpl;
+import org.apache.linkis.ujes.client.request.GetTableStatisticInfoAction;
+import org.apache.linkis.ujes.client.request.JobSubmitAction;
+import org.apache.linkis.ujes.client.response.GetTableStatisticInfoResult;
+import org.apache.linkis.ujes.client.response.JobExecuteResult;
+import org.apache.linkis.ujes.client.response.JobInfoResult;
+
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import java.net.SocketTimeoutException;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+
+import com.google.gson.internal.LinkedTreeMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * User-mode monitoring: periodically submit probe jobs to check whether the engines are running
+ * normally.
+ */
+@Component
+public class UserModeMonitor {
+
+  private static final Logger logger = LoggerFactory.getLogger(UserModeMonitor.class);
+
+  private static final DWSClientConfig clientConfig = HttpsUntils.dwsClientConfig;
+
+  private static final UJESClient client = new UJESClientImpl(clientConfig);
+
+  @Scheduled(cron = "${linkis.monitor.user.mode.cron}")
+  public void job() {
+    Optional.ofNullable(MonitorConfig.USER_MODE_ENGINE.getValue())
+        .ifPresent(
+            configStr -> {
+              ArrayList<LinkedTreeMap<String, String>> userModeStr =
+                  BDPJettyServerHelper.gson().fromJson(configStr, ArrayList.class);
+              userModeStr.forEach(
+                  engine -> {
+                    // build the probe job and execute it
+                    JobExecuteResult jobExecuteResult = toSubmit(engine);
+                    logger.info(
+                        "start run engineType: {}, job id: {}",
+                        engine.get("engineType"),
+                        jobExecuteResult.taskID());
+                    HashMap<String, String> parms = new HashMap<>();
+                    parms.put("$engineType", engine.get("engineType"));
+                    parms.put("$url", Configuration.GATEWAY_URL().getValue());
+                    parms.put("$jobId", jobExecuteResult.taskID());
+                    Utils.sleepQuietly(MonitorConfig.USER_MODE_TIMEOUT.getValue() * 1000);
+                    JobInfoResult jobInfo = client.getJobInfo(jobExecuteResult);
+                    if (jobInfo.isCompleted()) {
+                      if (jobInfo.getJobStatus().equals("Failed")) {
+                        logger.info(
+                            "run fail engineType: {}, job id: {}",
+                            engine.get("engineType"),
+                            jobExecuteResult.taskID());
+                        RequestPersistTask requestPersistTask = jobInfo.getRequestPersistTask();
+                        parms.put("$errorCode", String.valueOf(requestPersistTask.getErrCode()));
+                        parms.put("$errorMsg", requestPersistTask.getErrDesc());
+                        Map<String, AlertDesc> failedAlerts =
+                            MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms);
+                        PooledImsAlertUtils.addAlert(failedAlerts.get("12012"));
+                      }
+                    } else {
+                      logger.info(
+                          "run timeout engineType: {}, job id: {}",
+                          engine.get("engineType"),
+                          jobExecuteResult.taskID());
+                      Map<String, AlertDesc> alerts =
+                          MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms);
+                      PooledImsAlertUtils.addAlert(alerts.get("12011"));
+                    }
+                  });
+            });
+  }
+
+  private static JobExecuteResult toSubmit(LinkedTreeMap<String, String> engine) {
+    // 1. build params
+    // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
+    Map<String, Object> labels = new HashMap<>();
+    labels.put(
+        LabelKeyConstant.ENGINE_TYPE_KEY, engine.get("engineType")); // required engineType label
+    labels.put(
+        LabelKeyConstant.USER_CREATOR_TYPE_KEY,
+        engine.get("executeUser") + "-IDE"); // required execute user and creator, e.g. hadoop-IDE
+    labels.put(LabelKeyConstant.CODE_TYPE_KEY, engine.get("runType")); // required codeType
+    Map<String, Object> startupMap = new HashMap<>(16);
+    // setting linkis params
+    // startupMap.put("wds.linkis.rm.yarnqueue", "dws");
+    // 2. build jobSubmitAction
+    JobSubmitAction jobSubmitAction =
+        JobSubmitAction.builder()
+            .addExecuteCode(engine.get("code"))
+            .setStartupParams(startupMap)
+            .setUser(engine.get("executeUser")) // submit user
+            .addExecuteUser(engine.get("executeUser")) // execute user
+            .setLabels(labels)
+            .build();
+    // 3. execute
+    return client.submit(jobSubmitAction);
+  }
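+  // For reference (values invented for illustration): USER_MODE_ENGINE is parsed from a
+  // single JSON property into a list of maps, so one probe entry read by job() and
+  // toSubmit() could look like:
+  //   [{"engineType": "spark-2.4.3", "runType": "sql", "executeUser": "hadoop", "code": "select 1"}]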
+
+  @Scheduled(cron = "${linkis.monitor.user.db.cron:0 0/10 * * * ?}")
+  public void dbJob() {
+    Map<String, Object> properties = new HashMap<>();
+    properties.put("readTimeout", MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue());
+    DWSClientConfig clientConfig = HttpsUntils.createClientConfig(null, properties);
+    UJESClientImpl ujesClient = new UJESClientImpl(clientConfig);
+    GetTableStatisticInfoAction builder =
+        GetTableStatisticInfoAction.builder()
+            .setUser("hadoop")
+            .setDatabase("default")
+            .setTable("dual")
+            .builder();
+    HashMap<String, String> parms = new HashMap<>();
+    try {
+      GetTableStatisticInfoResult tableStatisticInfo = ujesClient.getTableStatisticInfo(builder);
+      if (tableStatisticInfo.getStatus() != 0) {
+        logger.info(
+            "User-mode check of the metadata query service failed, message: "
+                + tableStatisticInfo.getMessage());
+        // parms.put("$msg", tableStatisticInfo.getMessage());
+        // Map<String, AlertDesc> failedAlerts =
+        //     MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms);
+        // PooledImsAlertUtils.addAlert(failedAlerts.get("12017"));
+      }
+    } catch (Exception e) {
+      if (e instanceof SocketTimeoutException) {
+        Integer timeoutValue = MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue();
+        long timeout = TimeUnit.MILLISECONDS.toSeconds(timeoutValue);
+        logger.info(
+            "User-mode check of the metadata query service timed out after "
+                + timeout
+                + " seconds");
+        // parms.put("$timeout", String.valueOf(timeout));
+        // Map<String, AlertDesc> failedAlerts =
+        //     MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms);
+        // PooledImsAlertUtils.addAlert(failedAlerts.get("12018"));
+      } else {
+        logger.error("User-mode check of the metadata query service threw an exception: " + e);
+        // parms.put("$msg", e.getMessage());
+        // Map<String, AlertDesc> failedAlerts =
+        //     MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms);
+        // PooledImsAlertUtils.addAlert(failedAlerts.get("12017"));
+      }
+    }
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java
new file mode 100644
index 0000000000..a768fde555
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.until;
+
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+
+public class CacheUtils {
+
+  public static Cache<String, Long> cacheBuilder =
+      CacheBuilder.newBuilder()
+          .concurrencyLevel(5)
+          .expireAfterAccess(1, TimeUnit.DAYS)
+          .initialCapacity(20)
+          .maximumSize(1000)
+          .recordStats()
+          .build();
+}
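A minimal sketch of how the scans use this cache for progress bookkeeping, mirroring JobHistoryMonitor (computeStartIdFromDb is a hypothetical stand-in for the mapper lookup):

    Long cached = CacheUtils.cacheBuilder.getIfPresent("jobHistoryId");
    long id = (cached != null) ? cached : computeStartIdFromDb(); // hypothetical fallback
    CacheUtils.cacheBuilder.put("jobHistoryId", id);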
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java
new file mode 100644
index 0000000000..a504a9d41d
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.until;
+
+import org.apache.linkis.bml.conf.BmlConfiguration;
+import org.apache.linkis.common.conf.Configuration;
+import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy;
+import org.apache.linkis.httpclient.dws.config.DWSClientConfig;
+import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder;
+import org.apache.linkis.monitor.client.MonitorHTTPClient;
+import org.apache.linkis.monitor.client.MonitorHTTPClientClientImpl;
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.entity.IndexEntity;
+import org.apache.linkis.monitor.request.EmsListAction;
+import org.apache.linkis.monitor.request.EntranceTaskAction;
+import org.apache.linkis.monitor.response.EntranceTaskResult;
+import org.apache.linkis.server.BDPJettyServerHelper;
+import org.apache.linkis.ujes.client.response.EmsListResult;
+
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+
+import org.springframework.util.Assert;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HttpsUntils {
+  private static final Logger logger = LoggerFactory.getLogger(HttpsUntils.class);
+
+  public static DWSClientConfig dwsClientConfig = createClientConfig(null, null);
+  // IOUtils.closeQuietly(client);
+  public static MonitorHTTPClient client = new MonitorHTTPClientClientImpl(dwsClientConfig);
+  public static final String localHost = Utils.getLocalHostname();
+
+  public static Map<String, Object> sendHttp(String url, Map<String, Object> properties)
+      throws IOException {
+    if (null == dwsClientConfig) {
+      dwsClientConfig = createClientConfig(url, properties);
+    }
+    if (null == client) {
+      client = new MonitorHTTPClientClientImpl(dwsClientConfig);
+    }
+    EmsListAction build = EmsListAction.newBuilder().setUser("hadoop").build();
+    EmsListResult result = client.list(build);
+    return result.getResultMap();
+  }
+
+  public static DWSClientConfig createClientConfig(String url, Map<String, Object> properties) {
+    String realUrl = "";
+    if (StringUtils.isBlank(url)) {
+      realUrl = Configuration.getGateWayURL();
+    } else {
+      realUrl = url;
+    }
+    Map<String, Object> parms = new HashMap<>();
+    if (MapUtils.isNotEmpty(properties)) {
+      parms = properties;
+    }
+    int maxConnection =
+        (int)
+            parms.getOrDefault(
+                BmlConfiguration.CONNECTION_MAX_SIZE_SHORT_NAME(),
+                BmlConfiguration.CONNECTION_MAX_SIZE().getValue());
+    int connectTimeout =
+        (int)
+            parms.getOrDefault(
+                BmlConfiguration.CONNECTION_TIMEOUT_SHORT_NAME(),
+                BmlConfiguration.CONNECTION_TIMEOUT().getValue());
+    int readTimeout =
+        (int)
+            parms.getOrDefault(
+                BmlConfiguration.CONNECTION_READ_TIMEOUT_SHORT_NAME(),
+                BmlConfiguration.CONNECTION_READ_TIMEOUT().getValue());
+    String tokenKey =
+        (String)
+            parms.getOrDefault(
+                BmlConfiguration.AUTH_TOKEN_KEY_SHORT_NAME(),
+                BmlConfiguration.AUTH_TOKEN_KEY().getValue());
+    String tokenValue =
+        (String)
+            parms.getOrDefault(
+                BmlConfiguration.AUTH_TOKEN_VALUE_SHORT_NAME(),
+                BmlConfiguration.AUTH_TOKEN_VALUE().getValue());
+
+    DWSClientConfig clientConfig =
+        ((DWSClientConfigBuilder)
+                (DWSClientConfigBuilder.newBuilder()
+                    .addServerUrl(realUrl)
+                    .connectionTimeout(connectTimeout)
+                    .discoveryEnabled(false)
+                    .discoveryFrequency(1, TimeUnit.MINUTES)
+                    .loadbalancerEnabled(false)
+                    .maxConnectionSize(maxConnection)
+                    .retryEnabled(false)
+                    .readTimeout(readTimeout)
+                    .setAuthenticationStrategy(new TokenAuthenticationStrategy())
+                    .setAuthTokenKey(tokenKey)
+                    .setAuthTokenValue(tokenValue)))
+            .setDWSVersion("v1")
+            .build();
+
+    return clientConfig;
+  }
+
+  public static Map<String, Object> getEntranceTask(String url, String user, String instance)
+      throws IOException {
+    if (null == dwsClientConfig) {
+      dwsClientConfig = createClientConfig(null, null);
+    }
+    if (null == client) {
+      client = new MonitorHTTPClientClientImpl(dwsClientConfig);
+    }
+    EntranceTaskAction build =
+        EntranceTaskAction.newBuilder().setUser(user).setInstance(instance).build();
+    EntranceTaskResult result = client.entranList(build);
+    return result.getResultMap();
+  }
+
+  public static void sendIndex(List<IndexEntity> list) throws IOException {
+    Map<String, Object> parm = new HashMap<>();
+    parm.put("userAuthKey", MonitorConfig.ECM_TASK_USER_AUTHKEY.getValue());
+    parm.put("metricDataList", list);
+    String json = BDPJettyServerHelper.gson().toJson(parm);
+
+    RequestConfig requestConfig = RequestConfig.DEFAULT;
+    StringEntity entity =
+        new StringEntity(
+            json, ContentType.create(ContentType.APPLICATION_JSON.getMimeType(), "UTF-8"));
+    entity.setContentEncoding("UTF-8");
+
+    HttpPost httpPost = new HttpPost(MonitorConfig.ECM_TASK_IMURL.getValue());
+    httpPost.setConfig(requestConfig);
+    httpPost.setEntity(entity);
+
+    CloseableHttpClient httpClient = HttpClients.createDefault();
+    CloseableHttpResponse execute = httpClient.execute(httpPost);
+    String responseStr = EntityUtils.toString(execute.getEntity(), "UTF-8");
+    Map<String, String> map = BDPJettyServerHelper.gson().fromJson(responseStr, Map.class);
+    logger.info("send index response: {}", map);
+    Assert.isTrue(!"0".equals(map.get("resultCode")), map.get("resultMsg"));
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java
new file mode 100644
index 0000000000..15a2626379
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.until;
+
+import org.apache.linkis.common.utils.Utils;
+import org.apache.linkis.monitor.config.MonitorConfig;
+import org.apache.linkis.monitor.constants.Constants;
+import org.apache.linkis.monitor.utils.alert.AlertDesc;
+import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils;
+import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils;
+import org.apache.linkis.monitor.utils.log.LogUtils;
+
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.event.ApplicationContextEvent;
+
+import java.util.*;
+import java.util.concurrent.*;
+
+import scala.concurrent.ExecutionContextExecutorService;
+
+import org.slf4j.Logger;
+
+public class ThreadUtils extends ApplicationContextEvent {
+
+  private static final Logger logger = LogUtils.stdOutLogger();
+
+  public static ExecutionContextExecutorService executors =
+      Utils.newCachedExecutionContext(5, "alert-pool-thread-", false);
+
+  public ThreadUtils(ApplicationContext source) {
+    super(source);
+  }
+
+  public static String run(List<String> cmdList, String shellName) {
+    FutureTask<String> future =
+        new FutureTask<>(() -> Utils.exec(cmdList.toArray(new String[2]), -1));
+    executors.submit(future);
+    String msg = "";
+    try {
+      msg = future.get(MonitorConfig.SHELL_TIMEOUT.getValue(), TimeUnit.MINUTES);
+    } catch (TimeoutException e) {
+      logger.info("execute shell time out {}", shellName);
+      HashMap<String, String> parms = new HashMap<>();
+      parms.put("$shellName", shellName);
+      Map<String, AlertDesc> ecmResourceAlerts =
+          MonitorAlertUtils.getAlerts(Constants.THREAD_TIME_OUT_IM(), parms);
+      PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12014"));
+    } catch (ExecutionException | InterruptedException e) {
+      logger.error("Thread error msg {}", e.getMessage());
+    }
+    return msg;
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml
new file mode 100644
index 0000000000..6c51f6d0aa
--- /dev/null
+++
b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + l.`id`, l.`label_key`, l.`label_value`, l.`label_feature`, + l.`label_value_size`, l.`update_time`, l.`create_time` + + + + s.`id`, s.`instance`, s.`name`, s.`update_time`, s.`create_time` + + + + + + + + + \ No newline at end of file diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml new file mode 100644 index 0000000000..d5309a5f04 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml @@ -0,0 +1,53 @@ + + + + + + + + + + + + + + + + `id`, `instance`, `name`, `update_time`, + `create_time` + + + + DELETE FROM linkis_ps_instance_info WHERE instance = #{instance} + + + + + + + \ No newline at end of file diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml new file mode 100644 index 0000000000..e7c7558017 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + `id`, `label_key`, `label_value`, `label_feature`, + `label_value_size`, `update_time`, `create_time` + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml new file mode 100644 index 0000000000..730e8a43da --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml @@ -0,0 +1,172 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + `id`,`job_req_id`,`submit_user`,`execute_user`,`labels`,`params`,`status`,`error_code`,`created_time`, + `updated_time`,`instances`,`observe_info` + + + + + + + + UPDATE linkis_ps_job_history_group_history + + status = #{targetStatus} + + + + #{element} + + + + + + UPDATE linkis_ps_job_history_group_history + + status = #{targetStatus}, error_code=21304, error_desc='Automatically killed because entrance is dead' + + + created_time >= #{startDate} + AND instances = #{instanceName} + AND + + #{element} + + + LIMIT 5000 + + + + + + + + + + diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala new file mode 100644 index 0000000000..4154661f07 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor + +import org.apache.linkis.monitor.factory.MapperFactory +import org.apache.linkis.monitor.instance.dao.InstanceInfoDao +import org.apache.linkis.monitor.jobhistory.dao.JobHistoryMapper + +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.context.annotation.{ComponentScan, Configuration} + +import javax.annotation.PostConstruct + +@Configuration +@ComponentScan(Array("org.apache.linkis.monitor.scan", "org.apache.linkis.mybatis")) +class LinkisJobHistoryScanSpringConfiguration { + + @Autowired + private var jobHistoryMapper: JobHistoryMapper = _ + + @Autowired + private var instanceInfoMapper: InstanceInfoDao = _ + + @PostConstruct + def init(): Unit = { + MapperFactory.setJobHistoryMapper(jobHistoryMapper) + MapperFactory.setInstanceInfoMapper(instanceInfoMapper) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala new file mode 100644 index 0000000000..4caccd73a3 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.authentication.AuthenticationStrategy +import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.{EmsListAction, EntranceTaskAction, MonitorResourceAction} +import org.apache.linkis.monitor.response.EntranceTaskResult +import org.apache.linkis.ujes.client.response.EmsListResult + +import java.io.Closeable +import java.util.concurrent.TimeUnit + +abstract class MonitorHTTPClient extends Closeable { + + protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result + + def list(emsListAction: EmsListAction): EmsListResult = { + executeJob(emsListAction).asInstanceOf[EmsListResult] + } + + def entranList(entranceTaskAction: EntranceTaskAction): EntranceTaskResult = { + executeJob(entranceTaskAction).asInstanceOf[EntranceTaskResult] + } + +} + +object MonitorHTTPClient { + + def apply(clientConfig: DWSClientConfig): MonitorHTTPClient = new MonitorHTTPClientClientImpl( + clientConfig + ) + + def apply(serverUrl: String): MonitorHTTPClient = apply(serverUrl, 30000, 10) + + def apply(serverUrl: String, readTimeout: Int, maxConnection: Int): MonitorHTTPClient = + apply(serverUrl, readTimeout, maxConnection, new StaticAuthenticationStrategy, "v1") + + def apply( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorHTTPClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(false) + .loadbalancerEnabled(false) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + + def getDiscoveryClient(serverUrl: String): MonitorHTTPClient = + getDiscoveryClient(serverUrl, 30000, 10) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int + ): MonitorHTTPClient = + getDiscoveryClient( + serverUrl, + readTimeout, + maxConnection, + new StaticAuthenticationStrategy, + "v1" + ) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorHTTPClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(true) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(true) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala new file mode 100644 index 0000000000..5554701571 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.dws.config.DWSClientConfig +import org.apache.linkis.httpclient.request.Action +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.MonitorResourceAction + +class MonitorHTTPClientClientImpl(clientConfig: DWSClientConfig) extends MonitorHTTPClient { + + private val dwsHttpClient = + new DWSHttpClient(clientConfig, "Linkis-MonitorResource-Execution-Thread") + + override protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result = + ujesJobAction match { + + case action: Action => dwsHttpClient.execute(action) + + } + + override def close(): Unit = dwsHttpClient.close() +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala new file mode 100644 index 0000000000..d0660e1116 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.authentication.AuthenticationStrategy +import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.{EmsListAction, MonitorResourceAction} +import org.apache.linkis.ujes.client.response.EmsListResult + +import java.io.Closeable +import java.util.concurrent.TimeUnit + +abstract class MonitorResourceClient extends Closeable { + + protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result + + def list(jobListAction: EmsListAction): EmsListResult = { + executeJob(jobListAction).asInstanceOf[EmsListResult] + } + +} + +object MonitorResourceClient { + + def apply(clientConfig: DWSClientConfig): MonitorResourceClient = new MonitorResourceClientImpl( + clientConfig + ) + + def apply(serverUrl: String): MonitorResourceClient = apply(serverUrl, 30000, 10) + + def apply(serverUrl: String, readTimeout: Int, maxConnection: Int): MonitorResourceClient = + apply(serverUrl, readTimeout, maxConnection, new StaticAuthenticationStrategy, "v1") + + def apply( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorResourceClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(false) + .loadbalancerEnabled(false) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + + def getDiscoveryClient(serverUrl: String): MonitorResourceClient = + getDiscoveryClient(serverUrl, 30000, 10) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int + ): MonitorResourceClient = + getDiscoveryClient( + serverUrl, + readTimeout, + maxConnection, + new StaticAuthenticationStrategy, + "v1" + ) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorResourceClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(true) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(true) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala new file mode 100644 index 0000000000..06cff3b46a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.dws.config.DWSClientConfig +import org.apache.linkis.httpclient.request.Action +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.MonitorResourceAction + +class MonitorResourceClientImpl(clientConfig: DWSClientConfig) extends MonitorResourceClient { + + private val dwsHttpClient = + new DWSHttpClient(clientConfig, "Linkis-MonitorResource-Execution-Thread") + + override protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result = + ujesJobAction match { + + case action: Action => dwsHttpClient.execute(action) + + } + + override def close(): Unit = dwsHttpClient.close() +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala new file mode 100644 index 0000000000..04a0438794 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.constants + +import org.apache.linkis.common.conf.CommonVars + +object Constants { + + val SCAN_PREFIX_ERRORCODE = "jobhistory.errorcode." + val SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC = "jobhistory.unfinished.time.exceed.sec." + val ALERT_RESOURCE_MONITOR = "ecm.resource.monitor.im." 
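[Editor's sketch, not part of the patch] Several constants in this object (see the values below) are CommonVars-backed, so their defaults can be overridden through the loaded properties; a reading sketch with key and default taken from the definitions below:

import org.apache.linkis.common.conf.CommonVars

// Yields 50 unless linkis.monitor.bml.cleaner.version.max.num is overridden
// in the loaded properties (e.g. linkis.properties or -D system properties).
val maxVersionNum: Int =
  CommonVars[Int]("linkis.monitor.bml.cleaner.version.max.num", 50).getValue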
+ + val UNFINISHED_JOB_STATUS = + "Inited,WaitForRetry,Scheduled,Running".split(",").map(s => s.toUpperCase()) + + val FINISHED_JOB_STATUS = + "Succeed,Failed,Cancelled,Timeout".split(",").map(s => s.toUpperCase()) + + val DATA_FINISHED_JOB_STATUS_ARRAY = "Succeed,Failed,Cancelled,Timeout".split(",") + + val DATA_UNFINISHED_JOB_STATUS_ARRAY = + "Inited,WaitForRetry,Scheduled,Running".split(",") + + val ALERT_PROPS_FILE_PATH = CommonVars.properties.getProperty( + "linkis.alert.conf.file.path", + "linkis-et-monitor-file.properties" + ) + + val ALERT_IMS_URL = CommonVars.properties.getProperty( + "linkis.alert.url", + "http://127.0.0.1:10812/ims_data_access/send_alarm.do" + ) + + val ALERT_SUB_SYSTEM_ID = + CommonVars.properties.getProperty("linkis.alert.sub_system_id", "10001") + + val ALERT_DEFAULT_RECEIVERS = CommonVars.properties + .getProperty("linkis.alert.receiver.default", "") + .split(",") + .toSet[String] + + val ALERT_IMS_MAX_LINES = CommonVars[Int]("linkis.alert.content.max.lines", 8).getValue + + val TIMEOUT_INTERVALS_SECONDS = + CommonVars[Long]("linkis.monitor.scanner.timeout.interval.seconds", 1 * 60 * 60).getValue + + val ERRORCODE_MAX_INTERVALS_SECONDS = + CommonVars[Long]("linkis.errorcode.scanner.max.interval.seconds", 1 * 60 * 60).getValue + + val SCAN_RULE_UNFINISHED_JOB_STATUS = + "Inited,WaitForRetry,Scheduled,Running".split(",").map(s => s.toUpperCase()) + + val USER_LABEL_MONITOR = "jobhistory.label.monitor.im." + + val USER_LABEL_TENANT: CommonVars[String] = + CommonVars[String]("linkis.monitor.jobhistory.userLabel.tenant", "{}") + + val USER_RESOURCE_MONITOR = "user.mode.monitor.im." + val BML_CLEAR_IM = "bml.clear.monitor.im." + val THREAD_TIME_OUT_IM = "thread.monitor.timeout.im." + val JOB_RESULT_IM = "jobhistory.result.monitor.im." + + val BML_VERSION_MAX_NUM: CommonVars[Int] = + CommonVars[Int]("linkis.monitor.bml.cleaner.version.max.num", 50) + + val BML_VERSION_KEEP_NUM: CommonVars[Int] = + CommonVars[Int]("linkis.monitor.bml.cleaner.version.keep.num", 20) + + val BML_PREVIOUS_INTERVAL_TIME_DAYS: CommonVars[Long] = + CommonVars[Long]("linkis.monitor.bml.cleaner.previous.interval.days", 30) + + val BML_CLEAN_ONCE_RESOURCE_LIMIT_NUM: CommonVars[Int] = + CommonVars[Int]("linkis.monitor.bml.cleaner.once.limit.num", 100) + + val BML_TRASH_PATH_PREFIX: CommonVars[String] = + CommonVars[String]("linkis.monitor.bml.trash.prefix.path", "hdfs:///tmp/linkis/trash/bml_trash") + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala new file mode 100644 index 0000000000..02fafa56d6 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.constants + +object ScanOperatorEnum extends Enumeration { + type ScanOperatorEnum = Value + val BML_VERSION, JOB_HISTORY = Value +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala new file mode 100644 index 0000000000..bf0508fc38 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.ob + +trait Event { + def isRegistered: Boolean + + def register(observer: Observer): Unit + + def unRegister(observer: Observer): Unit + + def notifyObserver(event: Event, message: Any): Unit +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala new file mode 100644 index 0000000000..aecc9f7fd4 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.core.ob + +trait Observer { + + /** + * Observer Pattern + */ + def update(event: Event, msg: Any): Unit +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java new file mode 100644 index 0000000000..0414266668 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.ob; + + +public class SingleObserverEvent implements Event { + private Observer observer; + + @Override + public boolean isRegistered() { + return observer != null; + } + + @Override + public void register(Observer observer) { + this.observer = observer; + } + + @Override + public void unRegister(Observer observer) { + this.observer = null; + } + + @Override + public void notifyObserver(Event event, Object message) { + observer.update(event, message); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala new file mode 100644 index 0000000000..21a75986a1 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
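[Editor's sketch, not part of the patch] A small wiring sketch for the Event/Observer pair above; the LoggingObserver class is illustrative:

import org.apache.linkis.common.utils.Logging
import org.apache.linkis.monitor.core.ob.{Event, Observer, SingleObserverEvent}

// An observer that only logs what the event delivers.
class LoggingObserver extends Observer with Logging {
  override def update(event: Event, msg: Any): Unit =
    logger.info("event hit, message: " + msg)
}

object ObserverWiringExample {
  def main(args: Array[String]): Unit = {
    val event = new SingleObserverEvent
    event.register(new LoggingObserver) // isRegistered is true from here on
    event.notifyObserver(event, "hello") // delegates to LoggingObserver.update
  }
}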
+ */
+
+package org.apache.linkis.monitor.core.pac
+
+abstract class AbstractDataFetcher(customName: String = "") extends DataFetcher {
+
+  private val name: String = if (customName.nonEmpty) {
+    customName
+  } else {
+    this.getClass.getName + "@" + Integer.toHexString(this.hashCode)
+  }
+
+  def getName(): String = this.name
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala
new file mode 100644
index 0000000000..eedf532238
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.core.pac
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+
+abstract class AbstractScanRule(customName: String = "", event: Event, observer: Observer)
+    extends ScanRule
+    with Logging {
+  event.register(observer)
+
+  private val name: String = if (customName.nonEmpty) {
+    customName
+  } else {
+    this.getClass.getName + "@" + Integer.toHexString(this.hashCode)
+  }
+
+  def getName(): String = this.name
+
+  /**
+   * register an observer that is triggered when this rule matches
+   *
+   * @param observer
+   */
+  override def addObserver(observer: Observer): Unit = event.register(observer)
+
+  /**
+   * return the registered event
+   *
+   * @return
+   */
+  override def getHitEvent(): Event = event
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala
new file mode 100644
index 0000000000..3597eb78d0
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import java.util + +class BaseScannedData(owner: String, data: util.List[scala.Any]) extends ScannedData { + override def getOwner(): String = this.owner + + override def getData(): util.List[scala.Any] = this.data +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala new file mode 100644 index 0000000000..3b86ce6c3d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import java.util + +/** + * ScanOperator should encapsulate lower-level client for accessing data from an arbitrary + * datasource. e.g. if we want to scan a DB table. Then operator should encapsulate a DAO + */ +trait DataFetcher { + def getName(): String + + /** + * get arguments for querying data + * + * @return + */ + def getArgs(): Array[scala.Any] + + /** + * make a query to mysql/hive etc. given args + */ + def getData(): util.List[scala.Any] +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala new file mode 100644 index 0000000000..8518738606 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
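[Editor's sketch, not part of the patch] A minimal DataFetcher implementation against the contract above, returning an in-memory list instead of querying a real datasource; class name and data are illustrative:

import org.apache.linkis.monitor.core.pac.AbstractDataFetcher

import java.util

// Serves a fixed list; a real fetcher would wrap a DAO call instead.
class StaticDataFetcher(rows: util.List[scala.Any]) extends AbstractDataFetcher("static-fetcher") {
  override def getArgs(): Array[scala.Any] = Array.empty
  override def getData(): util.List[scala.Any] = rows
}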
+ */
+
+package org.apache.linkis.monitor.core.pac
+
+import java.util
+import java.util.concurrent.LinkedBlockingDeque
+
+class ScanBuffer {
+  val buffer: LinkedBlockingDeque[ScannedData] = new LinkedBlockingDeque[ScannedData]
+
+  def write(data: ScannedData): Unit = buffer.add(data)
+
+  def write(data: util.List[ScannedData]): Unit = buffer.addAll(data)
+
+  def drain(maxSize: Int = -1): util.List[ScannedData] = {
+    val ret = new util.LinkedList[ScannedData]
+    val realSize = if (maxSize < 0) {
+      buffer.size
+    } else {
+      maxSize
+    }
+    buffer.drainTo(ret, realSize)
+    ret
+  }
+
+  def size(): Int = buffer.size()
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala
new file mode 100644
index 0000000000..fa599c4a68
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.core.pac
+
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+
+import java.util
+
+trait ScanRule {
+
+  def getName(): String
+
+  /**
+   * register an observer that is triggered when this rule matches
+   *
+   * @param observer
+   */
+  def addObserver(observer: Observer): Unit
+
+  /**
+   * return the registered event
+   *
+   * @return
+   */
+  def getHitEvent(): Event
+
+  /**
+   * if the data matches the pattern, trigger the registered observer and return true
+   *
+   * @param data
+   * @return
+   */
+  def triggerIfMatched(data: util.List[ScannedData]): Boolean
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala
new file mode 100644
index 0000000000..2c4c1e0ded
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import java.util + +trait ScannedData { + def getOwner(): String + + def getData(): util.List[scala.Any] +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala new file mode 100644 index 0000000000..4f207cd697 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala @@ -0,0 +1,164 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
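[Editor's sketch, not part of the patch] A round-trip sketch for ScanBuffer and the ScannedData types above:

import org.apache.linkis.monitor.core.pac.{BaseScannedData, ScanBuffer}

import java.util

object ScanBufferExample {
  def main(args: Array[String]): Unit = {
    val buffer = new ScanBuffer
    val rows = new util.ArrayList[scala.Any]()
    rows.add("row-1")
    buffer.write(new BaseScannedData("owner-a", rows))
    val drained = buffer.drain() // default maxSize = -1 empties the whole buffer
    assert(drained.size() == 1 && buffer.size() == 0)
  }
}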
+ */
+
+package org.apache.linkis.monitor.core.scanner
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+import org.apache.linkis.monitor.core.pac._
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+
+import java.util
+import java.util.concurrent.CopyOnWriteArrayList
+import java.util.concurrent.atomic.AtomicInteger
+
+abstract class AbstractScanner extends AnomalyScanner with Logging {
+  private val buffer: ScanBuffer = new ScanBuffer
+
+  private val dataFetcherIdx: AtomicInteger =
+    new AtomicInteger(0) // marks the next fetcher, so data is produced sequentially
+
+  private val dataFetcherList: CopyOnWriteArrayList[DataFetcher] =
+    new CopyOnWriteArrayList[DataFetcher]
+
+  private val scanRuleList: CopyOnWriteArrayList[ScanRule] = new CopyOnWriteArrayList[ScanRule]
+
+  /**
+   * Producer
+   */
+  override def addDataFetcher(fetcher: DataFetcher): Unit = {
+    if (fetcher != null) {
+      dataFetcherList.add(fetcher)
+    } else {
+      logger.warn("ignore null DataFetcher")
+    }
+  }
+
+  override def addDataFetchers(fetchers: util.List[DataFetcher]): Unit = {
+    if (fetchers != null && fetchers.size != 0) {
+      dataFetcherList.addAll(fetchers)
+    } else {
+      logger.warn("ignore null or empty DataFetcher")
+    }
+  }
+
+  override def getDataFetchers: util.List[DataFetcher] = dataFetcherList
+
+  /**
+   * directly feed data to buffer
+   */
+  override def feedData(data: util.List[ScannedData]): Unit = {
+    if (data != null && data.size != 0) {
+      buffer.write(data)
+    } else {
+      logger.warn("Fed with null or empty data")
+    }
+  }
+
+  /**
+   * Returns a buffer that supports simultaneous read/write; the buffer may be written by other
+   * threads
+   */
+  override def getBuffer(): ScanBuffer = buffer
+
+  /**
+   * add rules to scanner
+   */
+  override def addScanRule(rule: ScanRule): Unit = {
+    if (rule != null) {
+      scanRuleList.add(rule)
+    } else {
+      logger.warn("ignore null ScanRule")
+    }
+  }
+
+  override def addScanRules(rules: util.List[ScanRule]): Unit = {
+    if (rules != null && rules.size != 0) {
+      scanRuleList.addAll(rules)
+    } else {
+      logger.warn("ignore null or empty ScanRule")
+    }
+  }
+
+  override def getScanRules(): util.List[ScanRule] = scanRuleList
+
+  /**
+   * blocking call, scan and analyze until all dataFetchers are accessed once
+   */
+  override def run(): Unit = {
+    if (dataFetcherList.size() == 0) {
+      throw new AnomalyScannerException(21304, "attempting to run scanner with empty dataFetchers")
+    }
+    if (buffer == null) {
+      throw new AnomalyScannerException(21304, "attempting to run scanner with null buffer")
+    }
+    if (scanRuleList.size == 0) {
+      throw new AnomalyScannerException(21304, "attempting to run scanner with empty rules")
+    }
+    while (dataFetcherIdx.get() < dataFetcherList.size()) {
+      scanOneIteration()
+      analyzeOneIteration()
+    }
+  }
+
+  /**
+   * 1. scan data for 1 iteration 2. should be a blocking call 3. see if [[ScanRule]] is matched
+   * 4. trigger [[Event]] and inform observer
+   */
+  override def scanOneIteration(): Unit = {
+    val idx = dataFetcherIdx.getAndIncrement()
+    val fetcher = dataFetcherList.get(idx)
+    if (fetcher != null) {
+      val rawData = fetcher.getData()
+      if (rawData != null && rawData.size != 0) {
+        logger.info("scanned " + rawData.size + " data. Fetcher: " + fetcher.getName)
+        buffer.write(new BaseScannedData(fetcher.getName, rawData))
+      } else {
+        logger.info("scanned 0 data. Fetcher: " + fetcher.getName)
+      }
+    } else {
+      logger.warn("ignored null fetcher!!")
+    }
+  }
+
+  /**
+   * 1. should be a blocking call 2. read from [[ScanBuffer]] 3. see if [[ScanRule]] is matched 4.
+ * trigger [[Observer]]
+   */
+  override def analyzeOneIteration(): Unit = {
+    val dataToAnalyze = buffer.drain()
+    if (dataToAnalyze != null && dataToAnalyze.size() != 0) {
+      val len = scanRuleList.size()
+      for (i <- 0 until len) {
+        val scanRule = scanRuleList.get(i)
+        if (scanRule != null) {
+          logger.info("analyzing " + dataToAnalyze.size + " data. Rule: " + scanRule.getName)
+          scanRule.triggerIfMatched(dataToAnalyze)
+        } else {
+          logger.warn("found empty or null ScanRule")
+        }
+      }
+    } else {
+      logger.info("analyzed 0 data.")
+    }
+  }
+
+  /**
+   * 1. should be non-blocking 2. keeps calling scanOneIteration() and analyzeOneIteration() until
+   * stop() is called
+   */
+  override def start(): Unit = {}
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala
new file mode 100644
index 0000000000..7fa84d3879
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.core.scanner
+
+import org.apache.linkis.monitor.core.ob.Event
+import org.apache.linkis.monitor.core.pac.{DataFetcher, ScanBuffer, ScannedData, ScanRule}
+
+import java.util
+
+/**
+ * A Scanner that:
+ *   1. scans a datasource using [[DataFetcher]] and writes data into [[ScanBuffer]] 2. reads data
+ *      from [[ScanBuffer]] to see if any [[ScanRule]] is matched 3. triggers the [[Event]] in
+ *      [[ScanRule]] and informs its observer
+ */
+trait AnomalyScanner {
+
+  /**
+   * Producer
+   */
+  def addDataFetcher(dataFetcher: DataFetcher): Unit
+
+  def addDataFetchers(dataFetchers: util.List[DataFetcher]): Unit
+
+  def getDataFetchers: util.List[DataFetcher]
+
+  /**
+   * directly feed data to buffer
+   */
+  def feedData(data: util.List[ScannedData]): Unit
+
+  /**
+   * Buffer
+   */
+
+  /**
+   * add rules to scanner
+   */
+  def addScanRule(rule: ScanRule): Unit
+
+  def addScanRules(rules: util.List[ScanRule]): Unit
+
+  /**
+   * Consumer
+   */
+
+  def getScanRules(): util.List[ScanRule]
+
+  /**
+   * scan and analyze for 1 iteration
+   */
+  def run(): Unit
+
+  /**
+   * 1. should be non-blocking 2. keeps calling run() until stop() is called
+   */
+  def start(): Unit
+
+  def shutdown(): Unit
+
+  /**
+   * 1. should be a blocking call 2. call [[DataFetcher]] to read data 3. write result to
+   * [[ScanBuffer]]
+   */
+  protected def scanOneIteration(): Unit
+
+  /**
+   * Returns a buffer that supports simultaneous read/write; the buffer may be written by other
+   * threads
+   */
+  protected def getBuffer(): ScanBuffer
+
+  /**
+   * 1.
should be a blocking call 2. read from [[ScanBuffer]] 3. see if [[ScanRule]] is matched 4.
+   * trigger [[org.apache.linkis.monitor.core.ob.Observer]]
+   */
+  protected def analyzeOneIteration(): Unit
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala
new file mode 100644
index 0000000000..80ab7a5498
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.core.scanner
+
+import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils
+
+class DefaultScanner extends AbstractScanner {
+
+  override def shutdown(): Unit = {
+    PooledImsAlertUtils.shutDown(true, -1)
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala
new file mode 100644
index 0000000000..eb503c52aa
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
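[Editor's sketch, not part of the patch] How the pieces assemble: `fetcher` and `rule` stand for any DataFetcher/ScanRule implementation, for instance the JobHistory ones later in this patch:

import org.apache.linkis.monitor.core.pac.{DataFetcher, ScanRule}
import org.apache.linkis.monitor.core.scanner.DefaultScanner

def runOnce(fetcher: DataFetcher, rule: ScanRule): Unit = {
  val scanner = new DefaultScanner
  scanner.addDataFetcher(fetcher) // e.g. a JobHistoryDataFetcher
  scanner.addScanRule(rule) // e.g. a JobHistoryErrCodeRule wired to an alert sender
  scanner.run() // blocks: polls each fetcher once, then applies every rule to the drained data
  scanner.shutdown() // shuts down the pooled IMS alert sender
}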
+ */
+
+package org.apache.linkis.monitor.factory
+
+import org.apache.linkis.monitor.instance.dao.{
+  InsLabelRelationDao,
+  InstanceInfoDao,
+  InstanceLabelDao
+}
+import org.apache.linkis.monitor.jobhistory.dao.JobHistoryMapper
+
+object MapperFactory {
+
+  private var jobHistoryMapper: JobHistoryMapper = _
+
+  private var instanceInfoMapper: InstanceInfoDao = _
+
+  private var instanceLabelMapper: InstanceLabelDao = _
+
+  private var instanceLabelRelationMapper: InsLabelRelationDao = _
+
+  def getJobHistoryMapper(): JobHistoryMapper = jobHistoryMapper
+
+  def setJobHistoryMapper(jobHistoryMapper: JobHistoryMapper): Unit = {
+    MapperFactory.jobHistoryMapper = jobHistoryMapper
+  }
+
+  def getInstanceInfoMapper(): InstanceInfoDao = instanceInfoMapper
+
+  def setInstanceInfoMapper(instanceInfoMapper: InstanceInfoDao): Unit = {
+    MapperFactory.instanceInfoMapper = instanceInfoMapper
+  }
+
+  def getInstanceLabelMapper(): InstanceLabelDao = instanceLabelMapper
+
+  def setInstanceLabelMapper(instanceLabelMapper: InstanceLabelDao): Unit = {
+    MapperFactory.instanceLabelMapper = instanceLabelMapper
+  }
+
+  def getInsLabelRelationMapper(): InsLabelRelationDao = instanceLabelRelationMapper
+
+  def setInsLabelRelationMapper(instanceLabelRelationMapper: InsLabelRelationDao): Unit = {
+    MapperFactory.instanceLabelRelationMapper = instanceLabelRelationMapper
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala
new file mode 100644
index 0000000000..fb371a658d
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory
+
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.pac.AbstractDataFetcher
+import org.apache.linkis.monitor.jobhistory.dao.JobHistoryMapper
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+
+import org.apache.commons.lang3.StringUtils
+
+import java.util
+import java.util.Date
+
+class JobHistoryDataFetcher(args: Array[Any], mapper: JobHistoryMapper)
+    extends AbstractDataFetcher
+    with Logging {
+
+  /**
+   * get arguments for querying data: JobHistory records starting from startTimeMs and ending at
+   * startTimeMs + intervalsMs
+   *
+   * @return
+   */
+  override def getArgs(): Array[Any] = args
+
+  /**
+   * 1.
get Data given the arguments above
+   */
+  override def getData(): util.List[scala.Any] = {
+    // check null first: building the error message below dereferences args
+    if (args == null || !args.isInstanceOf[Array[String]]) {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong input for JobHistoryDataFetcher. DataType: " +
+          (if (args == null) "null" else args.getClass.getCanonicalName)
+      )
+    }
+    def parseTimestamp(idx: Int): Long = Utils.tryCatch(args(idx).asInstanceOf[String].toLong) {
+      t =>
+        logger.error("Failed to get data from DB: Illegal arguments.", t)
+        throw t
+    }
+    if (args.length == 2) {
+      mapper
+        .search(null, null, null, new Date(parseTimestamp(0)), new Date(parseTimestamp(1)), null)
+        .asInstanceOf[util.List[scala.Any]]
+    } else if (args.length == 4) {
+      val start = parseTimestamp(0)
+      val end = parseTimestamp(1)
+      val id = parseTimestamp(2)
+      if (
+        StringUtils.isNotBlank(args(3).asInstanceOf[String]) && args(3)
+          .asInstanceOf[String]
+          .equals("updated_time")
+      ) {
+        val list = new util.ArrayList[String]()
+        Constants.DATA_FINISHED_JOB_STATUS_ARRAY.foreach(list.add)
+        mapper
+          .searchByCacheAndUpdateTime(id, null, list, new Date(start), new Date(end), null)
+          .asInstanceOf[util.List[scala.Any]]
+      } else {
+        val list = new util.ArrayList[String]()
+        Constants.DATA_UNFINISHED_JOB_STATUS_ARRAY.foreach(list.add)
+        mapper
+          .searchByCache(id, null, list, new Date(start), new Date(end), null)
+          .asInstanceOf[util.List[scala.Any]]
+      }
+    } else {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong input for JobHistoryDataFetcher. Data: " + args.mkString(", ")
+      )
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala
new file mode 100644
index 0000000000..4e36d44d86
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
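[Editor's sketch, not part of the patch] A construction sketch for the fetcher above; the two-element form selects the plain time-window query, and the Array[String]-to-Array[Any] cast keeps the runtime class String[], which getData() verifies (timestamps are illustrative):

import org.apache.linkis.monitor.factory.MapperFactory
import org.apache.linkis.monitor.jobhistory.JobHistoryDataFetcher

import java.util

def fetchLastHour(): util.List[scala.Any] = {
  val end = System.currentTimeMillis()
  val start = end - 60 * 60 * 1000L // one-hour window
  val fetcher = new JobHistoryDataFetcher(
    Array(start.toString, end.toString).asInstanceOf[Array[Any]],
    MapperFactory.getJobHistoryMapper()
  )
  fetcher.getData() // delegates to mapper.search(...) over [start, end]
}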
+ */
+
+package org.apache.linkis.monitor.jobhistory.errorcode
+
+import org.apache.linkis.monitor.core.ob.SingleObserverEvent
+
+class JobHistoryErrCodeHitEvent extends SingleObserverEvent
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala
new file mode 100644
index 0000000000..d354f7f7b0
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.errorcode
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.core.ob.Observer
+import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.until.CacheUtils
+
+import java.util
+
+import scala.collection.JavaConverters._
+
+/**
+ * Monitors the error codes of executed tasks. When script tasks are executed, their error codes
+ * are recorded in the database; this rule raises an alarm for every record whose error code is in
+ * the configured set (e.g. 11001, 11002).
+ */
+class JobHistoryErrCodeRule(errorCodes: util.Set[String], hitObserver: Observer)
+    extends AbstractScanRule(event = new JobHistoryErrCodeHitEvent, observer = hitObserver)
+    with Logging {
+  private val scanRuleList = CacheUtils.cacheBuilder
+
+  /**
+   * if the data matches the pattern, trigger the registered observer and return true
+   *
+   * @param data
+   * @return
+   */
+  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
+
+    if (!getHitEvent().isRegistered || null == data) {
+      logger.error("ScanRule is not bound to an observer.
Will not be triggered") + return false + } + + val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]() + for (sd <- data.asScala) { + if (sd != null && sd.getData() != null) { + for (d <- sd.getData().asScala) { + d match { + case history: JobHistory => + if (errorCodes.contains(String.valueOf(history.getErrorCode))) { + alertData.add(history) + } + scanRuleList.put("jobHistoryId", history.getId) + case _ => + logger.warn( + "Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName + ) + } + } + } else { + logger.warn("Ignored null scanned data") + } + + } + logger.info("hit " + alertData.size() + " data in one iteration") + if (alertData.size() > 0) { + getHitEvent().notifyObserver(getHitEvent(), alertData) + true + } else { + false + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala new file mode 100644 index 0000000000..7f3d8e10cb --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.errorcode + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc +import org.apache.linkis.monitor.utils.alert.ims.{ImsAlertDesc, PooledImsAlertUtils} + +import java.util + +import scala.collection.JavaConverters._ + +class JobHistoryErrorCodeAlertSender(alerts: util.Map[String, AlertDesc]) + extends Observer + with Logging { + + override def update(e: Event, jobHistoryList: scala.Any): Unit = { + if (!e.isInstanceOf[JobHistoryErrCodeHitEvent]) { + throw new AnomalyScannerException( + 21304, + "Wrong event that triggers JobHistoryErrorCodeAlertSender. Input DataType: " + e.getClass.getCanonicalName + ) + } + if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryErrorCodeAlertSender. 
Input DataType: " + jobHistoryList.getClass.getCanonicalName + ) + } + val toSend = new util.HashMap[String, ImsAlertDesc] + for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) { + if (a == null) { + logger.warn("Ignore null input data") + } else if (!a.isInstanceOf[JobHistory]) { + logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) + } else { + val jobHistory = a.asInstanceOf[JobHistory] + val errorCode = String.valueOf(jobHistory.getErrorCode) + if (alerts.containsKey(errorCode) && alerts.get(errorCode).isInstanceOf[ImsAlertDesc]) { + val alert = if (!toSend.containsKey(errorCode)) { + alerts.get(errorCode).asInstanceOf[ImsAlertDesc] + } else { + toSend.get(errorCode) + } + + var newInfo = if (!toSend.containsKey(errorCode)) { + alert.alertInfo + "\n" + + "[error_code] " + jobHistory.getErrorCode + ", " + jobHistory.getErrorDesc + "\n" + } else { + alert.alertInfo + } + newInfo = newInfo + + "[job-info] " + + "submit-user: " + jobHistory.getSubmitUser + ", " + + "execute-user: " + jobHistory.getExecuteUser + ", " + + "engine_type: " + jobHistory.getEngineType + ", " + + "create_time: " + jobHistory.getCreatedTime + ", " + + "instance: " + jobHistory.getInstances + ". \n" + val newNumHit = alert.numHit + 1 + toSend.put(errorCode, alert.copy(alertInfo = newInfo, numHit = newNumHit)) + } else if (!alerts.containsKey(errorCode)) { + logger.warn("Ignored unregistered error code: " + errorCode) + } else if (!alerts.get(errorCode).isInstanceOf[ImsAlertDesc]) { + logger.warn( + "Ignored invalid alertDesc. DataType: " + alerts + .get(errorCode) + .getClass + .getCanonicalName + ) + } + } + } + for ((_, alert) <- toSend.asScala) { + PooledImsAlertUtils.addAlert(alert) + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala new file mode 100644 index 0000000000..0a53142eb7 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.jobhistory.jobtime
+
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.monitor.config.MonitorConfig
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+import org.apache.linkis.monitor.utils.alert.AlertDesc
+import org.apache.linkis.monitor.utils.alert.ims.{ImsAlertDesc, PooledImsAlertUtils}
+
+import java.text.MessageFormat
+import java.util
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
+
+class JobTimeExceedAlertSender(alerts: util.Map[String, AlertDesc]) extends Observer with Logging {
+
+  // thresholds in ascending order, so the loop below keeps the largest one not exceeding elapse
+  private val orderedThresholds: Array[Long] = {
+    val ret = new ArrayBuffer[Long]()
+    if (alerts != null) {
+      for (k <- alerts.keySet().asScala) {
+        Utils.tryCatch(ret.append(k.toLong)) { t =>
+          logger.warn("Ignored illegal threshold: " + k, t)
+        }
+      }
+    }
+    ret.toArray.sorted
+  }
+
+  override def update(e: Event, jobHistoryList: scala.Any): Unit = {
+    if (!e.isInstanceOf[JobTimeExceedHitEvent]) {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong event that triggers JobTimeExceedAlertSender. Input DataType: " + e.getClass.getCanonicalName
+      )
+    }
+    if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong input for JobTimeExceedAlertSender. Input DataType: " + jobHistoryList.getClass.getCanonicalName
+      )
+    }
+    if (orderedThresholds.length == 0) {
+      logger.warn("Found none legal threshold, will not send any alert: " + this)
+      return
+    }
+    for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) {
+      if (a == null) {
+        logger.warn("Ignore null input data")
+      } else if (!a.isInstanceOf[JobHistory]) {
+        logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName)
+      } else {
+        val jobHistory = a.asInstanceOf[JobHistory]
+        val elapse = System.currentTimeMillis() - jobHistory.getCreatedTime.getTime
+        var ts = 0L
+        for (t <- orderedThresholds) { // search max threshold that is smaller than elapse
+          if (elapse >= t) {
+            ts = t
+          }
+        }
+        alerts.get(ts.toString) match {
+          case alert: ImsAlertDesc =>
+            val newInfo = MessageFormat.format(
+              MonitorConfig.TASK_RUNTIME_TIMEOUT_DESC.getValue,
+              jobHistory.getId,
+              (elapse / 1000 / 60 / 60).toString,
+              jobHistory.getInstances,
+              MonitorConfig.SOLUTION_URL.getValue
+            )
+            val newNumHit = alert.numHit + 1
+            val receiver = new util.HashSet[String]()
+            receiver.add(jobHistory.getSubmitUser)
+            receiver.add(jobHistory.getExecuteUser)
+            receiver.addAll(alert.alertReceivers)
+            PooledImsAlertUtils.addAlert(
+              alert.copy(alertInfo = newInfo, alertReceivers = receiver, numHit = newNumHit)
+            )
+          case _ =>
+            // no alert registered for this threshold; guards the NPE when no threshold matched
+            logger.warn("Ignored alert for unmatched threshold: " + ts)
+        }
+      }
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala
new file mode 100644
index 0000000000..96c0b4206a
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF)
under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.jobtime
+
+import org.apache.linkis.monitor.core.ob.SingleObserverEvent
+
+class JobTimeExceedHitEvent extends SingleObserverEvent
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala
new file mode 100644
index 0000000000..f788173e43
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.jobtime
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.Observer
+import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+import org.apache.linkis.monitor.until.CacheUtils
+
+import java.util
+import java.util.Locale
+
+import scala.collection.JavaConverters._
+
+/**
+ * Monitors the execution status of tasks, scanning records older than 12 hours but within 24
+ * hours. If any record in scope has a status of Inited, WaitForRetry, Scheduled or Running, an
+ * alarm is triggered.
+class JobTimeExceedRule(thresholds: util.Set[String], hitObserver: Observer)
+    extends AbstractScanRule(event = new JobTimeExceedHitEvent, observer = hitObserver)
+    with Logging {
+
+  private val threshold: Long = {
+    if (thresholds == null) {
+      throw new AnomalyScannerException(21304, "thresholds should not be null")
+    }
+    var t = Long.MaxValue // pick the smallest configured threshold
+    for (k <- thresholds.asScala) {
+      if (k != null) {
+        if (t > k.toLong) {
+          t = k.toLong
+        }
+      } else {
+        logger.warn("Ignored null input")
+      }
+    }
+    t
+  }
+
+  private val scanRuleList = CacheUtils.cacheBuilder
+
+  /**
+   * If the scanned data matches the pattern, notify the registered observer and return true.
+   *
+   * @param data
+   * @return
+   */
+  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
+    if (!getHitEvent.isRegistered || data == null) {
+      logger.error("ScanRule is not bound to an observer or input data is null. Will not be triggered")
+      return false
+    }
+    val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]()
+    for (sd <- data.asScala) {
+      if (sd != null && sd.getData() != null) {
+        for (d <- sd.getData().asScala) {
+          if (d.isInstanceOf[JobHistory]) {
+            val jobHistory = d.asInstanceOf[JobHistory]
+            val status = jobHistory.getStatus.toUpperCase(Locale.getDefault)
+            if (Constants.UNFINISHED_JOB_STATUS.contains(status)) {
+              val elapse = System.currentTimeMillis() - jobHistory.getCreatedTime.getTime
+              if (elapse / 1000 >= threshold) { // threshold is compared in seconds
+                alertData.add(jobHistory)
+              }
+            }
+            scanRuleList.put("jobhistoryScan", jobHistory.getId)
+          } else {
+            logger.warn("Ignored wrong input data type: " + d + ", " + d.getClass.getCanonicalName)
+          }
+        }
+      } else {
+        logger.warn("Ignored null scanned data")
+      }
+    }
+    logger.info("hit " + alertData.size() + " data in one iteration")
+    if (alertData.size() > 0) {
+      getHitEvent.notifyObserver(getHitEvent, alertData)
+      true
+    } else {
+      false
+    }
+  }
+
+}
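Rule, hit event, and sender follow the module's observer pattern: the rule filters scanned JobHistory records, the event fans a hit out, and the sender turns it into IMS alerts. A hypothetical wiring sketch, assuming the constructors introduced in this patch (the production bootstrap lives elsewhere in the module):

```scala
package org.apache.linkis.monitor.jobhistory.jobtime

import java.util

import org.apache.linkis.monitor.utils.alert.AlertDesc

// Hypothetical helper: wires the rule to its sender via JobTimeExceedHitEvent.
object JobTimeExceedWiring {

  def buildRule(alerts: util.Map[String, AlertDesc]): JobTimeExceedRule = {
    val sender = new JobTimeExceedAlertSender(alerts) // Observer for JobTimeExceedHitEvent
    // AbstractScanRule is expected to register the observer on the event it receives.
    new JobTimeExceedRule(alerts.keySet(), sender)
  }
}
```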
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala
new file mode 100644
index 0000000000..7876156b1d
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.labels
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+import org.apache.linkis.monitor.utils.alert.AlertDesc
+import org.apache.linkis.monitor.utils.alert.ims.{PooledImsAlertUtils, UserLabelAlertUtils}
+import org.apache.linkis.server.BDPJettyServerHelper
+
+import java.util
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
+
+class JobHistoryLabelsAlertSender() extends Observer with Logging {
+
+  override def update(e: Event, jobHistoryList: scala.Any): Unit = {
+    if (!e.isInstanceOf[JobHistoryLabelsHitEvent]) {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong event that triggers JobHistoryLabelsAlertSender. Input DataType: " + e.getClass.getCanonicalName
+      )
+    }
+    if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) {
+      val dataType = if (null == jobHistoryList) "null" else jobHistoryList.getClass.getCanonicalName
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong input for JobHistoryLabelsAlertSender. Input DataType: " + dataType
+      )
+    }
+    val toSend = new ArrayBuffer[String]
+    for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) {
+      if (a == null) {
+        logger.warn("Ignored null input data")
+      } else if (!a.isInstanceOf[JobHistory]) {
+        logger.warn("Ignored wrong input data type: " + a.getClass.getCanonicalName)
+      } else {
+        val jobHistory = a.asInstanceOf[JobHistory]
+        toSend.append(jobHistory.getLabels)
+      }
+    }
+    for (str <- toSend.distinct) {
+      val labelsMap: util.Map[String, String] =
+        BDPJettyServerHelper.gson.fromJson(str, classOf[java.util.Map[String, String]])
+      val alerts: util.Map[String, AlertDesc] =
+        UserLabelAlertUtils.getAlerts(Constants.USER_LABEL_MONITOR, labelsMap.get("userCreator"))
+      PooledImsAlertUtils.addAlert(alerts.get("12010")) // alert code key from the IMS alert properties
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala
new file mode 100644
index 0000000000..d51c3c424b
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.labels
+
+import org.apache.linkis.monitor.core.ob.SingleObserverEvent
+
+class JobHistoryLabelsHitEvent extends SingleObserverEvent
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala
new file mode 100644
index 0000000000..70d309b883
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.labels
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.Observer
+import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.until.CacheUtils
+import org.apache.linkis.server.BDPJettyServerHelper
+
+import org.apache.commons.lang3.StringUtils
+
+import java.util
+
+import scala.collection.JavaConverters._
+
+import com.google.common.collect.HashBiMap
+
+/**
+ * Scans job data from the previous 20 minutes and checks the labels field of each record
+ * against the monitor configuration (linkis.monitor.jobhistory.userLabel.tenant).
+ */
+class JobHistoryLabelsRule(hitObserver: Observer)
+    extends AbstractScanRule(event = new JobHistoryLabelsHitEvent, observer = hitObserver)
+    with Logging {
+
+  private val scanRuleList = CacheUtils.cacheBuilder
+
+  /**
+   * If the scanned data matches the pattern, notify the registered observer and return true.
+   *
+   * @param data
+   * @return
+   */
+  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
+    if (!getHitEvent.isRegistered || null == data) {
+      logger.error("ScanRule is not bound to an observer or input data is null. Will not be triggered")
+      return false
+    }
+    val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]()
+    for (sd <- data.asScala) {
+      if (sd != null && sd.getData() != null) {
+        for (d <- sd.getData().asScala) {
+          if (d.isInstanceOf[JobHistory]) {
+            logger.info("Start JobHistoryLabelsRule, data: {}", d)
+            val jobHistory = d.asInstanceOf[JobHistory]
+            val labels = jobHistory.getLabels
+            val labelsMap: util.Map[String, String] =
+              BDPJettyServerHelper.gson.fromJson(labels, classOf[java.util.Map[String, String]])
+            val userCreator = labelsMap.get("userCreator")
+            val tenant = labelsMap.get("tenant")
+            if (StringUtils.isNotBlank(userCreator)) {
+              val configMap = BDPJettyServerHelper.gson.fromJson(
+                Constants.USER_LABEL_TENANT.getValue,
+                classOf[java.util.Map[String, String]]
+              )
+              // forward check: a configured userCreator key matches, but the tenant differs
+              val listIterator = configMap.keySet.iterator
+              while (listIterator.hasNext) {
+                val next = listIterator.next
+                if (userCreator.contains(next)) {
+                  val value = configMap.get(next)
+                  if (!value.equals(tenant)) {
+                    alertData.add(jobHistory)
+                  }
+                }
+              }
+              // reverse check: the tenant is configured, but for a different userCreator key
+              if (configMap.values().contains(tenant)) {
+                val bimap: HashBiMap[String, String] = HashBiMap.create(configMap)
+                val key = bimap.inverse().get(tenant)
+                if (!userCreator.contains(key)) {
+                  alertData.add(jobHistory)
+                }
+              }
+            }
+            scanRuleList.put("jobHistoryId", jobHistory.getId)
+          } else {
+            logger.warn("Ignored wrong input data type: " + d + ", " + d.getClass.getCanonicalName)
+          }
+        }
+      } else {
+        logger.warn("Ignored null scanned data")
+      }
+    }
+    logger.info("hit " + alertData.size() + " data in one iteration")
+    if (alertData.size() > 0) {
+      getHitEvent.notifyObserver(getHitEvent, alertData)
+      true
+    } else {
+      false
+    }
+  }
+
+}
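The two label checks above can be read as one predicate over (userCreator, tenant) pairs. A standalone sketch with invented configuration data:

```scala
import com.google.common.collect.HashBiMap

import scala.collection.JavaConverters._

// Illustrative condensation of the two tenant checks in JobHistoryLabelsRule.
object TenantCheckSketch {

  def violates(config: java.util.Map[String, String], userCreator: String, tenant: String): Boolean = {
    // forward: a configured userCreator key matches, but the tenant differs
    val forward = config.asScala.exists { case (k, v) => userCreator.contains(k) && v != tenant }
    // reverse: the tenant is configured, but for a different userCreator key
    val reverse = config.containsValue(tenant) &&
      !userCreator.contains(HashBiMap.create(config).inverse().get(tenant))
    forward || reverse
  }

  def main(args: Array[String]): Unit = {
    val config = new java.util.HashMap[String, String]()
    config.put("hadoop", "tenantA") // invented mapping: userCreator key -> expected tenant
    assert(violates(config, "hadoop-IDE", "tenantB"))  // hadoop job under the wrong tenant
    assert(!violates(config, "hadoop-IDE", "tenantA")) // matches the configured tenant
  }
}
```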
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala
new file mode 100644
index 0000000000..77d904fe4d
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.Observer
+import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+
+import org.apache.commons.lang3.StringUtils
+
+import java.util
+
+import scala.collection.JavaConverters._
+
+/**
+ * Scans job data from the first 20 minutes and triggers an alarm for records that meet both
+ * conditions: 1. the ObserveInfo field is not blank; 2. the task has finished with one of the
+ * statuses Succeed, Failed, Cancelled or Timeout.
+ */
+class CommonJobRunTimeRule(hitObserver: Observer)
+    extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver)
+    with Logging {
+
+  /**
+   * If the scanned data matches the pattern, notify the registered observer and return true.
+   *
+   * @param data
+   * @return
+   */
+  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
+    if (!getHitEvent.isRegistered || null == data) {
+      logger.error("ScanRule is not bound to an observer or input data is null. Will not be triggered")
+      return false
+    }
+    val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]()
+    for (sd <- data.asScala) {
+      if (sd != null && sd.getData() != null) {
+        for (d <- sd.getData().asScala) {
+          d match {
+            case jobHistory: JobHistory =>
+              if (
+                Constants.FINISHED_JOB_STATUS.contains(jobHistory.getStatus.toUpperCase())
+                && StringUtils.isNotBlank(jobHistory.getObserveInfo)
+              ) {
+                alertData.add(jobHistory)
+              } else {
+                logger.warn("JobHistory is not finished or has no observeInfo, taskId: " + jobHistory.getId)
+              }
+            case _ =>
+          }
+        }
+      } else {
+        logger.warn("Ignored null scanned data")
+      }
+    }
+    logger.info("hit " + alertData.size() + " data in one iteration")
+    if (alertData.size() > 0) {
+      getHitEvent.notifyObserver(getHitEvent, alertData)
+      true
+    } else {
+      false
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala
new file mode 100644
index 0000000000..2380891463
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+import org.apache.linkis.monitor.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils}
+import org.apache.linkis.server.BDPJettyServerHelper
+
+import org.apache.commons.collections.MapUtils
+
+import java.net.InetAddress
+import java.text.SimpleDateFormat
+import java.util
+import java.util.Date
+
+import scala.collection.JavaConverters._
+
+class CommonRunTimeAlertSender() extends Observer with Logging {
+  private val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
+
+  override def update(e: Event, jobHistoryList: scala.Any): Unit = {
+    if (!e.isInstanceOf[JobHistoryRunTimeHitEvent]) {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong event that triggers CommonRunTimeAlertSender. Input DataType: " + e.getClass.getCanonicalName
+      )
+    }
+    if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) {
+      val dataType = if (null == jobHistoryList) "null" else jobHistoryList.getClass.getCanonicalName
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong input for CommonRunTimeAlertSender. Input DataType: " + dataType
+      )
+    }
+    for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) {
+      if (a == null) {
+        logger.warn("Ignored null input data")
+      } else if (!a.isInstanceOf[JobHistory]) {
+        logger.warn("Ignored wrong input data type: " + a.getClass.getCanonicalName)
+      } else {
+        val jobHistory = a.asInstanceOf[JobHistory]
+        val observeInfoMap = BDPJettyServerHelper.gson.fromJson(
+          jobHistory.getObserveInfo,
+          classOf[java.util.Map[String, String]]
+        )
+        val extraMap = MapUtils.getMap(observeInfoMap, "extra")
+        observeInfoMap.put(
+          "title",
+          extraMap
+            .get("title")
+            .toString + ",任务id:" + jobHistory.getId + ",执行结果 :" + jobHistory.getStatus
+        )
+        observeInfoMap.put(
+          "$detail",
+          extraMap.get("detail").toString + ",执行结果 :" + jobHistory.getStatus
+        )
+        observeInfoMap.put("$submitUser", jobHistory.getSubmitUser)
+        observeInfoMap.put("$status", jobHistory.getStatus)
+        observeInfoMap.put("$id", jobHistory.getId.toString)
+        observeInfoMap.put("$date", dateFormat.format(new Date()))
+        var alterSysInfo = ""
+        if (null != extraMap.get("alterSysInfo")) {
+          alterSysInfo = extraMap.get("alterSysInfo").toString
+        }
+        observeInfoMap.put("$sysid", alterSysInfo)
+        var alterObject = ""
+        if (null != extraMap.get("alterObject")) {
+          alterObject = extraMap.get("alterObject").toString
+        }
+        observeInfoMap.put("$object", alterObject)
+        observeInfoMap.put("$ip", InetAddress.getLocalHost.getHostAddress)
+        observeInfoMap.remove("taskId")
+        observeInfoMap.remove("extra")
+        val alters = MonitorAlertUtils.getAlerts(Constants.JOB_RESULT_IM, observeInfoMap)
+        PooledImsAlertUtils.addAlert(alters.get("12016")) // alert code key from the IMS alert properties
+      }
+    }
+  }
+
+}
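The sender above assumes observeInfo is a JSON object whose extra sub-map carries the alert template fields (title, detail, alterSysInfo, alterObject). A hypothetical payload of that shape; all values are invented:

```scala
// Hypothetical observeInfo document matching the shape CommonRunTimeAlertSender parses.
object ObserveInfoSample {

  val observeInfo: String =
    """{
      |  "taskId": "12345",
      |  "extra": {
      |    "title": "nightly ETL check",
      |    "detail": "batch finished",
      |    "alterSysInfo": "sys-01",
      |    "alterObject": "etl-job"
      |  }
      |}""".stripMargin
}
```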
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala
new file mode 100644
index 0000000000..8b2f6d41e1
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.monitor.core.ob.SingleObserverEvent
+
+class CommonRunTimeHitEvent extends SingleObserverEvent
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala
new file mode 100644
index 0000000000..a1e870c7c5
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.{Event, Observer}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException
+import org.apache.linkis.monitor.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils}
+
+import java.util
+
+import scala.collection.JavaConverters._
+
+/**
+ * Scans job data from the first 20 minutes and inspects completed tasks. An alarm is triggered
+ * only when all of the following hold: 1. the params field in jobhistory contains
+ * task.notification.conditions; 2. the configured condition is one of (Succeed, Failed,
+ * Canceled, Timeout, ALL); 3. the job itself has finished.
+ */
+class JobHistoryRunTimeAlertSender() extends Observer with Logging {
+
+  override def update(e: Event, jobHistoryList: scala.Any): Unit = {
+    if (!e.isInstanceOf[JobHistoryRunTimeHitEvent]) {
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong event that triggers JobHistoryRunTimeAlertSender. Input DataType: " + e.getClass.getCanonicalName
+      )
+    }
+    if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) {
+      val dataType = if (null == jobHistoryList) "null" else jobHistoryList.getClass.getCanonicalName
+      throw new AnomalyScannerException(
+        21304,
+        "Wrong input for JobHistoryRunTimeAlertSender. Input DataType: " + dataType
+      )
+    }
+    for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) {
+      if (a == null) {
+        logger.warn("Ignored null input data")
+      } else if (!a.isInstanceOf[JobHistory]) {
+        logger.warn("Ignored wrong input data type: " + a.getClass.getCanonicalName)
+      } else {
+        // e.g. "Your task ID 1234 has finished; final status: Succeed / Failed / Cancelled"
+        val jobHistory = a.asInstanceOf[JobHistory]
+        val status = jobHistory.getStatus
+        val replaceParm: util.HashMap[String, String] = new util.HashMap[String, String]
+        replaceParm.put("$id", String.valueOf(jobHistory.getId))
+        replaceParm.put("$status", status)
+        replaceParm.put("$alteruser", jobHistory.getSubmitUser)
+        val alters = MonitorAlertUtils.getAlerts(Constants.JOB_RESULT_IM, replaceParm)
+        PooledImsAlertUtils.addAlert(alters.get("12015")) // alert code key from the IMS alert properties
+      }
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala
new file mode 100644
index 0000000000..9daaf0236e
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.monitor.core.ob.SingleObserverEvent
+
+class JobHistoryRunTimeHitEvent extends SingleObserverEvent
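JobHistoryRunTimeRule, added next, only fires for finished jobs whose runtime params carry task.notification.conditions with a finished status. A hypothetical params document, assuming the usual Linkis layout that TaskUtils.getRuntimeMap reads (configuration.runtime):

```scala
// Hypothetical job params satisfying the rule below; values are invented.
object NotificationConditionsSample {

  val params: String =
    """{
      |  "configuration": {
      |    "runtime": {
      |      "task.notification.conditions": "Succeed"
      |    }
      |  }
      |}""".stripMargin
}
```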
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala
new file mode 100644
index 0000000000..d350bc3ace
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.Observer
+import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+import org.apache.linkis.monitor.until.CacheUtils
+import org.apache.linkis.protocol.utils.TaskUtils
+import org.apache.linkis.server.BDPJettyServerHelper
+
+import java.util
+
+import scala.collection.JavaConverters._
+
+class JobHistoryRunTimeRule(hitObserver: Observer)
+    extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver)
+    with Logging {
+  private val scanRuleList = CacheUtils.cacheBuilder
+
+  /**
+   * If the scanned data matches the pattern, notify the registered observer and return true.
+   *
+   * @param data
+   * @return
+   */
+  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
+    if (null == data || !getHitEvent.isRegistered) {
+      logger.error("ScanRule is not bound to an observer or input data is null. Will not be triggered")
+      return false
+    }
+    val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]()
+    for (sd <- data.asScala) {
+      if (sd != null && sd.getData() != null) {
+        for (d <- sd.getData().asScala) {
+          d match {
+            case jobHistory: JobHistory =>
+              if (Constants.FINISHED_JOB_STATUS.contains(jobHistory.getStatus.toUpperCase())) {
+                val parmsMap: util.Map[String, scala.AnyRef] = BDPJettyServerHelper.gson.fromJson(
+                  jobHistory.getParams,
+                  classOf[util.Map[String, scala.AnyRef]]
+                )
+                val runtimeMap = TaskUtils.getRuntimeMap(parmsMap)
+                if (
+                  runtimeMap.containsKey("task.notification.conditions") &&
+                  Constants.FINISHED_JOB_STATUS.contains(
+                    String.valueOf(runtimeMap.get("task.notification.conditions")).toUpperCase()
+                  )
+                ) {
+                  alertData.add(jobHistory)
+                }
+              } else {
+                logger.warn("Ignored unfinished job, taskId: " + jobHistory.getId)
+              }
+              scanRuleList.put("jobHistoryId", jobHistory.getId)
+            case _ =>
+          }
+        }
+      } else {
+        logger.warn("Ignored null scanned data")
+      }
+    }
+    logger.info("hit " + alertData.size() + " data in one iteration")
+    if (alertData.size() > 0) {
+      getHitEvent.notifyObserver(getHitEvent, alertData)
+      true
+    } else {
+      false
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala
new file mode 100644
index 0000000000..6f3158e869
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.request + +import org.apache.linkis.httpclient.request.GetAction + +import org.apache.commons.lang3.StringUtils + +import scala.collection.mutable.ArrayBuffer + +class EmsListAction extends GetAction with MonitorResourceAction { + + override def suffixURLs: Array[String] = Array("linkisManager", "listAllEMs") + +} + +object EmsListAction { + def newBuilder(): Builder = new Builder + + class Builder private[EmsListAction] () { + private var user: String = _ + private var instance: String = _ + private var nodeHealthy: String = _ + private var owner: String = _ + + def setInstance(instance: String): Builder = { + this.instance = instance + this + } + + def setNodeHealthy(nodeHealthy: String): Builder = { + this.nodeHealthy = nodeHealthy + this + } + + def setOwner(owner: String): Builder = { + this.owner = owner + this + } + + def setUser(user: String): Builder = { + this.user = user + this + } + + def build(): EmsListAction = { + val emsListAction = new EmsListAction + if (StringUtils.isNotBlank(instance)) emsListAction.setParameter("instance", instance) + if (StringUtils.isNotBlank(nodeHealthy)) { + emsListAction.setParameter("nodeHealthy", nodeHealthy) + } + if (StringUtils.isNotBlank(owner)) emsListAction.setParameter("owner", owner) + if (StringUtils.isNotBlank(user)) emsListAction.setUser(user) + emsListAction + } + + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala new file mode 100644 index 0000000000..f3175d802f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.request
+
+import org.apache.linkis.httpclient.request.GetAction
+
+import org.apache.commons.lang3.StringUtils
+
+class EntranceTaskAction extends GetAction with MonitorResourceAction {
+  override def suffixURLs: Array[String] = Array("entrance/operation/metrics", "taskinfo")
+}
+
+object EntranceTaskAction {
+  def newBuilder(): Builder = new Builder
+
+  class Builder private[EntranceTaskAction] () {
+    private var user: String = _
+    private var creator: String = _
+    private var engineTypeLabel: String = _
+    private var instance: String = _
+
+    def setCreator(creator: String): Builder = {
+      this.creator = creator
+      this
+    }
+
+    def setEngineTypeLabel(engineTypeLabel: String): Builder = {
+      this.engineTypeLabel = engineTypeLabel
+      this
+    }
+
+    def setUser(user: String): Builder = {
+      this.user = user
+      this
+    }
+
+    def setInstance(instance: String): Builder = {
+      this.instance = instance
+      this
+    }
+
+    def build(): EntranceTaskAction = {
+      val entranceTaskAction = new EntranceTaskAction
+      if (StringUtils.isNotBlank(creator)) entranceTaskAction.setParameter("creator", creator)
+      if (StringUtils.isNotBlank(engineTypeLabel)) {
+        entranceTaskAction.setParameter("engineTypeLabel", engineTypeLabel)
+      }
+      if (StringUtils.isNotBlank(instance)) entranceTaskAction.setParameter("instance", instance)
+      if (StringUtils.isNotBlank(user)) {
+        // The hadoop user should see entrance info of all users, so the user parameter is
+        // left empty for it; any other user only queries its own entrance info.
+        if (user.equals("hadoop")) {
+          entranceTaskAction.setParameter("user", "")
+        } else {
+          entranceTaskAction.setParameter("user", user)
+        }
+        entranceTaskAction.setUser(user)
+      }
+      entranceTaskAction
+    }
+
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala
new file mode 100644
index 0000000000..7ea2001481
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.monitor.request + +import org.apache.linkis.httpclient.dws.request.DWSHttpAction + +trait MonitorResourceAction extends DWSHttpAction with UserAction diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala new file mode 100644 index 0000000000..4733a1b45f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.request + +trait UserAction extends org.apache.linkis.httpclient.request.UserAction { + private var user: String = _ + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = user +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala new file mode 100644 index 0000000000..33c695ca25 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import java.util + +import scala.beans.BeanProperty + +@DWSHttpMessageResult("/api/rest_j/v\\d+/entrance/operation/metrics/taskinfo") +class EntranceTaskResult extends DWSResult { + + @BeanProperty + var tasks: util.ArrayList[util.Map[String, Object]] = _ + + @BeanProperty + var totalPage: Int = _ + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala new file mode 100644 index 0000000000..1c12662e2e --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.response + +import org.apache.linkis.httpclient.dws.response.DWSResult +import org.apache.linkis.httpclient.request.UserAction + +trait MonitorResourceResult extends DWSResult with UserAction { + + private var execID: String = _ + + def getExecID: String = execID + + def setExecID(execID: String): Unit = { + this.execID = execID + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java new file mode 100644 index 0000000000..5c5566c6f9 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils; + + +public class ScanUtils { + public static int getNumOfLines(String str) { + if (str == null || str.length() == 0) { + return 0; + } + int lines = 1; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + lines++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + lines++; + } + } + return lines; + } + + public static int getFirstIndexSkippingLines(String str, Integer lines) { + if (str == null || str.length() == 0 || lines < 0) { + return -1; + } + if (lines == 0) { + return 0; + } + + int curLineIdx = 0; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + curLineIdx++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + curLineIdx++; + } else { + continue; + } + + if (curLineIdx >= lines) { + return pos + 1; + } + } + return -1; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala new file mode 100644 index 0000000000..8a3be387e9 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert + +trait AlertDesc { + + /** + * define necessary information for an alert e.g. alert title, alert receiver etc. + */ +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala new file mode 100644 index 0000000000..68ec0f609f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert + +trait AlertSender { + + /** + * traverse all registered alertActions and send alert + * + * @return + * true if it is a success + */ + def doSendAlert(alertAction: AlertDesc): Boolean +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala new file mode 100644 index 0000000000..6214b633ac --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.{Logging, Utils} + +import java.util.concurrent.{Future, LinkedBlockingQueue} +import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} + +abstract class PooledAlertSender extends AlertSender with Logging { + private val THREAD_POOL_SIZE = CommonVars[Int]("linkis.alert.pool.size", 5).getValue + + private val alertDescQ: LinkedBlockingQueue[AlertDesc] = + new LinkedBlockingQueue[AlertDesc](1000) + + protected implicit val executors = + Utils.newCachedExecutionContext(THREAD_POOL_SIZE, "alert-pool-thread-", false) + + private val stopped: AtomicBoolean = new AtomicBoolean(false) + private val runningNumber: AtomicInteger = new AtomicInteger(0) + private var future: Future[_] = _ + + /** + * add an alertDesc to queue + * + * @param alertDesc + * should encapsulates every information an alert platform needs for sending an alarm + */ + def addAlertToPool(alertDesc: AlertDesc): Unit = { + alertDescQ.add(alertDesc) + } + + /** + * describes actual actions for sending an alert + * + * @return + * true if it is a success + */ + override def doSendAlert(alertDesc: AlertDesc): Boolean + + def start(): Unit = { + future = Utils.defaultScheduler.submit(new Runnable() { + override def run() { + logger.info("Pooled alert thread started!") + while (!stopped.get) { + executors synchronized { + while (!stopped.get && runningNumber.get >= THREAD_POOL_SIZE) { + logger.info("Pooled alert thread is full, start waiting") + executors.wait() + } + } + logger.info("Pooled alert thread continue processing") + + if (stopped.get && alertDescQ.size() == 0) return + val alertDesc = Utils.tryQuietly(alertDescQ.take) + if (alertDesc == null) return + executors.submit(new Runnable { + override def run() { + runningNumber.addAndGet(1) + Utils.tryAndWarn { + logger.info("sending alert , information: " + 
alertDesc)
+                val ok = doSendAlert(alertDesc)
+                if (!ok) {
+                  logger.warn("Failed to send alert: " + alertDesc)
+                } else {
+                  logger.info("Successfully sent alert: " + alertDesc)
+                }
+                runningNumber.decrementAndGet
+                executors synchronized executors.notify
+              }
+            }
+          })
+        }
+      }
+    })
+  }
+
+  def shutdown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = {
+    logger.info("stopping the Pooled alert thread...")
+    if (waitComplete) {
+      val startTime = System.currentTimeMillis()
+      // keep waiting while work remains and the timeout (if any) has not yet elapsed
+      while (
+        (alertDescQ.size() > 0 || runningNumber
+          .get() > 0) && (timeoutMs == -1 || System.currentTimeMillis() - startTime < timeoutMs)
+      ) {
+        Utils.tryQuietly(Thread.sleep(5 * 1000L))
+      }
+    }
+    stopped.set(true)
+    executors.shutdown
+    future.cancel(true)
+    logger.info("Pooled alert thread is stopped")
+  }
+
+}
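PooledAlertSender leaves doSendAlert abstract, so a concrete sender only supplies the transport. A minimal hypothetical subclass showing the intended lifecycle:

```scala
package org.apache.linkis.monitor.utils.alert

// Hypothetical sender that logs instead of calling a real alert platform.
class LoggingAlertSender extends PooledAlertSender {

  override def doSendAlert(alertDesc: AlertDesc): Boolean = {
    logger.info("would send: " + alertDesc)
    true
  }
}

object LoggingAlertSenderDemo {

  def main(args: Array[String]): Unit = {
    val sender = new LoggingAlertSender
    sender.start() // spins up the pooled consumer thread
    // sender.addAlertToPool(desc) // scan rules enqueue AlertDescs here
    sender.shutdown(waitComplete = true, timeoutMs = 10000)
  }
}
```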
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala
new file mode 100644
index 0000000000..06ef57f629
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.utils.alert.ims
+
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.utils.ScanUtils
+import org.apache.linkis.monitor.utils.alert.AlertDesc
+
+import org.apache.commons.collections.CollectionUtils
+import org.apache.commons.lang3.StringUtils
+
+import java.util
+import java.util.HashSet
+
+import scala.collection.JavaConverters._
+
+import ImsAlertLevel.ImsAlertLevel
+import ImsAlertWay.ImsAlertWay
+
+case class ImsAlertDesc(
+    var subSystemId: String,
+    var alertTitle: String,
+    var alertObj: String,
+    var alertInfo: String,
+    alertLevel: ImsAlertLevel = ImsAlertLevel.INFO,
+    alertIp: String,
+    canRecover: Int = 0, // defaults to 0; when set to 1 a matching recovery alert must follow
+    alertWays: util.Set[ImsAlertWay] = new HashSet[ImsAlertWay],
+    var alertReceivers: util.Set[String] = new HashSet[String],
+    var numHit: Int = 0,
+    var hitIntervalMs: Long = 0L
+) extends AlertDesc {
+
+  override val toString: String = {
+    val sb = new StringBuilder
+    sb.append("sub_system_id=").append(subSystemId).append("&alert_title=").append(alertTitle)
+    if (alertLevel != null) sb.append("&alert_level=").append(alertLevel.toString)
+    if (StringUtils.isNotEmpty(alertObj)) sb.append("&alert_obj=").append(alertObj)
+    if (StringUtils.isNotEmpty(alertInfo)) {
+      sb.append("&alert_info=")
+        .append(alertInfo)
+        .append(
+          "[freq_info] hit " + numHit + " time(s) within " + hitIntervalMs / 1000 / 60 + " mins"
+        )
+    }
+    if (canRecover == 0 || canRecover == 1) sb.append("&can_recover=").append(canRecover)
+    if (alertWays != null && alertWays.size > 0) {
+      sb.append("&alert_way=")
+      sb.append(alertWays.asScala.map(_.toString).mkString(","))
+    }
+    if (alertReceivers != null && alertReceivers.size > 0) {
+      sb.append("&alert_reciver=") // key spelling follows the IMS API
+      sb.append(alertReceivers.asScala.mkString(","))
+    }
+    if (alertIp != null) {
+      sb.append("&alert_ip=").append(alertIp)
+    }
+    sb.toString
+  }
+
+  val toMap: Map[String, String] = {
+    val map = scala.collection.mutable.Map[String, String]()
+    map += "sub_system_id" -> subSystemId
+    map += "alert_title" -> alertTitle
+    if (alertLevel != null) map += "alert_level" -> alertLevel.toString
+    if (StringUtils.isNotEmpty(alertObj)) map += "alert_obj" -> alertObj
+    if (StringUtils.isNotEmpty(alertInfo)) {
+      map += "alert_info" -> (alertInfo +
+        "[freq_info] hit " + numHit + " time(s) within " + hitIntervalMs / 1000 / 60 + " mins")
+    }
+    if (canRecover == 0 || canRecover == 1) map += "can_recover" -> canRecover.toString
+    if (alertWays != null && alertWays.size > 0) {
+      map += "alert_way" -> alertWays.asScala.map(_.toString).mkString(",")
+    }
+    if (alertReceivers != null && alertReceivers.size > 0) {
+      map += "alert_reciver" -> alertReceivers.asScala.mkString(",")
+    }
+    map.toMap
+  }
+
+  val toImsRequest: ImsRequest = {
+    val params = validate()
+    val alertEntity = AlertEntity(
+      params(0).asInstanceOf[String],
+      params(1).asInstanceOf[String],
+      params(3).asInstanceOf[String] +
+        "[freq_info] hit " + numHit + " time(s) within " + hitIntervalMs / 1000 / 60 + " mins",
+      alertWays.asScala.map(_.toString).mkString(","),
+      params(4).asInstanceOf[util.Set[String]].asScala.mkString(","),
+      alertLevel.toString,
+      params(2).asInstanceOf[String],
+      canRecover.toString
+    )
+
+    val alertEntityList = new util.ArrayList[AlertEntity]
+    alertEntityList.add(alertEntity)
+
+    ImsRequest(alertEntityList)
+  }
+
+  def validate(): Array[Any] = {
+    assert(StringUtils.isNumeric(subSystemId) && subSystemId.length == 4)
+    assert(StringUtils.isNotEmpty(alertTitle))
+    val newAlertTitle = if (alertTitle.length > 100) {
+      alertTitle.substring(0, 93) + "... ..." // keep the result within the 100-char limit
+    } else {
+      alertTitle
+    }
+    val newAlertObj = if (StringUtils.isNotEmpty(alertObj) && alertObj.length >= 50) {
+      alertObj.substring(0, 36) + "... ..."
+    } else {
+      alertObj
+    }
+    val newAlertInfo =
+      if (
+        StringUtils.isNotEmpty(alertInfo) && ScanUtils.getNumOfLines(
+          alertInfo
+        ) > Constants.ALERT_IMS_MAX_LINES
+      ) {
+        StringUtils.substring(
+          alertInfo,
+          0,
+          ScanUtils.getFirstIndexSkippingLines(alertInfo, Constants.ALERT_IMS_MAX_LINES)
+        ) + "... ...\n"
+      } else {
+        alertInfo
+      }
+    val newAlertReceivers =
+      if (CollectionUtils.isNotEmpty(alertReceivers) && alertReceivers.size > 15) {
+        alertReceivers.asScala.take(15).asJava // keep the type a java Set for toImsRequest
+      } else {
+        alertReceivers
+      }
+
+    Array(subSystemId, newAlertTitle, newAlertObj, newAlertInfo, newAlertReceivers)
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala
new file mode 100644
index 0000000000..10801de03e
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.utils.alert.ims
+
+object ImsAlertLevel extends Enumeration {
+  type ImsAlertLevel = Value
+  val INFO = Value("5")
+  val WARN = Value("4")
+  val MINOR = Value("3")
+  val MAJOR = Value("2")
+  val CRITICAL = Value("1")
+  val CLEAR = Value("0")
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala
new file mode 100644
index 0000000000..1166453b87
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import com.fasterxml.jackson.annotation.JsonProperty + +case class ImsAlertPropFileData( + @JsonProperty("alert_title") alertTitle: String, + @JsonProperty("alert_info") alertInfo: String, + @JsonProperty("alert_way") alertWays: String, + @JsonProperty("alert_reciver") alertReceivers: String, + @JsonProperty("alert_level") alertLevel: String, + @JsonProperty("alert_obj") alertObj: String, + @JsonProperty("can_recover") canRecover: String +) diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala new file mode 100644 index 0000000000..7f26c705a0 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +object ImsAlertWay extends Enumeration { + type ImsAlertWay = Value + val NoAlert = Value("0") + val WXWork = Value("1") + val Email = Value("2") + val WeChat = Value("3") +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala new file mode 100644 index 0000000000..e497b1a41a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils.alert.ims + +import java.util + +import com.fasterxml.jackson.annotation.JsonProperty + +case class ImsRequest(@JsonProperty("alertList") alertList: util.List[AlertEntity]) + +case class AlertEntity( + @JsonProperty("sub_system_id") subSystemId: String, + @JsonProperty("alert_title") alertTitle: String, + @JsonProperty("alert_info") alertInfo: String, + @JsonProperty("alert_way") alertWays: String, + @JsonProperty("alert_reciver") alertReceivers: String, + @JsonProperty("alert_level") alertLevel: String, + @JsonProperty("alert_obj") alertObj: String, + @JsonProperty("can_recover") canRecover: String +) diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala new file mode 100644 index 0000000000..67c1b0358f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc + +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + +import java.io.{BufferedReader, File, FileInputStream, InputStream, InputStreamReader} +import java.text.SimpleDateFormat +import java.util +import java.util.Properties + +import scala.collection.JavaConverters._ + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.DefaultScalaModule + +object MonitorAlertUtils extends Logging { + + private val mapper = { + val ret = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) + ret.registerModule(DefaultScalaModule) + ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + ret + } + + val properties = { + val url = getClass.getClassLoader.getResource(Constants.ALERT_PROPS_FILE_PATH) + if (url == null) { + throw new AnomalyScannerException( + 21304, + "Failed to load alerts from alert properties. 
Alert properties file does not exist: " + Constants.ALERT_PROPS_FILE_PATH
+      )
+    }
+    logger.info("reading alert properties from: " + url.getFile)
+    val properties = new Properties()
+    var inputStream: InputStream = null
+    var reader: InputStreamReader = null
+    var buff: BufferedReader = null
+    Utils.tryFinally {
+      Utils.tryCatch {
+        inputStream = new FileInputStream(new File(url.getFile))
+        reader = new InputStreamReader(inputStream, "UTF-8")
+        buff = new BufferedReader(reader)
+        properties.load(buff)
+      } { t =>
+        {
+          throw new AnomalyScannerException(
+            21304,
+            "Failed to load alerts from alert properties. Cause: " + ExceptionUtils.getMessage(t)
+          )
+        }
+      }
+    } {
+      IOUtils.closeQuietly(buff)
+      IOUtils.closeQuietly(reader)
+      IOUtils.closeQuietly(inputStream)
+    }
+    properties.asScala
+  }
+
+  def getAlerts(prefix: String, params: util.Map[String, String]): util.Map[String, AlertDesc] = {
+    val ret = new util.HashMap[String, AlertDesc]()
+
+    for ((k: String, v: String) <- properties) {
+      if (ret.containsKey(k)) {
+        logger.warn("found duplicate key in alert properties, accept only the first one")
+      } else if (StringUtils.startsWith(k, prefix)) {
+        val data = mapper.readValue(v, classOf[ImsAlertPropFileData])
+        // replace the $name placeholder first, then any caller-supplied placeholders
+        var alertInfo = data.alertInfo.replace("$name", data.alertReceivers)
+        val iterator = params.keySet.iterator
+        while (iterator.hasNext) {
+          val key = iterator.next
+          val value = params.get(key)
+          alertInfo = alertInfo.replace(key, value)
+        }
+        val receivers = {
+          val set: util.Set[String] = new util.HashSet[String]
+          if (StringUtils.isNotBlank(data.alertReceivers)) {
+            data.alertReceivers.split(",").foreach(r => set.add(r))
+          }
+          if (!params.containsKey("$alteruser")) {
+            Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => {
+              if (StringUtils.isNotBlank(e)) {
+                set.add(e)
+              }
+            })
+          } else {
+            set.add(params.get("$alteruser"))
+          }
+          if (StringUtils.isNotBlank(params.get("receiver"))) {
+            params.get("receiver").split(",").foreach(r => set.add(r))
+          }
+          set
+        }
+
+        val subSystemId = params.getOrDefault("subSystemId", Constants.ALERT_SUB_SYSTEM_ID)
+        val alertTitle = params.getOrDefault("title", data.alertTitle)
+        val alertLevel =
+          if (StringUtils.isNotBlank(data.alertLevel)) {
+            ImsAlertLevel.withName(params.getOrDefault("monitorLevel", data.alertLevel))
+          } else {
+            ImsAlertLevel.withName(params.getOrDefault("monitorLevel", ImsAlertLevel.WARN.toString))
+          }
+
+        val alertDesc = Utils.tryAndWarn(
+          ImsAlertDesc(
+            subSystemId,
+            alertTitle,
+            data.alertObj,
+            alertInfo,
+            alertLevel,
+            null,
+            0, {
+              val set: util.Set[ImsAlertWay.Value] = new util.HashSet[ImsAlertWay.Value]
+              if (StringUtils.isNotBlank(data.alertWays)) {
+                data.alertWays
+                  .split(",")
+                  .foreach(alertWayStr => set.add(ImsAlertWay.withName(alertWayStr)))
+              }
+              set
+            },
+            receivers
+          )
+        )
+        val realK = StringUtils.substringAfter(k, prefix)
+        ret.put(realK, alertDesc)
+      }
+    }
+    ret
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala
new file mode 100644
index 0000000000..70bd15567e
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.utils.alert.ims
+
+import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils}
+import org.apache.linkis.monitor.utils.alert.{AlertDesc, PooledAlertSender}
+import org.apache.linkis.monitor.utils.log.LogUtils
+
+import org.apache.http.client.config.RequestConfig
+import org.apache.http.client.methods.HttpPost
+import org.apache.http.entity.{ContentType, StringEntity}
+import org.apache.http.impl.client.HttpClients
+import org.apache.http.util.EntityUtils
+
+import java.text.SimpleDateFormat
+import java.util
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+
+class PooledImsAlertSender(alertUrl: String) extends PooledAlertSender with Logging {
+
+  protected val httpClient = HttpClients.createDefault
+
+  private val mapper =
+    new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ"))
+
+  /**
+   * describes actual actions for sending an alert
+   *
+   * @return
+   *   true if it is a success
+   */
+  override def doSendAlert(alertDesc: AlertDesc): Boolean = {
+    if (!alertDesc.isInstanceOf[ImsAlertDesc]) {
+      logger.warn("wrong alertDesc dataType: " + alertDesc.getClass.getCanonicalName)
+      return false
+    }
+    logger.info("sending an alert to IMS, information: " + alertDesc)
+    val imsRequest = alertDesc.asInstanceOf[ImsAlertDesc].toImsRequest
+
+    mapper.registerModule(DefaultScalaModule)
+    val paramContent = Utils.tryCatch(mapper.writeValueAsString(imsRequest)) { t =>
+      logger.warn("ignore alert: " + imsRequest, t)
+      return false
+    }
+    if (paramContent.isEmpty) {
+      logger.warn("alertParams is empty, will not send alarm")
+      return false
+    }
+
+    val requestConfig = RequestConfig.DEFAULT
+
+    val entity = new StringEntity(
+      paramContent,
+      ContentType.create(ContentType.APPLICATION_JSON.getMimeType, "UTF-8")
+    )
+    entity.setContentEncoding("UTF-8")
+
+    val httpPost = new HttpPost(alertUrl)
+
+    httpPost.setConfig(requestConfig)
+    httpPost.setEntity(entity)
+
+    val response = Utils.tryAndErrorMsg(httpClient.execute(httpPost))("send alert to IMS failed")
+
+    if (response != null) {
+      val responseInfo = EntityUtils.toString(response.getEntity, "UTF-8")
+      logger.info("Alert: " + paramContent + ", Response: " + responseInfo)
+      LogUtils.stdOutLogger.info("Alert: " + paramContent + ", Response: " + responseInfo)
+      if (response.getStatusLine.getStatusCode == 200) return true
+    }
+    false
+  }
+
+  override def shutdown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = {
+    super.shutdown(waitComplete, timeoutMs)
+    httpClient.close()
+  }
+
+}
diff --git
a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala new file mode 100644 index 0000000000..f7917a9e15 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.utils.alert.AlertDesc + +import org.apache.commons.collections.CollectionUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + +import java.net.InetAddress +import java.util +import java.util.HashSet + +import scala.collection.JavaConverters._ + +import ImsAlertWay.ImsAlertWay + +object PooledImsAlertUtils extends Logging { + + private val sender: PooledImsAlertSender = { + val ret = new PooledImsAlertSender(Constants.ALERT_IMS_URL) + ret.start() + ret + } + + private val localIp = InetAddress.getLocalHost.getHostAddress + + def addAlertAndLogException(message: String): Unit = Utils.tryAndError(addAlert(message)) + + def addAlert(message: String): Unit = addExceptionAlert(message, null, null) + + def addExceptionAlert(message: String, t: Throwable): Unit = + addExceptionAlert(message, t, null) + + def addExceptionAlertAndLogException(message: String, t: Throwable): Unit = + Utils.tryAndError(addExceptionAlert(message, t, null)) + + def addExceptionAlert(message: String, t: Throwable, alertWays: util.Set[ImsAlertWay]): Unit = { + val alertObj = + if (StringUtils.isEmpty(message) && t != null) t.getMessage + else if (StringUtils.isEmpty(message)) { + throw new NullPointerException("both message and exception are null!") + } else { + message + } + val _alertWays = + if (CollectionUtils.isNotEmpty(alertWays)) alertWays else new HashSet[ImsAlertWay]() + val (alertInfo, alertLevel) = if (t != null) { + _alertWays.add(ImsAlertWay.Email) + _alertWays.add(ImsAlertWay.WXWork) + _alertWays.add(ImsAlertWay.WeChat) + (ExceptionUtils.getRootCauseMessage(t), ImsAlertLevel.MAJOR) + } else { + _alertWays.add(ImsAlertWay.WXWork) + (message, ImsAlertLevel.WARN) + } + val alertDesc = new ImsAlertDesc( + Constants.ALERT_SUB_SYSTEM_ID, + "BDP Alert", + alertObj, + alertInfo, + alertLevel, + localIp, + 0, + _alertWays + ) + addAlert(alertDesc) + } + + def addAlert(alertDesc: AlertDesc): Unit = { + if (!alertDesc.isInstanceOf[ImsAlertDesc]) { + logger.warn("Ignore wrong alertDesc. 
DataType: " + alertDesc.getClass.getCanonicalName) + } else { + sender.addAlertToPool(alertDesc) + logger.info("successfully added alert") + } + } + + def addAlertAndLogException(alertDesc: ImsAlertDesc): Unit = + Utils.tryAndError(addAlert(alertDesc)) + + def clearAlert(alertDesc: ImsAlertDesc): Unit = { + assert(alertDesc.canRecover == 1) + assert(alertDesc.alertLevel == ImsAlertLevel.CLEAR) + sender.addAlertToPool(alertDesc) + } + + def shutDown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = { + sender.shutdown(waitComplete, timeoutMs) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala new file mode 100644 index 0000000000..64a587a21c --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc + +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + +import java.io.{BufferedReader, File, FileInputStream, InputStream, InputStreamReader} +import java.text.SimpleDateFormat +import java.util +import java.util.Properties + +import scala.collection.JavaConverters._ + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.DefaultScalaModule + +object UserLabelAlertUtils extends Logging { + + def getAlerts(prefix: String, userCreator: String): util.Map[String, AlertDesc] = { + val replaceParams: util.HashMap[String, String] = new util.HashMap[String, String] + replaceParams.put("$userCreator", userCreator) + MonitorAlertUtils.getAlerts(prefix, replaceParams) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala new file mode 100644 index 0000000000..b63a690d24 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.log + +import org.slf4j.LoggerFactory + +object LogUtils { + val stdOutLogger = LoggerFactory.getLogger("PlaintTextConsoleLogger") +} diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java new file mode 100644 index 0000000000..2b82037377 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils.alert; + +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertLevel; +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertWay; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertSender; +import org.apache.linkis.server.utils.LinkisMainHelper; + +import java.util.HashSet; +import java.util.Set; + +public class PooledImsAlertSenderTest { + // @Before + public void before() { + System.getProperties().setProperty(LinkisMainHelper.SERVER_NAME_KEY(), "linkis-et-monitor"); + System.getProperties() + .setProperty("log4j.configurationFile", "src/test/resources/log4j2-console.xml"); + // System.getProperties().setProperty("wds.linkis.server.conf", + // "linkis-et-monitor.properties"); + } + + // @org.junit.Test + public void doSendAlert() throws Exception { + Set ways = new HashSet<>(); + ways.add(ImsAlertWay.WeChat()); + ways.add(ImsAlertWay.Email()); + + Set receivers = new HashSet<>(); + receivers.add("shangda, johnnwang"); + ImsAlertDesc desc = + new ImsAlertDesc( + "5435", + "linkis_alert_test", + "linkis_alert", + "this is a test for linkis", + ImsAlertLevel.MINOR(), + "10.127.0.0.1", + 0, + ways, + receivers, + 3, + 12); + + System.out.println(desc); + String url = "http://172.21.0.130:10812/ims_data_access/send_alarm_by_json.do"; + + PooledImsAlertSender sender = new PooledImsAlertSender(url); + sender.doSendAlert(desc); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java new file mode 100644 index 0000000000..c0798168aa --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.utils.alert;
+
+import org.apache.linkis.monitor.constants.Constants;
+import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils;
+import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils;
+import org.apache.linkis.server.utils.LinkisMainHelper;
+
+import java.util.Map;
+
+public class PooledImsAlertUtilsTest {
+  // @Before
+  public void before() {
+    System.getProperties().setProperty(LinkisMainHelper.SERVER_NAME_KEY(), "linkis-et-monitor");
+    System.getProperties()
+        .setProperty("log4j.configurationFile", "src/test/resources/log4j2-console.xml");
+    // System.getProperties().setProperty("wds.linkis.server.conf",
+    // "linkis-et-monitor.properties");
+  }
+
+  // @Test
+  public void addAlert() throws Exception {
+    PooledImsAlertUtils.addAlert("1st test");
+    Map<String, AlertDesc> alerts =
+        MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null);
+    for (Map.Entry<String, AlertDesc> kv : alerts.entrySet()) {
+      System.out.println(kv.getKey() + ": " + kv.getValue().toString());
+      PooledImsAlertUtils.addAlert(kv.getValue());
+    }
+    Thread.sleep(2000L);
+    PooledImsAlertUtils.shutDown(true, -1);
+  }
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml b/linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml
new file mode 100644
index 0000000000..49eabc51e6
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml
@@ -0,0 +1,46 @@
+<!-- log4j2 console-appender test configuration (XML element content not preserved) -->
diff --git a/linkis-extensions/pom.xml b/linkis-extensions/pom.xml
index 661b6be0bf..7233141565 100644
--- a/linkis-extensions/pom.xml
+++ b/linkis-extensions/pom.xml
@@ -28,6 +28,7 @@
   <packaging>pom</packaging>
   <modules>
     <module>linkis-io-file-client</module>
+    <module>linkis-et-monitor</module>
   </modules>
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/conf/OrchestratorConfiguration.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/conf/OrchestratorConfiguration.scala
index 50dbef632c..10f3a64d13 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/conf/OrchestratorConfiguration.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/conf/OrchestratorConfiguration.scala
@@ -48,7 +48,7 @@
     CommonVars("wds.linkis.orchestrator.execution.task.max.parallelism", 5)
 
   val TASK_RUNNER_MAX_SIZE =
-    CommonVars("wds.linkis.orchestrator.execution.task.runner.max.size", 200)
+    CommonVars("wds.linkis.orchestrator.execution.task.runner.max.size", 1000)
 
   val EXEC_RUNNER_FACTORY_CLASS =
     CommonVars("wds.linkis.orchestrator.exec.task.runner.factory.class", "")
diff --git a/linkis-public-enhancements/linkis-configuration/pom.xml b/linkis-public-enhancements/linkis-configuration/pom.xml
index fc7faea35c..45a1ee37d9 100644
--- a/linkis-public-enhancements/linkis-configuration/pom.xml
+++ b/linkis-public-enhancements/linkis-configuration/pom.xml
@@ -59,6 +59,13 @@
       <artifactId>linkis-computation-client</artifactId>
       <version>${project.version}</version>
     </dependency>
+
+    <dependency>
+      <groupId>org.instancio</groupId>
+      <artifactId>instancio-junit</artifactId>
+      <version>2.16.1</version>
+      <scope>test</scope>
+    </dependency>
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java
new file mode 100644
index 0000000000..f2fee2ff1f
--- /dev/null
+++
b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.conf; + +public class AcrossClusterRuleKeys { + + public static final String KEY_QUEUE_SUFFIX = "suffix"; + + public static final String KEY_ACROSS_CLUSTER_QUEUE_SUFFIX = "bdap2bdp"; + + public static final String KEY_START_TIME = "startTime"; + + public static final String KEY_END_TIME = "endTime"; + + public static final String KEY_CPU_THRESHOLD = "CPUThreshold"; + + public static final String KEY_MEMORY_THRESHOLD = "MemoryThreshold"; + + public static final String KEY_CPU_PERCENTAGE_THRESHOLD = "CPUPercentageThreshold"; + + public static final String KEY_MEMORY_PERCENTAGE_THRESHOLD = "MemoryPercentageThreshold"; + + public static final String KEY_QUEUE_RULE = "queueRule"; + + public static final String KEY_TIME_RULE = "timeRule"; + + public static final String KEY_THRESHOLD_RULE = "thresholdRule"; +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java new file mode 100644 index 0000000000..9dadcf918c --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.configuration.dao;
+
+import org.apache.linkis.configuration.entity.AcrossClusterRule;
+
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+public interface AcrossClusterRuleMapper {
+
+  AcrossClusterRule getAcrossClusterRule(@Param("id") Long id);
+
+  void deleteAcrossClusterRule(
+      @Param("creator") String creator, @Param("username") String username);
+
+  void updateAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule);
+
+  void insertAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule);
+
+  List<AcrossClusterRule> queryAcrossClusterRuleList(
+      @Param("username") String username,
+      @Param("creator") String creator,
+      @Param("clusterName") String clusterName);
+
+  void validAcrossClusterRule(@Param("isValid") String isValid, @Param("id") Long id);
+}
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java
new file mode 100644
index 0000000000..0993b2cbed
--- /dev/null
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitForUser; +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** for table linkis_ps_configuration_key_limit_for_user @Description */ +public interface ConfigKeyLimitForUserMapper { + + int batchInsertList(List list); + + int updateByPrimaryKey(ConfigKeyLimitForUser configKeyLimitForUser); + + int batchInsertOrUpdateList(List list); + + List selectByLabelAndKeyIds( + @Param("label") String label, @Param("keyIdList") List keyIdList); + + ConfigKeyLimitVo selectByLabelAndKeyId(@Param("label") String label, @Param("keyId") Long keyId); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java index 6b6b15a65c..ee5506d9eb 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java @@ -17,10 +17,7 @@ package org.apache.linkis.configuration.dao; -import org.apache.linkis.configuration.entity.CategoryLabel; -import org.apache.linkis.configuration.entity.ConfigKey; -import org.apache.linkis.configuration.entity.ConfigKeyValue; -import org.apache.linkis.configuration.entity.ConfigValue; +import org.apache.linkis.configuration.entity.*; import org.apache.ibatis.annotations.Param; @@ -28,19 +25,14 @@ public interface ConfigMapper { - List getConfigByEngineUserCreator( - @Param("engineType") String engineType, - @Param("creator") String creator, - @Param("userName") String userName); - List getConfigKeyByLabelIds(@Param("ids") List ids); List getConfigKeyValueByLabelId(@Param("labelId") Integer labelId); - Long selectAppIDByAppName(@Param("name") String appName); - void insertValue(ConfigValue configValue); + int batchInsertOrUpdateValueList(List list); + ConfigValue getConfigValueById(@Param("id") Long id); ConfigValue getConfigValueByKeyAndLabel(ConfigValue configValue); @@ -57,9 +49,14 @@ List getConfigByEngineUserCreator( List selectKeyByKeyName(@Param("keyName") String keyName); - List listKeyByStringValue(@Param("stringValue") String stringValue); + List selectKeyByEngineType(@Param("engineType") String engineType); + + List selectKeyByEngineTypeAndKeyList( + @Param("engineType") String engineType, @Param("keyList") List keyList); - void insertCreator(String creator); + List selectKeyByKeyIdList(@Param("keyIdList") List keyList); + + List listKeyByStringValue(@Param("stringValue") String stringValue); List getCategory(); @@ -74,4 +71,21 @@ List getConfigByEngineUserCreator( void insertKey(ConfigKey key); List getConfigEnKeyValueByLabelId(@Param("labelId") Integer labelId); + + void deleteConfigKey(@Param("id") Integer id); + + List getConfigBykey(@Param("engineType") String engineType, @Param("key") String key); + + List getConfigEnBykey( + @Param("engineType") String engineType, @Param("key") String key); + + List getUserConfigValue( + @Param("key") String key, + @Param("user") String user, + @Param("creator") String creator, + @Param("engineType") String engineType); + + void insertKeyByBase(ConfigKey configKey); + + void updateConfigKey(ConfigKey configKey); } diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java index d199134b4b..1a513e3352 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java @@ -28,8 +28,13 @@ public interface LabelMapper { ConfigLabel getLabelByKeyValue( @Param("labelKey") String labelKey, @Param("stringValue") String stringValue); + // label key:combined_userCreator_engineType + List selectUserCreatorEngineTypeLabelList(@Param("itemList") List itemList); + void insertLabel(ConfigLabel label); + void batchInsertLabel(@Param("labelList") List labelList); + void deleteLabel(@Param("ids") List ids); ConfigLabel getLabelById(@Param("id") Integer id); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java new file mode 100644 index 0000000000..6862650f27 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.configuration.dao;
+
+import org.apache.linkis.configuration.entity.TemplateConfigKey;
+import org.apache.linkis.configuration.entity.TemplateConfigKeyVO;
+
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/** The DAO interface for the linkis_ps_configuration_template_config_key table. */
+public interface TemplateConfigKeyMapper {
+
+  int batchInsertList(List<TemplateConfigKey> list);
+
+  List<TemplateConfigKey> selectListByTemplateUuid(@Param("templateUuid") String templateUuid);
+
+  int deleteByTemplateUuidAndKeyIdList(
+      @Param("templateUuid") String templateUuid, @Param("keyIdList") List<Long> keyIdList);
+
+  int batchInsertOrUpdateList(List<TemplateConfigKey> list);
+
+  List<TemplateConfigKey> selectListByTemplateUuidList(
+      @Param("templateUuidList") List<String> templateUuidList);
+
+  List<TemplateConfigKeyVO> selectInfoListByTemplateUuid(
+      @Param("templateUuid") String templateUuid);
+
+  List<TemplateConfigKeyVO> selectInfoListByTemplateName(
+      @Param("templateName") String templateName);
+
+  List<String> selectEngineTypeByTemplateUuid(@Param("templateUuid") String templateUuid);
+}
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java
new file mode 100644
index 0000000000..c24cfd3d44
--- /dev/null
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.configuration.entity;
+
+import java.util.Date;
+
+public class AcrossClusterRule {
+
+  private Long id;
+  private String clusterName;
+  private String creator;
+  private String username;
+  private Date createTime;
+  private String createBy;
+  private Date updateTime;
+  private String updateBy;
+  private String rules;
+  private String isValid;
+
+  public AcrossClusterRule() {}
+
+  public Long getId() {
+    return id;
+  }
+
+  public void setId(Long id) {
+    this.id = id;
+  }
+
+  public String getClusterName() {
+    return clusterName;
+  }
+
+  public void setClusterName(String clusterName) {
+    this.clusterName = clusterName;
+  }
+
+  public String getCreator() {
+    return creator;
+  }
+
+  public void setCreator(String creator) {
+    this.creator = creator;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
+
+  public Date getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(Date createTime) {
+    this.createTime = createTime;
+  }
+
+  public String getCreateBy() {
+    return createBy;
+  }
+
+  public void setCreateBy(String createBy) {
+    this.createBy = createBy;
+  }
+
+  public Date getUpdateTime() {
+    return updateTime;
+  }
+
+  public void setUpdateTime(Date updateTime) {
+    this.updateTime = updateTime;
+  }
+
+  public String getUpdateBy() {
+    return updateBy;
+  }
+
+  public void setUpdateBy(String updateBy) {
+    this.updateBy = updateBy;
+  }
+
+  public String getRules() {
+    return rules;
+  }
+
+  public void setRules(String rules) {
+    this.rules = rules;
+  }
+
+  public String getIsValid() {
+    return isValid;
+  }
+
+  public void setIsValid(String isValid) {
+    this.isValid = isValid;
+  }
+
+  @Override
+  public String toString() {
+    return "AcrossClusterRule{"
+        + "id="
+        + id
+        + ", clusterName='"
+        + clusterName
+        + '\''
+        + ", creator='"
+        + creator
+        + '\''
+        + ", username='"
+        + username
+        + '\''
+        + ", createTime="
+        + createTime
+        + ", createBy='"
+        + createBy
+        + '\''
+        + ", updateTime="
+        + updateTime
+        + ", updateBy='"
+        + updateBy
+        + '\''
+        + ", rules='"
+        + rules
+        + '\''
+        + ", isValid='"
+        + isValid
+        + '\''
+        + '}';
+  }
+}
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java
index 1e26252a7c..4c471409ab 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java
@@ -17,6 +17,9 @@
 
 package org.apache.linkis.configuration.entity;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
 public class ConfigKey {
 
   private Long id;
@@ -43,6 +46,22 @@ public class ConfigKey {
 
   private String treeName;
 
+  /*
+    0 none
+    1 with min
+    2 with max
+    3 min and max both
+  */
+  private Integer boundaryType;
+
+  private String enName;
+
+  private String enDescription;
+
+  private String enTreeName;
+
+  private Boolean templateRequired;
+
   public String getEngineType() {
     return engineType;
   }
@@ -138,4 +157,84 @@ public Integer getLevel() {
   public void setLevel(Integer level) {
     this.level = level;
   }
+
+  public Integer getBoundaryType() {
+    return boundaryType;
+  }
+
+  public void setBoundaryType(Integer boundaryType) {
+    this.boundaryType =
boundaryType; + } + + public String getEnName() { + return enName; + } + + public void setEnName(String enName) { + this.enName = enName; + } + + public String getEnDescription() { + return enDescription; + } + + public void setEnDescription(String enDescription) { + this.enDescription = enDescription; + } + + public String getEnTreeName() { + return enTreeName; + } + + public void setEnTreeName(String enTreeName) { + this.enTreeName = enTreeName; + } + + public Boolean getTemplateRequired() { + return templateRequired; + } + + public void setTemplateRequired(Boolean templateRequired) { + this.templateRequired = templateRequired; + } + + @Override + public String toString() { + return "ConfigKey{" + + "id=" + + id + + ", key='" + + key + + '\'' + + ", description='" + + description + + '\'' + + ", name='" + + name + + '\'' + + ", engineType='" + + engineType + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", validateType='" + + validateType + + '\'' + + ", validateRange='" + + validateRange + + '\'' + + ", isAdvanced=" + + isAdvanced + + ", isHidden=" + + isHidden + + ", level=" + + level + + ", treeName='" + + treeName + + '\'' + + ", boundaryType=" + + boundaryType + + '}'; + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java new file mode 100644 index 0000000000..a626f32255 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +/** for table linkis_ps_configuration_key_limit_for_user @Description */ +public class ConfigKeyLimitForUser { + + /** Table field: id Field type: bigint(19) */ + private Long id; + + /** Username table field: user_name field type: varchar(50) */ + private String userName; + + /** + * combined label combined_userCreator_engineType such as hadoop-IDE, spark-2.4.3 table field: + * combined_label_value field type: varchar(200) + */ + private String combinedLabelValue; + + /** id of linkis_ps_configuration_config_key table field: key_id field type: bigint(19) */ + private Long keyId; + + /** Configuration value table field: config_value field type: varchar(200) */ + private String configValue; + + /** Upper limit table field: max_value field type: varchar(50) */ + private String maxValue; + + /** Lower limit value (reserved) table field: min_value field type: varchar(50) */ + private String minValue; + + /** + * uuid The template id table field of the third-party record: latest_update_template_uuid Field + * type: varchar(34) + */ + private String latestUpdateTemplateUuid; + + /** Is it valid Reserved Y/N table field: is_valid field type: varchar(2) */ + private String isValid; + + /** Creator table field: create_by field type: varchar(50) */ + private String createBy; + + /** + * create time table field: create_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date createTime; + + /** Updater table field: update_by field type: varchar(50) */ + private String updateBy; + + /** + * update time table field: update_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date updateTime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public String getCombinedLabelValue() { + return combinedLabelValue; + } + + public void setCombinedLabelValue(String combinedLabelValue) { + this.combinedLabelValue = combinedLabelValue; + } + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getMinValue() { + return minValue; + } + + public void setMinValue(String minValue) { + this.minValue = minValue; + } + + public String getLatestUpdateTemplateUuid() { + return latestUpdateTemplateUuid; + } + + public void setLatestUpdateTemplateUuid(String latestUpdateTemplateUuid) { + this.latestUpdateTemplateUuid = latestUpdateTemplateUuid; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + 
public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(" ["); + sb.append("Hash = ").append(hashCode()); + sb.append(", id=").append(id); + sb.append(", userName=").append(userName); + sb.append(", combinedLabelValue=").append(combinedLabelValue); + sb.append(", keyId=").append(keyId); + sb.append(", configValue=").append(configValue); + sb.append(", maxValue=").append(maxValue); + sb.append(", minValue=").append(minValue); + sb.append(", latestUpdateTemplateUuid=").append(latestUpdateTemplateUuid); + sb.append(", isValid=").append(isValid); + sb.append(", createBy=").append(createBy); + sb.append(", createTime=").append(createTime); + sb.append(", updateBy=").append(updateBy); + sb.append(", updateTime=").append(updateTime); + sb.append(']'); + return sb.toString(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java new file mode 100644 index 0000000000..c612168713 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +public class ConfigKeyLimitVo { + + /** id : bigint(19) */ + private Long keyId; + + private String key; + + /** config_value varchar(200) */ + private String configValue; + + /** max_value varchar(50) */ + private String maxValue; + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java index 143566218c..19266bc691 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java @@ -17,6 +17,8 @@ package org.apache.linkis.configuration.entity; +import java.util.Map; + public class ConfigKeyValue { private Long id; @@ -53,6 +55,16 @@ public class ConfigKeyValue { private Boolean isUserDefined; + private Map specialLimit; + + public Map getSpecialLimit() { + return specialLimit; + } + + public void setSpecialLimit(Map specialLimit) { + this.specialLimit = specialLimit; + } + public Boolean getIsUserDefined() { return isUserDefined; } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java new file mode 100644 index 0000000000..273828ff02 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +public class ConfigUserValue { + + private String key; + + private String name; + // linkis_ps_configuration_config_key id + private Integer configKeyId; + + private String description; + + private String defaultValue; + + private String engineType; + // linkis_ps_configuration_config_value id + private Integer configValueId; + + private String configValue; + // linkis_cg_manager_label id + private Integer configLabelId; + + private String labelValue; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public Integer getConfigKeyId() { + return configKeyId; + } + + public void setConfigKeyId(Integer configKeyId) { + this.configKeyId = configKeyId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public Integer getConfigValueId() { + return configValueId; + } + + public void setConfigValueId(Integer configValueId) { + this.configValueId = configValueId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public Integer getConfigLabelId() { + return configLabelId; + } + + public void setConfigLabelId(Integer configLabelId) { + this.configLabelId = configLabelId; + } + + public String getLabelValue() { + return labelValue; + } + + public void setLabelValue(String labelValue) { + this.labelValue = labelValue; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public String toString() { + return "ConfigUserValue{" + + "key='" + + key + + '\'' + + ", name='" + + name + + '\'' + + ", configKeyId=" + + configKeyId + + ", description='" + + description + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", engineType='" + + engineType + + '\'' + + ", configValueId=" + + configValueId + + ", configValue='" + + configValue + + '\'' + + ", configLabelId=" + + configLabelId + + ", labelValue='" + + labelValue + + '\'' + + '}'; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java new file mode 100644 index 0000000000..b29b3742f2 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +/** Entity class for the linkis_ps_configuration_template_config_key table */ +public class TemplateConfigKey { + + /** Table field: id, field type: bigint(19) */ + private Long id; + + /** Configuration template name (stored redundantly). Table field: template_name, field type: varchar(200) */ + private String templateName; + + /** Template uuid, i.e. the template id recorded by the third-party system. Table field: template_uuid, field type: varchar(34) */ + private String templateUuid; + + /** Id of the linkis_ps_configuration_config_key table. Table field: key_id, field type: bigint(19) */ + private Long keyId; + + /** Configuration value. Table field: config_value, field type: varchar(200) */ + private String configValue; + + /** Upper limit. Table field: max_value, field type: varchar(50) */ + private String maxValue; + + /** Lower limit (reserved). Table field: min_value, field type: varchar(50) */ + private String minValue; + + /** Validation rule (reserved). Table field: validate_range, field type: varchar(50) */ + private String validateRange; + + /** Whether the record is valid, reserved Y/N. Table field: is_valid, field type: varchar(2) */ + private String isValid; + + /** Creator. Table field: create_by, field type: varchar(50) */ + private String createBy; + + /** Create time. Table field: create_time, field type: timestamp(19), default value: CURRENT_TIMESTAMP */ + private Date createTime; + + /** Updater. Table field: update_by, field type: varchar(50) */ + private String updateBy; + + /** Update time. Table field: update_time, field type: timestamp(19), default value: CURRENT_TIMESTAMP */ + private Date updateTime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getMinValue() { + return minValue; + } + + public void setMinValue(String minValue) { + this.minValue = minValue; + } + + public String getValidateRange() { + return validateRange; + } + + public void setValidateRange(String validateRange) { + this.validateRange = validateRange; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date
getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(" ["); + sb.append("Hash = ").append(hashCode()); + sb.append(", id=").append(id); + sb.append(", templateName=").append(templateName); + sb.append(", templateUuid=").append(templateUuid); + sb.append(", keyId=").append(keyId); + sb.append(", configValue=").append(configValue); + sb.append(", maxValue=").append(maxValue); + sb.append(", minValue=").append(minValue); + sb.append(", validateRange=").append(validateRange); + sb.append(", isValid=").append(isValid); + sb.append(", createBy=").append(createBy); + sb.append(", createTime=").append(createTime); + sb.append(", updateBy=").append(updateBy); + sb.append(", updateTime=").append(updateTime); + sb.append(']'); + return sb.toString(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java new file mode 100644 index 0000000000..796a90fa63 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +public class TemplateConfigKeyVO extends TemplateConfigKey { + + private String key; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java index 05ec8046fd..77d2c67576 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java @@ -19,10 +19,12 @@ import java.util.Date; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @ApiModel +@JsonIgnoreProperties(ignoreUnknown = true) public class UserIpVo { @ApiModelProperty("id") diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java new file mode 100644 index 0000000000..79bff7cae9 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.enumeration; + +public enum BoundaryTypeEnum { + /* + 0 none + 1 with min + 2 with max + 3 min and max both + */ + NONE(0), + WITH_MIX(1), + WITH_MAX(2), + WITH_BOTH(3); + + private Integer id; + + BoundaryTypeEnum(Integer id) { + this.id = id; + } + + public Integer getId() { + return this.id; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java index f02e0398f5..9f4a369d8d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java @@ -30,7 +30,7 @@ public enum LinkisConfigurationErrorCodeSummary implements LinkisErrorCode { CONFIGKEY_CANNOT_BE_NULL(14100, "ConfigKey cannot be null(configKey 不能为空)"), CONFIG_KEY_NOT_EXISTS(14100, "Config key not exists:{0}(配置键不存在:{0})"), LABEL_NOT_EXISTS(14100, "Label not exists:{0}(标签不存在{0})"), - KEY_OR_VALUE_CANNOT(14100, "Key or value cannot be null(键或值不能为空)"), + KEY_CANNOT_EMPTY(14100, "Key cannot be empty(Key 不能为空)"), PARAMS_CANNOT_BE_EMPTY(14100, "Params cannot be empty!(参数不能为空!)"), TOKEN_IS_ERROR(14100, "Token is error(令牌是错误的)"), IS_NULL_CANNOT_BE_ADDED(14100, "CategoryName is null, cannot be added(categoryName 为空,无法添加)"), @@ -39,9 +39,9 @@ public enum LinkisConfigurationErrorCodeSummary implements LinkisErrorCode { ENGINE_TYPE_IS_NULL(14100, "Engine type is null, cannot be added(引擎类型为空,无法添加)"), INCORRECT_FIXED_SUCH( 14100, - "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-3.2.1(保存的引擎类型参数有误,请按照固定格式传送,例如spark-3.2.1)"), + "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)"), INCOMPLETE_RECONFIRM(14100, "Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)"), - ONLY_ADMIN_CAN_MODIFY(14100, "Only admin can modify category(只有管理员才能修改目录)"), + ONLY_ADMIN_PERFORM(14100, "Only admins have permission to perform this operation(限管理员执行此操作)"), THE_LABEL_PARAMETER_IS_EMPTY(14100, " The label parameter is empty(标签参数为空)"), ERROR_VALIDATOR_RANGE(14100, "Error validator range!(错误验证器范围!)"), TYPE_OF_LABEL_NOT_SUPPORTED(14100, "This type of label is not supported:{0}(不支持这种类型的标签:{0})"); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java new file mode 100644 index 0000000000..3a01c86060 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java @@ -0,0 +1,332 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.restful.api; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.configuration.entity.AcrossClusterRule; +import org.apache.linkis.configuration.service.AcrossClusterRuleService; +import org.apache.linkis.configuration.util.CommonUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; + +import java.util.Map; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "across cluster rule api") +@RestController +@RequestMapping(path = "/configuration/acrossClusterRule") +public class AcrossClusterRuleRestfulApi { + + @Autowired private AcrossClusterRuleService acrossClusterRuleService; + + private Logger log = LoggerFactory.getLogger(this.getClass()); + + @ApiOperation( + value = "validate acrossClusterRule", + notes = "validate acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "id", dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + }) + @RequestMapping(path = "/isValid", method = RequestMethod.PUT) + public Message isValidRule(HttpServletRequest req, @RequestBody Map json) { + String operationUser = ModuleUserUtils.getOperationUser(req, "execute validate acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to validate acrossClusterRule, msg: only administrators can configure"); + } + + Integer idInt = (Integer) json.get("id"); + Long id = idInt.longValue(); + String isValid = (String) json.get("isValid"); + + if (StringUtils.isBlank(isValid)) { + return Message.error("Failed to validate acrossClusterRule: Illegal Input Param"); + } + + try { + acrossClusterRuleService.validAcrossClusterRule(id, isValid); + } catch (Exception e) { + log.error("validate acrossClusterRule failed: " + e.getMessage()); + return Message.error("validate acrossClusterRule failed"); + } + + return Message.ok(); + } + + @ApiOperation( + value = "query acrossClusterRule list", + notes = "query acrossClusterRule list", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + }) + @RequestMapping(path = "/list", method = RequestMethod.GET) + public Message queryAcrossClusterRuleList( + HttpServletRequest req, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "username", required = false) String username, + @RequestParam(value = "clusterName", required = false) String clusterName, + @RequestParam(value = "pageNow", required = false) Integer pageNow, + @RequestParam(value = "pageSize", required = false) Integer pageSize) { + String operationUser = + ModuleUserUtils.getOperationUser(req, "execute query acrossClusterRule List"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to query acrossClusterRule List, msg: only administrators can configure"); + } + + if (StringUtils.isBlank(username)) username = null; + if (StringUtils.isBlank(creator)) creator = null; + if (StringUtils.isBlank(clusterName)) clusterName = null; + if (null == pageNow) pageNow = 1; + if (null == pageSize) pageSize = 20; + + Map resultMap = null; + try { + resultMap = + acrossClusterRuleService.queryAcrossClusterRuleList( + creator, username, clusterName, pageNow, pageSize); + } catch (Exception e) { + log.error("query acrossClusterRule List failed: " + e.getMessage()); + return Message.error("query acrossClusterRule List failed"); + } + + Message msg = Message.ok(); + msg.getData().putAll(resultMap); + return msg; + } + + @ApiOperation( + value = "delete acrossClusterRule", + notes = "delete acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + }) + @RequestMapping(path = "/delete", method = RequestMethod.DELETE) + public Message deleteAcrossClusterRule( + HttpServletRequest req, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "username", required = false) String username) { + String operationUser = + ModuleUserUtils.getOperationUser(req, "execute delete acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to delete acrossClusterRule, msg: only administrators can configure"); + } + + if (StringUtils.isBlank(creator) || StringUtils.isBlank(username)) { + return Message.error("Failed to delete acrossClusterRule: Illegal Input Param"); + } + + try { + acrossClusterRuleService.deleteAcrossClusterRule(creator, username); + } catch (Exception e) { + log.error("delete acrossClusterRule failed: " + e.getMessage()); + return Message.error("delete acrossClusterRule failed"); + } + + return Message.ok(); + } + + @ApiOperation( + value = "update acrossClusterRule", + notes = "update acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "id", dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + @ApiImplicitParam(name = "startTime", dataType = "String", value = "startTime"), + @ApiImplicitParam(name = "endTime", dataType = "String", value = "endTime"), + @ApiImplicitParam(name = "CPUThreshold", dataType = "String", value = "CPUThreshold"), + @ApiImplicitParam(name = "MemoryThreshold", dataType = "String", value = "MemoryThreshold"), + @ApiImplicitParam( + name = "CPUPercentageThreshold", + dataType = "String", + value = "CPUPercentageThreshold"), + @ApiImplicitParam( + name = "MemoryPercentageThreshold", + dataType = "String", + value = "MemoryPercentageThreshold"), + }) + @RequestMapping(path = "/update", method = RequestMethod.PUT) + public Message updateAcrossClusterRule( + HttpServletRequest req, @RequestBody Map json) { + String operationUser = + ModuleUserUtils.getOperationUser(req, "execute update acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to update acrossClusterRule, msg: only administrators can configure"); + } + + Integer idInt = (Integer) json.get("id"); + Long id = idInt.longValue(); + String clusterName = (String) json.get("clusterName"); + String creator = (String) json.get("creator"); + String username = (String) json.get("username"); + String isValid = (String) json.get("isValid"); + String startTime = (String) json.get("startTime"); + String endTime = (String) json.get("endTime"); + String CPUThreshold = (String) json.get("CPUThreshold"); + String MemoryThreshold = (String) json.get("MemoryThreshold"); + String CPUPercentageThreshold = (String) json.get("CPUPercentageThreshold"); + String MemoryPercentageThreshold = (String) json.get("MemoryPercentageThreshold"); + if (StringUtils.isBlank(clusterName) + || StringUtils.isBlank(creator) + || StringUtils.isBlank(username) + || StringUtils.isBlank(isValid) + || StringUtils.isBlank(startTime) + || StringUtils.isBlank(endTime) + || StringUtils.isBlank(CPUThreshold) + || StringUtils.isBlank(MemoryThreshold) + || StringUtils.isBlank(CPUPercentageThreshold) + || StringUtils.isBlank(MemoryPercentageThreshold)) { + return Message.error("Failed to update acrossClusterRule: Illegal Input Param"); + } + + try { + String rules = + CommonUtils.ruleMap2String( + startTime, + endTime, + CPUThreshold, + MemoryThreshold, + CPUPercentageThreshold, + MemoryPercentageThreshold); + AcrossClusterRule acrossClusterRule = new AcrossClusterRule(); + acrossClusterRule.setId(id); + acrossClusterRule.setClusterName(clusterName.toLowerCase()); + acrossClusterRule.setCreator(creator); + acrossClusterRule.setUsername(username); + acrossClusterRule.setUpdateBy(operationUser); + acrossClusterRule.setRules(rules); + acrossClusterRule.setIsValid(isValid); + acrossClusterRuleService.updateAcrossClusterRule(acrossClusterRule); + } catch (Exception e) { + log.error("update acrossClusterRule failed: " + e.getMessage()); + return Message.error("update acrossClusterRule failed: history already exists"); + } + return Message.ok(); + } + + @ApiOperation( + value = "add acrossClusterRule", + notes = "add acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + @ApiImplicitParam(name = "startTime", dataType = "String", value = "startTime"), + @ApiImplicitParam(name = "endTime", dataType = "String", value = "endTime"), + @ApiImplicitParam(name = "CPUThreshold", dataType = "String", value = "CPUThreshold"), + @ApiImplicitParam(name = "MemoryThreshold", dataType = "String", value = "MemoryThreshold"), + @ApiImplicitParam( + name = "CPUPercentageThreshold", + dataType = "String", + value = "CPUPercentageThreshold"), + @ApiImplicitParam( + name = "MemoryPercentageThreshold", + dataType = "String", + value = "MemoryPercentageThreshold"), + }) + @RequestMapping(path = "/add", method = RequestMethod.POST) + public Message insertAcrossClusterRule( + HttpServletRequest req, @RequestBody Map json) { + String operationUser = ModuleUserUtils.getOperationUser(req, "execute add acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to add acrossClusterRule, msg: only administrators can configure"); + } + + String clusterName = (String) json.get("clusterName"); + String creator = (String) json.get("creator"); + String username = (String) json.get("username"); + String isValid = (String) json.get("isValid"); + String startTime = (String) json.get("startTime"); + String endTime = (String) json.get("endTime"); + String CPUThreshold = (String) json.get("CPUThreshold"); + String MemoryThreshold = (String) json.get("MemoryThreshold"); + String CPUPercentageThreshold = (String) json.get("CPUPercentageThreshold"); + String MemoryPercentageThreshold = (String) json.get("MemoryPercentageThreshold"); + if (StringUtils.isBlank(clusterName) + || StringUtils.isBlank(creator) + || StringUtils.isBlank(username) + || StringUtils.isBlank(isValid) + || StringUtils.isBlank(startTime) + || StringUtils.isBlank(endTime) + || StringUtils.isBlank(CPUThreshold) + || StringUtils.isBlank(MemoryThreshold) + || StringUtils.isBlank(CPUPercentageThreshold) + || StringUtils.isBlank(MemoryPercentageThreshold)) { + return Message.error("Failed to add acrossClusterRule: Illegal Input Param"); + } + + try { + String rules = + CommonUtils.ruleMap2String( + startTime, + endTime, + CPUThreshold, + MemoryThreshold, + CPUPercentageThreshold, + MemoryPercentageThreshold); + AcrossClusterRule acrossClusterRule = new AcrossClusterRule(); + acrossClusterRule.setClusterName(clusterName.toLowerCase()); + acrossClusterRule.setCreator(creator); + acrossClusterRule.setUsername(username); + acrossClusterRule.setCreateBy(operationUser); + acrossClusterRule.setUpdateBy(operationUser); + acrossClusterRule.setRules(rules); + acrossClusterRule.setIsValid(isValid); + acrossClusterRuleService.insertAcrossClusterRule(acrossClusterRule); + } catch (Exception e) { + log.error("add acrossClusterRule failed: " + e.getMessage()); + return Message.error("add acrossClusterRule failed: history already exists"); + } + + return Message.ok(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java index 283960d5df..11dfee8de1 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java @@ -26,6 +26,7 @@ import org.apache.linkis.configuration.util.ConfigurationConfiguration; import org.apache.linkis.configuration.util.JsonNodeUtil; import org.apache.linkis.configuration.util.LabelEntityParser; +import org.apache.linkis.configuration.validate.ValidatorManager; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import
org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.manager.label.utils.LabelUtils; @@ -33,6 +34,7 @@ import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -42,12 +44,13 @@ import java.io.IOException; import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.util.*; +import java.util.stream.Collectors; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -71,6 +74,8 @@ public class ConfigurationRestfulApi { @Autowired private ConfigKeyService configKeyService; + @Autowired private ValidatorManager validatorManager; + ObjectMapper mapper = new ObjectMapper(); private static final String NULL = "null"; @@ -144,6 +149,7 @@ public Message getFullTreesByAppName( ArrayList configTrees = configurationService.getFullTreeByLabelList( labelList, true, req.getHeader("Content-Language")); + return Message.ok().data("fullTree", configTrees); } @@ -152,9 +158,42 @@ public Message getFullTreesByAppName( public Message getCategory(HttpServletRequest req) { List categoryLabelList = categoryService.getAllCategory(req.getHeader("Content-Language")); + return Message.ok().data("Category", categoryLabelList); } + @ApiOperation( + value = "getItemList", + notes = "get configuration list by engineType", + response = Message.class) + @RequestMapping(path = "/getItemList", method = RequestMethod.GET) + public Message getItemList( + HttpServletRequest req, @RequestParam(value = "engineType") String engineType) + throws ConfigurationException { + ModuleUserUtils.getOperationUser(req, "getItemList with engineType:" + engineType); + // An engineType of * means that all configuration information is returned + if ("*".equals(engineType)) { + engineType = null; + } + List result = configKeyService.getConfigKeyList(engineType); + List> filterResult = new ArrayList<>(); + for (ConfigKey configKey : result) { + Map temp = new HashMap<>(); + temp.put("key", configKey.getKey()); + temp.put("name", configKey.getName()); + temp.put("description", configKey.getDescription()); + temp.put("engineType", configKey.getEngineType()); + temp.put("validateType", configKey.getValidateType()); + temp.put("validateRange", configKey.getValidateRange()); + temp.put("boundaryType", configKey.getBoundaryType()); + temp.put("defaultValue", configKey.getDefaultValue()); + temp.put("require", configKey.getTemplateRequired()); + filterResult.add(temp); + } + + return Message.ok().data("itemList", filterResult); + } + @ApiOperation( value = "createFirstCategory", notes = "create first category", @@ -254,10 +293,23 @@ public Message saveFullTree(HttpServletRequest req, @RequestBody JsonNode json) String username = ModuleUserUtils.getOperationUser(req, "saveFullTree"); ArrayList createList = new ArrayList<>(); ArrayList updateList = new ArrayList<>(); + ArrayList> checkList = new ArrayList<>(); + String sparkConf = ""; for (Object o : fullTrees) { String s = BDPJettyServerHelper.gson().toJson(o); ConfigTree fullTree = BDPJettyServerHelper.gson().fromJson(s, ConfigTree.class); List settings = fullTree.getSettings(); + checkList.add(settings); + for (ConfigKeyValue configKeyValue : settings) { + if (configKeyValue.getKey().equals("spark.conf") + && StringUtils.isNotBlank(configKeyValue.getConfigValue())) { + sparkConf = configKeyValue.getConfigValue().trim(); + configKeyValue.setConfigValue(sparkConf); + } + } + } + for (List settings : checkList) { + sparkConfCheck(settings, sparkConf); Integer userLabelId = configurationService.checkAndCreateUserLabel(settings, username, creator); for (ConfigKeyValue setting : settings) { @@ -304,12 +356,39 @@ public Message saveFullTree(HttpServletRequest req, @RequestBody JsonNode json) engineVersion); } }); + configurationService.clearAMCacheConf(username, creator, null, null); } else { configurationService.clearAMCacheConf(username, creator, engine, version); } return Message.ok(); } + private void sparkConfCheck(List settings, String sparkConf) + throws ConfigurationException { + if (StringUtils.isNotBlank(sparkConf)) { + // Check whether spark.conf contains duplicate keys, + // e.g. spark.conf : spark.shuffle.compress=true;spark.executor.memory=4g + String[] split = sparkConf.split(";"); + int setSize = + Arrays.stream(split).map(s -> s.split("=")[0].trim()).collect(Collectors.toSet()).size(); + int listSize = + Arrays.stream(split).map(s -> s.split("=")[0].trim()).collect(Collectors.toList()).size(); + if (listSize != setSize) { + throw new ConfigurationException("Spark.conf contains duplicate keys"); + } + // Check whether any key in spark.conf duplicates one of the other individually configured keys + for (String keyValue : split) { + String key = keyValue.split("=")[0].trim(); + boolean matchResult = + settings.stream().anyMatch(settingKey -> key.equals(settingKey.getKey())); + if (matchResult) { + throw new ConfigurationException( + "Saved key is duplicated with the spark conf key, key: " + key); + } + } + } + } + @ApiOperation( value = "listAllEngineType", notes = "list all engine type", @@ -376,7 +455,7 @@ public Message rpcTest( private void checkAdmin(String userName) throws ConfigurationException { if (!org.apache.linkis.common.conf.Configuration.isAdmin(userName)) { - throw new ConfigurationException(ONLY_ADMIN_CAN_MODIFY.getErrorDesc()); + throw new ConfigurationException(ONLY_ADMIN_PERFORM.getErrorDesc()); } } @@ -395,7 +474,7 @@ public Message getKeyValue( @RequestParam(value = "creator", required = false, defaultValue = "*") String creator, @RequestParam(value = "configKey") String configKey) throws ConfigurationException { - String username = ModuleUserUtils.getOperationUser(req, "saveKey"); + String username = ModuleUserUtils.getOperationUser(req, "getKeyValue"); if (engineType.equals("*") && !version.equals("*")) { return Message.error("When engineType is any engine, the version must also be any version"); } @@ -424,17 +503,26 @@ public Message getKeyValue( @RequestMapping(path = "/keyvalue", method = RequestMethod.POST) public Message saveKeyValue(HttpServletRequest req, @RequestBody Map json) throws ConfigurationException { + Message message = Message.ok(); String username = ModuleUserUtils.getOperationUser(req, "saveKey"); String engineType = (String) json.getOrDefault("engineType", "*"); + String user = (String) json.getOrDefault("user", ""); String version = (String) json.getOrDefault("version", "*"); String creator = (String) json.getOrDefault("creator", "*"); String configKey = (String) json.get("configKey"); String value = (String) json.get("configValue"); + boolean force =
Boolean.parseBoolean(json.getOrDefault("force", "false").toString()); + if (!org.apache.linkis.common.conf.Configuration.isAdmin(username) && !username.equals(user)) { + return Message.error("Only admin can modify other user configuration data"); + } if (engineType.equals("*") && !version.equals("*")) { return Message.error("When engineType is any engine, the version must also be any version"); } - if (StringUtils.isBlank(configKey) || StringUtils.isBlank(value)) { - return Message.error("key or value cannot be empty"); + if (StringUtils.isBlank(configKey)) { + return Message.error("key cannot be empty"); + } + if (StringUtils.isNotBlank(user)) { + username = user; } List labelList = LabelEntityParser.generateUserCreatorEngineTypeLabelList( @@ -444,9 +532,22 @@ public Message saveKeyValue(HttpServletRequest req, @RequestBody Map json) throws ConfigurationException { - String username = ModuleUserUtils.getOperationUser(req, "saveKey"); + String username = ModuleUserUtils.getOperationUser(req, "deleteKeyValue"); String engineType = (String) json.getOrDefault("engineType", "*"); String version = (String) json.getOrDefault("version", "*"); String creator = (String) json.getOrDefault("creator", "*"); @@ -477,4 +578,226 @@ public Message deleteKeyValue(HttpServletRequest req, @RequestBody Map configValues = configKeyService.deleteConfigValue(configKey, labelList); return Message.ok().data("configValues", configValues); } + + @ApiOperation(value = "getBaseKeyValue", notes = "get key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "key", required = false, dataType = "String", value = "key"), + @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", defaultValue = "1"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + defaultValue = "20"), + }) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.GET) + public Message getBaseKeyValue( + HttpServletRequest req, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "key", required = false) String key, + @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "getBaseKeyValue")); + if (StringUtils.isBlank(engineType)) { + engineType = null; + } + if (StringUtils.isBlank(key)) { + key = null; + } + PageHelper.startPage(pageNow, pageSize); + List list = null; + try { + list = configKeyService.getConfigBykey(engineType, key, req.getHeader("Content-Language")); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(list); + long total = pageInfo.getTotal(); + return Message.ok().data("configKeyList", list).data("totalPage", total); + } + + @ApiOperation(value = "deleteBaseKeyValue", notes = "delete key", response = Message.class) + @ApiImplicitParams({@ApiImplicitParam(name = "id", required = true, dataType = "Integer")}) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.DELETE) + public Message deleteBaseKeyValue(HttpServletRequest req, @RequestParam(value = "id") Integer id) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "deleteBaseKeyValue ID:" + id)); + configKeyService.deleteConfigById(id); + return Message.ok(); 
+ } + + @ApiOperation(value = "saveBaseKeyValue", notes = "save key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", required = false, dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "key", required = true, dataType = "String", value = "key"), + @ApiImplicitParam(name = "name", required = true, dataType = "String", value = "name"), + @ApiImplicitParam( + name = "description", + required = true, + dataType = "String", + value = "description"), + @ApiImplicitParam( + name = "defaultValue", + required = true, + dataType = "String", + value = "defaultValue"), + @ApiImplicitParam( + name = "validateType", + required = true, + dataType = "String", + value = "validateType"), + @ApiImplicitParam( + name = "validateRange", + required = true, + dataType = "String", + value = "validateRange"), + @ApiImplicitParam( + name = "boundaryType", + required = true, + dataType = "String", + value = "boundaryType"), + @ApiImplicitParam(name = "treeName", required = true, dataType = "String", value = "treeName"), + @ApiImplicitParam( + name = "engineType", + required = true, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "enName", required = false, dataType = "String", value = "enName"), + @ApiImplicitParam( + name = "enDescription", + required = false, + dataType = "String", + value = "enDescription"), + @ApiImplicitParam( + name = "enTreeName", + required = false, + dataType = "String", + value = "enTreeName"), + @ApiImplicitParam( + name = "templateRequired", + required = false, + dataType = "String", + value = "1"), + }) + @ApiOperationSupport(ignoreParameters = {"json"}) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.POST) + public Message saveBaseKeyValue(HttpServletRequest req, @RequestBody ConfigKey configKey) + throws ConfigurationException, InstantiationException, IllegalAccessException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "saveBaseKeyValue")); + String key = configKey.getKey(); + String name = configKey.getName(); + String treeName = configKey.getTreeName(); + String description = configKey.getDescription(); + Integer boundaryType = configKey.getBoundaryType(); + String defaultValue = configKey.getDefaultValue(); + String validateType = configKey.getValidateType(); + String validateRange = configKey.getValidateRange(); + String engineType = configKey.getEngineType(); + if (StringUtils.isBlank(key)) { + return Message.error("key cannot be empty"); + } + if (StringUtils.isBlank(name)) { + return Message.error("name cannot be empty"); + } + if (StringUtils.isBlank(description)) { + return Message.error("description cannot be empty"); + } + if (StringUtils.isBlank(treeName)) { + return Message.error("treeName cannot be empty"); + } + if (StringUtils.isBlank(validateType)) { + return Message.error("validateType cannot be empty"); + } + if (!validateType.equals("None") && StringUtils.isBlank(validateRange)) { + return Message.error("validateRange cannot be empty"); + } + if (null == boundaryType) { + return Message.error("boundaryType cannot be empty"); + } + if (StringUtils.isNotEmpty(defaultValue) + && !validatorManager + .getOrCreateValidator(validateType) + .validate(defaultValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter defaultValue verification failed(参数defaultValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2}, DefaultValue:{3}", + key, validateType, validateRange, defaultValue); + throw new ConfigurationException(msg); + } + if (null ==
configKey.getId()) { + List configBykey = + configKeyService.getConfigBykey(engineType, key, req.getHeader("Content-Language")); + if (CollectionUtils.isNotEmpty(configBykey)) { + return Message.error("The engine has the same key: " + key); + } + configKeyService.saveConfigKey(configKey); + } else { + configKey.setId(configKey.getId()); + configKeyService.updateConfigKey(configKey); + } + return Message.ok().data("configKey", configKey); + } + + @ApiOperation(value = "getUserkeyvalue", notes = "get key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "key", required = false, dataType = "String", value = "key"), + @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", required = false, dataType = "String", value = "user"), + @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", defaultValue = "1"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + defaultValue = "20"), + }) + @RequestMapping(path = "/userKeyValue", method = RequestMethod.GET) + public Message getUserKeyValue( + HttpServletRequest req, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "key", required = false) String key, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "user", required = false) String user, + @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "getUserKeyValue"); + if (StringUtils.isBlank(engineType)) { + engineType = null; + } + if (StringUtils.isBlank(key)) { + key = null; + } + if (StringUtils.isBlank(creator)) { + creator = null; + } + if (StringUtils.isBlank(user)) { + user = null; + } + + if (!org.apache.linkis.common.conf.Configuration.isAdmin(username) && !username.equals(user)) { + return Message.error("Only admin can query other user configuration data"); + } + + PageHelper.startPage(pageNow, pageSize); + List list; + try { + list = configKeyService.getUserConfigValue(engineType, key, creator, user); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(list); + long total = pageInfo.getTotal(); + return Message.ok().data("configValueList", list).data("totalPage", total); + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateManagerRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateManagerRestfulApi.java new file mode 100644 index 0000000000..7ed5c4e579 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateManagerRestfulApi.java @@ -0,0 +1,277 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.restful.api; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.configuration.service.TemplateConfigKeyService; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "configuration template") +@RestController +@RequestMapping(path = "/configuration/template") +public class TemplateManagerRestfulApi { + + private static final Logger logger = LoggerFactory.getLogger(TemplateManagerRestfulApi.class); + + @Autowired private TemplateConfigKeyService templateConfigKeyService; + + @ApiOperation( + value = "updateKeyMapping", + notes = "update template key mapping", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "templateUid", + dataType = "String", + required = true, + value = "templateUid"), + @ApiImplicitParam( + name = "templateName", + dataType = "String", + required = true, + value = "template name"), + @ApiImplicitParam(name = "engineType", dataType = "String", required = true, value = "engine type"), + @ApiImplicitParam(name = "operator", dataType = "String", value = "operator"), + @ApiImplicitParam(name = "isFullMode", dataType = "Boolean", value = "isFullMode"), + @ApiImplicitParam(name = "itemList", dataType = "Array", value = "itemList"), + }) + @RequestMapping(path = "/updateKeyMapping", method = RequestMethod.POST) + public Message updateKeyMapping(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "updateKeyMapping"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to updateKeyMapping.", token); + return Message.error("Token:" + token + " has no permission to updateKeyMapping."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to updateKeyMapping.", username); + return Message.error("User:" + username + " has no permission to updateKeyMapping."); + } + + String templateUid = jsonNode.get("templateUid").asText(); + String templateName = jsonNode.get("templateName").asText(); + String engineType = jsonNode.get("engineType").asText(); + String operator = jsonNode.get("operator").asText(); + + if (StringUtils.isBlank(templateUid)) { + return Message.error("parameters:templateUid can not be empty(请求参数【templateUid】不能为空)"); + } + if (StringUtils.isBlank(templateName)) { + return Message.error("parameters:templateName can not be empty(请求参数【templateName】不能为空)"); + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can not be empty(请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(operator)) { + return Message.error("parameters:operator can not be empty(请求参数【operator】不能为空)"); + } + boolean isFullMode = true; + try { + isFullMode = jsonNode.get("isFullMode").asBoolean(); + logger.info("will update by param isFullMode:" + isFullMode); + } catch (Exception e) { + logger.info("will update by default isFullMode:" + isFullMode); + } + + JsonNode itemParms = jsonNode.get("itemList"); + + List confKeyList = new ArrayList<>(); + if (itemParms != null && !itemParms.isNull()) { + try { + confKeyList = + JsonUtils.jackson() + .readValue(itemParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:itemList parsing failed(请求参数【itemList】解析失败), error with:" + e.getMessage()); + } + } else { + return Message.error("parameters:itemList can not be empty(请求参数【itemList】不能为空)"); + } + + logger.info( + "request parameters templateUid:{}, templateName:{}, engineType:{}, operator:{}, isFullMode:{}, itemList:[{}]", + templateUid, + templateName, + engineType, + operator, + isFullMode, + itemParms.toString()); + + templateConfigKeyService.updateKeyMapping( + templateUid, templateName, engineType, operator, confKeyList, isFullMode); + return Message.ok(); + } + + @ApiOperation(value = "queryKeyInfoList", notes = "query key info list", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "templateUidList", dataType = "Array", value = "templateUidList"), + }) + @RequestMapping(path = "/queryKeyInfoList", method = RequestMethod.POST) + public Message queryKeyInfoList(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "queryKeyInfoList"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to queryKeyInfoList.", token); + return Message.error("Token:" + token + " has no permission to queryKeyInfoList."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to queryKeyInfoList.", username); + return Message.error("User:" + username + " has no permission to queryKeyInfoList."); + } + + JsonNode templateUidListParms = jsonNode.get("templateUidList"); + + List uuidList = new ArrayList<>(); + if (templateUidListParms != null && !templateUidListParms.isNull()) { + try { + uuidList = + JsonUtils.jackson() + .readValue(templateUidListParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:templateUidList parsing failed(请求参数【templateUidList】解析失败), error with:" + + e.getMessage()); + } + } else { + return Message.error( +
"parameters:templateUidList can not be empty(请求参数【templateUidList】不能为空)"); + } + + List result = templateConfigKeyService.queryKeyInfoList(uuidList); + + return Message.ok().data("list", result); + } + + @ApiOperation(value = "apply", notes = "apply conf template rule", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "templateUid", + dataType = "String", + required = true, + value = "templateUid"), + @ApiImplicitParam(name = "application", dataType = "String", value = "application"), + @ApiImplicitParam(name = "engineType", dataType = "String", value = "engineType"), + @ApiImplicitParam(name = "engineVersion", dataType = "String", value = "engineVersion"), + @ApiImplicitParam(name = "operator", dataType = "String", value = "operator"), + @ApiImplicitParam(name = "userList", dataType = "Array", value = "userList"), + }) + @RequestMapping(path = "/apply", method = RequestMethod.POST) + public Message apply(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "apply"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to apply.", token); + return Message.error("Token:" + token + " has no permission to apply."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to apply.", username); + return Message.error("User:" + username + " has no permission to apply."); + } + + String templateUid = jsonNode.get("templateUid").asText(); + String application = jsonNode.get("application").asText(); + String engineType = jsonNode.get("engineType").asText(); + String engineVersion = jsonNode.get("engineVersion").asText(); + String operator = jsonNode.get("operator").asText(); + + if (StringUtils.isBlank(templateUid)) { + return Message.error("parameters:templateUid can not be empty(请求参数【templateUid】不能为空)"); + } + if (StringUtils.isBlank(application)) { + return Message.error("parameters:application can not be empty(请求参数【application】不能为空)"); + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can not be empty(请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(engineVersion)) { + return Message.error("parameters:engineVersion can not be empty(请求参数【engineVersion】不能为空)"); + } + if (StringUtils.isBlank(operator)) { + return Message.error("parameters:operator can not be empty(请求参数【operator】不能为空)"); + } + + JsonNode userParms = jsonNode.get("userList"); + List userList = new ArrayList<>(); + if (userParms != null && !userParms.isNull()) { + try { + userList = + JsonUtils.jackson() + .readValue(userParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:userList parsing failed(请求参数【userList】解析失败), error with:" + e.getMessage()); + } + } else { + return Message.error("parameters:userList can not be empty(请求参数【userList】不能为空)"); + } + + logger.info( + "request parameters templateUid:{}, application:{}, engineType:{}, engineVersion:{}, operator:{},userList:[{}]", + templateUid, + application, + engineType, + engineVersion, + operator, + String.join(",", userList)); + + Map result = + templateConfigKeyService.apply( + templateUid, application, engineType, engineVersion, operator, userList); + + Message message = Message.ok(); + message.getData().putAll(result); + return message; + } +} diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java new file mode 100644 index 0000000000..2fff11c871 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service; + +import org.apache.linkis.configuration.entity.AcrossClusterRule; + +import java.util.Map; + +public interface AcrossClusterRuleService { + + void deleteAcrossClusterRule(String creator, String username) throws Exception; + + void updateAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception; + + void insertAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception; + + Map queryAcrossClusterRuleList( + String creator, String username, String clusterName, Integer pageNow, Integer pageSize) + throws Exception; + + void validAcrossClusterRule(Long id, String isValid) throws Exception; +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java index 665f359483..758ac9e91d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java @@ -17,7 +17,9 @@ package org.apache.linkis.configuration.service; +import org.apache.linkis.configuration.entity.ConfigKey; import org.apache.linkis.configuration.entity.ConfigKeyValue; +import org.apache.linkis.configuration.entity.ConfigUserValue; import org.apache.linkis.configuration.entity.ConfigValue; import org.apache.linkis.configuration.exception.ConfigurationException; import org.apache.linkis.manager.label.entity.Label; @@ -32,6 +34,19 @@ ConfigValue saveConfigValue(ConfigKeyValue configKeyValue, List> labelL List getConfigValue(String configKey, List> labelList) throws ConfigurationException; + List getConfigKeyList(String engineType) throws ConfigurationException; + List deleteConfigValue(String configKey, List> labelList) throws ConfigurationException; + + List getConfigBykey(String engineType, String key, String language); + + void deleteConfigById(Integer id); + + ConfigKey saveConfigKey(ConfigKey configKey); + + List getUserConfigValue( + String engineType, String key, String creator, 
String user); + + void updateConfigKey(ConfigKey configKey); } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java new file mode 100644 index 0000000000..bde686c6d0 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse; + +import java.util.List; +import java.util.Map; + +public interface TemplateConfigKeyService { + + Boolean updateKeyMapping( + String templateUid, + String templateName, + String engineType, + String operator, + List itemList, + Boolean isFullMode) + throws ConfigurationException; + + List queryKeyInfoList(List uuidList) throws ConfigurationException; + + Map apply( + String templateUid, + String application, + String engineType, + String engineVersion, + String operator, + List userList) + throws ConfigurationException; + + TemplateConfResponse queryKeyInfoList(TemplateConfRequest templateConfRequest); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java new file mode 100644 index 0000000000..a906ca2d1a --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service.impl; + +import org.apache.linkis.configuration.dao.AcrossClusterRuleMapper; +import org.apache.linkis.configuration.entity.AcrossClusterRule; +import org.apache.linkis.configuration.service.AcrossClusterRuleService; +import org.apache.linkis.governance.common.constant.job.JobRequestConstants; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.*; + +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class AcrossClusterRuleServiceImpl implements AcrossClusterRuleService { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + @Autowired private AcrossClusterRuleMapper ruleMapper; + + @Override + public void deleteAcrossClusterRule(String creator, String username) throws Exception { + ruleMapper.deleteAcrossClusterRule(creator, username); + } + + @Override + public void updateAcrossClusterRule(AcrossClusterRule newRule) throws Exception { + AcrossClusterRule beforeRule = ruleMapper.getAcrossClusterRule(newRule.getId()); + if (beforeRule == null) { + throw new Exception("acrossClusterRule does not exist"); + } + + Date time = new Date(); + newRule.setCreateBy(beforeRule.getCreateBy()); + newRule.setCreateTime(beforeRule.getCreateTime()); + newRule.setUpdateTime(time); + + ruleMapper.updateAcrossClusterRule(newRule); + } + + @Override + public void insertAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception { + Date time = new Date(); + acrossClusterRule.setCreateTime(time); + acrossClusterRule.setUpdateTime(time); + ruleMapper.insertAcrossClusterRule(acrossClusterRule); + } + + @Override + public Map<String, Object> queryAcrossClusterRuleList( + String creator, String username, String clusterName, Integer pageNow, Integer pageSize) { + Map<String, Object> result = new HashMap<>(2); + List<AcrossClusterRule> acrossClusterRules = null; + if (Objects.isNull(pageNow)) { + pageNow = 1; + } + if (Objects.isNull(pageSize)) { + pageSize = 20; + } + PageHelper.startPage(pageNow, pageSize); + + try { + acrossClusterRules = ruleMapper.queryAcrossClusterRuleList(username, creator, clusterName); + } finally { + PageHelper.clearPage(); + } + PageInfo<AcrossClusterRule> pageInfo = new PageInfo<>(acrossClusterRules); + result.put("acrossClusterRuleList", acrossClusterRules); + result.put(JobRequestConstants.TOTAL_PAGE(), pageInfo.getTotal()); + return result; + } + + @Override + public void validAcrossClusterRule(Long id, String isValid) throws Exception { + AcrossClusterRule beforeRule = ruleMapper.getAcrossClusterRule(id); + + if (beforeRule == null) { + throw new Exception("acrossClusterRule does not exist"); + } + + ruleMapper.validAcrossClusterRule(isValid, id); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java index 6811b5e7e2..0747afc57b 100644 ---
a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java @@ -19,10 +19,7 @@ import org.apache.linkis.configuration.dao.ConfigMapper; import org.apache.linkis.configuration.dao.LabelMapper; -import org.apache.linkis.configuration.entity.ConfigKey; -import org.apache.linkis.configuration.entity.ConfigKeyValue; -import org.apache.linkis.configuration.entity.ConfigLabel; -import org.apache.linkis.configuration.entity.ConfigValue; +import org.apache.linkis.configuration.entity.*; import org.apache.linkis.configuration.exception.ConfigurationException; import org.apache.linkis.configuration.service.ConfigKeyService; import org.apache.linkis.configuration.util.LabelEntityParser; @@ -63,9 +60,8 @@ public class ConfigKeyServiceImpl implements ConfigKeyService { public ConfigValue saveConfigValue(ConfigKeyValue configKeyValue, List> labelList) throws ConfigurationException { - if (StringUtils.isBlank(configKeyValue.getConfigValue()) - || StringUtils.isBlank(configKeyValue.getKey())) { - throw new ConfigurationException(KEY_OR_VALUE_CANNOT.getErrorDesc()); + if (StringUtils.isBlank(configKeyValue.getKey())) { + throw new ConfigurationException(KEY_CANNOT_EMPTY.getErrorDesc()); } LabelParameterParser.labelCheck(labelList); @@ -163,6 +159,11 @@ public List getConfigValue(String key, List> labelList) return configValues; } + @Override + public List getConfigKeyList(String engineType) throws ConfigurationException { + return configMapper.selectKeyByEngineType(engineType); + } + @Override public List deleteConfigValue(String key, List> labelList) throws ConfigurationException { @@ -174,4 +175,37 @@ public List deleteConfigValue(String key, List> labelList) logger.info("succeed to remove key: {} by label:{} ", key, combinedLabel.getStringValue()); return configValues; } + + @Override + public List getConfigBykey(String engineType, String key, String language) { + List configkeyList; + if ("en".equals(language)) { + configkeyList = configMapper.getConfigEnBykey(engineType, key); + } else { + configkeyList = configMapper.getConfigBykey(engineType, key); + } + return configkeyList; + } + + @Override + public void deleteConfigById(Integer id) { + configMapper.deleteConfigKey(id); + } + + @Override + public ConfigKey saveConfigKey(ConfigKey configKey) { + configMapper.insertKeyByBase(configKey); + return null; + } + + @Override + public List getUserConfigValue( + String engineType, String key, String creator, String user) { + return configMapper.getUserConfigValue(key, user, creator, engineType); + } + + @Override + public void updateConfigKey(ConfigKey configKey) { + configMapper.updateConfigKey(configKey); + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java new file mode 100644 index 0000000000..cc10066a10 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java @@ -0,0 +1,500 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service.impl; + +import org.apache.linkis.configuration.dao.ConfigKeyLimitForUserMapper; +import org.apache.linkis.configuration.dao.ConfigMapper; +import org.apache.linkis.configuration.dao.LabelMapper; +import org.apache.linkis.configuration.dao.TemplateConfigKeyMapper; +import org.apache.linkis.configuration.entity.*; +import org.apache.linkis.configuration.enumeration.BoundaryTypeEnum; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.configuration.service.ConfigKeyService; +import org.apache.linkis.configuration.service.ConfigurationService; +import org.apache.linkis.configuration.service.TemplateConfigKeyService; +import org.apache.linkis.configuration.util.LabelEntityParser; +import org.apache.linkis.configuration.validate.ValidatorManager; +import org.apache.linkis.governance.common.entity.TemplateConfKey; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse; +import org.apache.linkis.manager.label.entity.CombinedLabel; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class TemplateConfigKeyServiceImpl implements TemplateConfigKeyService { + + private static final Logger logger = LoggerFactory.getLogger(TemplateConfigKeyServiceImpl.class); + + @Autowired private ConfigMapper configMapper; + + @Autowired private LabelMapper labelMapper; + + @Autowired private TemplateConfigKeyMapper templateConfigKeyMapper; + + @Autowired private ConfigurationService configurationService; + + @Autowired private ConfigKeyService configKeyService; + + @Autowired private ValidatorManager validatorManager; + + @Autowired private ConfigKeyLimitForUserMapper configKeyLimitForUserMapper; + + @Autowired private PlatformTransactionManager platformTransactionManager; + + @Override + @Transactional + public Boolean updateKeyMapping( + String templateUid, + String templateName, + String engineType, + String operator, + List itemList, + Boolean isFullMode) + throws ConfigurationException { + + // Query the corresponding 
data and check the validity of the data(查询对应的数据 并做数据合法性检查) + List<String> keyList = itemList.stream().map(e -> e.getKey()).collect(Collectors.toList()); + List<ConfigKey> configKeyList = + configMapper.selectKeyByEngineTypeAndKeyList(engineType, keyList); + // List of key ids to be updated(待更新的key id 列表) + List<Long> keyIdList = configKeyList.stream().map(e -> e.getId()).collect(Collectors.toList()); + if (configKeyList.size() != itemList.size()) { + List<String> dbKeyList = + configKeyList.stream().map(e -> e.getKey()).collect(Collectors.toList()); + String msg = + MessageFormat.format( + "The number of configuration items from the DB is inconsistent with the input(从DB中获取到的配置数据条数不一致) :" + + "engineType:{0}, input keys:{1}, db keys:{2}", + engineType, String.join(",", keyList), String.join(",", dbKeyList)); + throw new ConfigurationException(msg); + } + + List<TemplateConfigKey> toUpdateOrInsertList = new ArrayList<>(); + + // map k:v---> key:ConfigKey + Map<String, ConfigKey> configKeyMap = + configKeyList.stream().collect(Collectors.toMap(ConfigKey::getKey, item -> item)); + for (ConfigKeyLimitVo item : itemList) { + + String key = item.getKey(); + ConfigKey temp = configKeyMap.get(item.getKey()); + String validateType = temp.getValidateType(); + String validateRange = temp.getValidateRange(); + String configValue = item.getConfigValue(); + String maxValue = item.getMaxValue(); + + if (StringUtils.isNotEmpty(configValue) + && !validatorManager + .getOrCreateValidator(validateType) + .validate(configValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter configValue verification failed(参数configValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2},ConfigValue:{3}", + key, validateType, validateRange, configValue); + throw new ConfigurationException(msg); + } + + if (StringUtils.isNotEmpty(maxValue) + && BoundaryTypeEnum.WITH_BOTH.getId().equals(temp.getBoundaryType())) { + if (!validatorManager + .getOrCreateValidator(validateType) + .validate(maxValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter maxValue verification failed(参数maxValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2}, maxValue:{3}", + key, validateType, validateRange, maxValue); + throw new ConfigurationException(msg); + } + + try { + Integer maxVal = Integer.valueOf(maxValue.replaceAll("[^0-9]", "")); + Integer configVal = Integer.valueOf(configValue.replaceAll("[^0-9]", "")); + if (configVal > maxVal) { + String msg = + MessageFormat.format( + "Parameter key:{0},config value:{1} verification failed, " + + "exceeds the specified max value: {2}:(参数校验失败,超过指定的最大值):", + key, configVal, maxVal); + throw new ConfigurationException(msg); + } + } catch (Exception exception) { + if (exception instanceof ConfigurationException) { + throw exception; + } else { + logger.warn( + "Failed to check special limit setting for key:" + + key + + ",config value:" + + configValue); + } + } + } + + Long keyId = temp.getId(); + + TemplateConfigKey templateConfigKey = new TemplateConfigKey(); + templateConfigKey.setTemplateName(templateName); + templateConfigKey.setTemplateUuid(templateUid); + templateConfigKey.setKeyId(keyId); + templateConfigKey.setConfigValue(configValue); + templateConfigKey.setMaxValue(maxValue); + templateConfigKey.setCreateBy(operator); + templateConfigKey.setUpdateBy(operator); + toUpdateOrInsertList.add(templateConfigKey); + } + // Update data according to different mode + if (isFullMode) { + // The data previously in the database needs to be removed + List<TemplateConfigKey> oldList =
templateConfigKeyMapper.selectListByTemplateUuid(templateUid); + List<Long> needToRemoveList = + oldList.stream() + .filter( + item -> { + return !keyIdList.contains(item.getKeyId()); + }) + .map(e -> e.getKeyId()) + .collect(Collectors.toList()); + if (needToRemoveList.size() > 0) { + logger.info( + "Try to remove old data:[" + needToRemoveList + "] for templateUid:" + templateUid); + templateConfigKeyMapper.deleteByTemplateUuidAndKeyIdList(templateUid, needToRemoveList); + } + } + + if (toUpdateOrInsertList.size() == 0) { + String msg = "No key data to update. Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + templateConfigKeyMapper.batchInsertOrUpdateList(toUpdateOrInsertList); + + return true; + } + + @Override + public List<Object> queryKeyInfoList(List<String> uuidList) throws ConfigurationException { + List<Object> result = new ArrayList<>(); + + List<TemplateConfigKey> templateConfigKeyList = + templateConfigKeyMapper.selectListByTemplateUuidList(uuidList); + + Map<String, List<TemplateConfigKey>> templateConfigKeyListGroupByUuid = + templateConfigKeyList.stream() + .collect(Collectors.groupingBy(TemplateConfigKey::getTemplateUuid)); + + List<Long> keyIdList = + templateConfigKeyList.stream() + .map(e -> e.getKeyId()) + .distinct() + .collect(Collectors.toList()); + + if (keyIdList.size() == 0) { + String msg = "can not get any config key info from db. Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + List<ConfigKey> configKeyList = configMapper.selectKeyByKeyIdList(keyIdList); + // map k:v---> keyId:ConfigKey + Map<Long, ConfigKey> configKeyMap = + configKeyList.stream().collect(Collectors.toMap(ConfigKey::getId, item -> item)); + + for (String uuid : templateConfigKeyListGroupByUuid.keySet()) { + Map<String, Object> item = new HashMap<>(); + List<Map<String, Object>> keys = new ArrayList<>(); + item.put("templateUid", uuid); + + String engineType = ""; + List<String> engineTypeList = templateConfigKeyMapper.selectEngineTypeByTemplateUuid(uuid); + + if (engineTypeList.size() > 1) { + String msg = + MessageFormat.format( + "template uuid:{0} is associated with more than one engine type:{1}! Please check if the keys are correct", + uuid, StringUtils.join(engineTypeList.toArray(), ",")); + throw new ConfigurationException(msg); + } + + if (engineTypeList.size() == 0) { + String msg = + MessageFormat.format( + "template uuid:{0} can not be associated with any engine type!
Please check if the keys are correct", + uuid); + throw new ConfigurationException(msg); + } + + engineType = engineTypeList.get(0); + + Map templateConfigKeyMap = + templateConfigKeyListGroupByUuid.get(uuid).stream() + .collect(Collectors.toMap(TemplateConfigKey::getKeyId, elemt -> elemt)); + + List ecKeyList = configKeyService.getConfigKeyList(engineType); + for (ConfigKey configKey : ecKeyList) { + Map temp = new HashMap<>(); + temp.put("key", configKey.getKey()); + temp.put("name", configKey.getName()); + temp.put("description", configKey.getDescription()); + temp.put("engineType", configKey.getEngineType()); + temp.put("validateType", configKey.getValidateType()); + temp.put("validateRange", configKey.getValidateRange()); + temp.put("boundaryType", configKey.getBoundaryType()); + temp.put("defaultValue", configKey.getDefaultValue()); + temp.put("require", configKey.getTemplateRequired()); + temp.put("keyId", configKey.getId()); + + Long keyId = configKey.getId(); + TemplateConfigKey templateConfigKey = templateConfigKeyMap.get(keyId); + + if (templateConfigKey == null) { + temp.put("configValue", null); + temp.put("maxValue", null); + temp.put("createBy", null); + temp.put("createTime", null); + temp.put("updateBy", null); + temp.put("updateTime", null); + } else { + temp.put("configValue", templateConfigKey.getConfigValue()); + temp.put("maxValue", templateConfigKey.getMaxValue()); + temp.put("createBy", templateConfigKey.getCreateBy()); + temp.put("createTime", templateConfigKey.getCreateTime()); + temp.put("updateBy", templateConfigKey.getUpdateBy()); + temp.put("updateTime", templateConfigKey.getUpdateTime()); + } + + keys.add(temp); + } + + item.put("itemList", keys); + result.add(item); + } + return result; + } + + @Override + public Map apply( + String templateUid, + String application, + String engineType, + String engineVersion, + String operator, + List userList) + throws ConfigurationException { + List successList = new ArrayList<>(); + List errorList = new ArrayList<>(); + + // get the associated config itsm list + List templateUuidList = new ArrayList<>(); + templateUuidList.add(templateUid); + List templateConfigKeyList = + templateConfigKeyMapper.selectListByTemplateUuidList(templateUuidList); + if (templateConfigKeyList.size() == 0) { + String msg = + MessageFormat.format( + "The template configuration is empty. 
Please check the template associated configuration information in the database table" + + "(模板关联的配置为空,请检查数据库表中关于模板id:{0} 关联配置项是否完整)", + templateUid); + throw new ConfigurationException(msg); + } + // check input engineType is same as template key engineType + List<Long> keyIdList = + templateConfigKeyList.stream() + .map(e -> e.getKeyId()) + .distinct() + .collect(Collectors.toList()); + + if (keyIdList.size() == 0) { + String msg = "can not get any config key info from db. Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + List<ConfigKey> configKeyList = configMapper.selectKeyByKeyIdList(keyIdList); + // distinct engine types of the associated keys + Set<String> configKeyEngineTypeSet = + configKeyList.stream().map(ConfigKey::getEngineType).collect(Collectors.toSet()); + + if (configKeyEngineTypeSet == null || configKeyEngineTypeSet.size() == 0) { + String msg = + MessageFormat.format( + "Unable to get configuration parameter information associated with template id:{0}, please check whether the parameters are correct" + + "(无法获取模板:{0} 关联的配置参数信息,请检查参数是否正确)", + templateUid); + throw new ConfigurationException(msg); + } + + if (configKeyEngineTypeSet.size() != 1 || !configKeyEngineTypeSet.contains(engineType)) { + String msg = + MessageFormat.format( + "The engineType:{0} associated with the template:{1} does not match the input engineType:{2}, please check whether the parameters are correct" + + "(模板关联的引擎类型:{0} 和下发的引擎类型:{2} 不匹配,请检查参数是否正确)", + String.join(",", configKeyEngineTypeSet), templateUid, engineType); + throw new ConfigurationException(msg); + } + for (String user : userList) { + // try to create combined_userCreator_engineType label for user + Map<String, Object> res = new HashMap<>(); + res.put("user", user); + try { + CombinedLabel combinedLabel = + configurationService.generateCombinedLabel( + engineType, engineVersion, user, application); + String conbinedLabelKey = combinedLabel.getLabelKey(); + String conbinedLabelStringValue = combinedLabel.getStringValue(); + // check label is ok + + ConfigLabel configLabel = + labelMapper.getLabelByKeyValue(conbinedLabelKey, conbinedLabelStringValue); + if (null == configLabel || configLabel.getId() < 0) { + configLabel = LabelEntityParser.parseToConfigLabel(combinedLabel); + labelMapper.insertLabel(configLabel); + logger.info("succeed to create label: {}", configLabel.getStringValue()); + } + + // batch update config value + List<ConfigValue> configValues = new ArrayList<>(); + + List<ConfigKeyLimitForUser> configKeyLimitForUsers = new ArrayList<>(); + + for (TemplateConfigKey templateConfigKey : templateConfigKeyList) { + Long keyId = templateConfigKey.getKeyId(); + String uuid = templateConfigKey.getTemplateUuid(); + String confVal = templateConfigKey.getConfigValue(); + String maxVal = templateConfigKey.getMaxValue(); + + ConfigValue configValue = new ConfigValue(); + configValue.setConfigKeyId(keyId); + configValue.setConfigValue(confVal); + configValue.setConfigLabelId(configLabel.getId()); + configValues.add(configValue); + + ConfigKeyLimitForUser configKeyLimitForUser = new ConfigKeyLimitForUser(); + configKeyLimitForUser.setUserName(user); + configKeyLimitForUser.setCombinedLabelValue(configLabel.getStringValue()); + configKeyLimitForUser.setKeyId(keyId); + configKeyLimitForUser.setConfigValue(confVal); + configKeyLimitForUser.setMaxValue(maxVal); + configKeyLimitForUser.setLatestUpdateTemplateUuid(uuid); + configKeyLimitForUser.setCreateBy(operator); + configKeyLimitForUser.setUpdateBy(operator); + configKeyLimitForUsers.add(configKeyLimitForUser); + } + + if (configValues.size() == 0) {
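+ // Nothing was mapped from the template keys for this user; record a per-user + // error entry rather than failing the whole batch, so the remaining users are + // still processed and summarized under the success/error counts below.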
res.put("msg", "can not get any right key form the db"); + errorList.add(res); + } else { + + DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition(); + TransactionStatus status = + platformTransactionManager.getTransaction(transactionDefinition); + try { + configMapper.batchInsertOrUpdateValueList(configValues); + // batch update user ConfigKeyLimitForUserMapper + configKeyLimitForUserMapper.batchInsertOrUpdateList(configKeyLimitForUsers); + + platformTransactionManager.commit(status); // commit transaction if everything's fine + } catch (Exception ex) { + platformTransactionManager.rollback( + status); // rollback transaction if any error occurred + throw ex; + } + successList.add(res); + } + + } catch (Exception e) { + logger.warn("try to update configurations for user:" + user + " with error", e); + res.put("msg", e.getMessage()); + errorList.add(res); + } + } + + Map result = new HashMap<>(); + + Map successResult = new HashMap<>(); + Map errorResult = new HashMap<>(); + + successResult.put("num", successList.size()); + successResult.put("infoList", successList); + + errorResult.put("num", errorList.size()); + errorResult.put("infoList", errorList); + + result.put("success", successResult); + result.put("error", errorResult); + return result; + } + + @Receiver + @Override + public TemplateConfResponse queryKeyInfoList(TemplateConfRequest templateConfRequest) { + TemplateConfResponse result = new TemplateConfResponse(); + String templateUid = templateConfRequest.getTemplateUuid(); + String templateName = templateConfRequest.getTemplateName(); + if (logger.isDebugEnabled()) { + logger.debug("query conf list with uid:{},name:{}", templateUid, templateName); + } + if (StringUtils.isBlank(templateUid) && StringUtils.isBlank(templateName)) { + return result; + } + + List voList = new ArrayList<>(); + + if (StringUtils.isNotBlank(templateUid)) { + voList = templateConfigKeyMapper.selectInfoListByTemplateUuid(templateUid); + + } else { + voList = templateConfigKeyMapper.selectInfoListByTemplateName(templateName); + } + List data = new ArrayList<>(); + if (voList != null) { + for (TemplateConfigKeyVO temp : voList) { + TemplateConfKey item = new TemplateConfKey(); + item.setTemplateUuid(temp.getTemplateUuid()); + item.setKey(temp.getKey()); + item.setTemplateName(temp.getTemplateName()); + item.setConfigValue(temp.getConfigValue()); + data.add(item); + if (logger.isDebugEnabled()) { + logger.debug("query conf item={}", item); + } + } + } + result.setList(data); + return result; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java index bf9755a307..df64521ad4 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java @@ -124,35 +124,35 @@ public void createTenant(TenantVo tenantVo) throws ConfigurationException { } private void dataProcessing(TenantVo tenantVo) throws ConfigurationException { + AtomicReference tenantResult = new AtomicReference<>(false); + // Obtain the tenant information of the ECM list + Map ecmListResult = null; + try { + ecmListResult = HttpsUtil.sendHttp(null, null); + 
logger.info("Request ecm list response {}:", ecmListResult); + } catch (IOException e) { + logger.warn("failed to get ecmResource data"); + } + Map>> data = MapUtils.getMap(ecmListResult, "data"); + List> emNodeVoList = data.get("EMs"); + // Compare ECM list tenant labels for task + emNodeVoList.forEach( + ecmInfo -> { + List> labels = (List>) ecmInfo.get("labels"); + labels.stream() + .filter(labelmap -> labelmap.containsKey("tenant")) + .forEach( + map -> { + String tenant = map.get("tenant").toString().toLowerCase(); + if (tenant.equals(tenantVo.getTenantValue().toLowerCase())) { + tenantResult.set(true); + } + }); + }); + // Compare the value of ecm tenant + if (!tenantResult.get()) + throw new ConfigurationException("The ECM with the corresponding label was not found"); if (!tenantVo.getCreator().equals("*")) { - AtomicReference tenantResult = new AtomicReference<>(false); - // Obtain the tenant information of the ECM list - Map ecmListResult = null; - try { - ecmListResult = HttpsUtil.sendHttp(null, null); - logger.info("Request ecm list response {}:", ecmListResult); - } catch (IOException e) { - logger.warn("failed to get ecmResource data"); - } - Map>> data = MapUtils.getMap(ecmListResult, "data"); - List> emNodeVoList = data.get("EMs"); - // Compare ECM list tenant labels for task - emNodeVoList.forEach( - ecmInfo -> { - List> labels = (List>) ecmInfo.get("labels"); - labels.stream() - .filter(labelmap -> labelmap.containsKey("tenant")) - .forEach( - map -> { - String tenant = map.get("tenant").toString().toLowerCase(); - if (tenant.equals(tenantVo.getTenantValue().toLowerCase())) { - tenantResult.set(true); - } - }); - }); - // Compare the value of ecm tenant - if (!tenantResult.get()) - throw new ConfigurationException("The ECM with the corresponding label was not found"); // The beginning of tenantValue needs to contain creator String creator = tenantVo.getCreator().toLowerCase(); String[] tenantArray = tenantVo.getTenantValue().toLowerCase().split("_"); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java index ed80f09a0a..2d3f9b2008 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java @@ -17,6 +17,16 @@ package org.apache.linkis.configuration.util; +import org.apache.linkis.server.BDPJettyServerHelper; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import static org.apache.linkis.configuration.conf.AcrossClusterRuleKeys.*; + public class CommonUtils { public static boolean ipCheck(String str) { if (str != null && !str.isEmpty()) { @@ -28,4 +38,32 @@ public static boolean ipCheck(String str) { } return false; } + + public static String ruleMap2String( + String startTime, + String endTime, + String CPUThreshold, + String MemoryThreshold, + String CPUPercentageThreshold, + String MemoryPercentageThreshold) + throws JsonProcessingException { + Map queueRuleMap = new HashMap<>(); + Map timeRuleMap = new HashMap<>(); + Map thresholdRuleMap = new HashMap<>(); + Map ruleMap = new HashMap<>(); + queueRuleMap.put(KEY_QUEUE_SUFFIX, KEY_ACROSS_CLUSTER_QUEUE_SUFFIX); + 
timeRuleMap.put(KEY_START_TIME, startTime); + timeRuleMap.put(KEY_END_TIME, endTime); + thresholdRuleMap.put(KEY_CPU_THRESHOLD, CPUThreshold); + thresholdRuleMap.put(KEY_MEMORY_THRESHOLD, MemoryThreshold); + thresholdRuleMap.put(KEY_CPU_PERCENTAGE_THRESHOLD, CPUPercentageThreshold); + thresholdRuleMap.put(KEY_MEMORY_PERCENTAGE_THRESHOLD, MemoryPercentageThreshold); + ruleMap.put(KEY_QUEUE_RULE, queueRuleMap); + ruleMap.put(KEY_TIME_RULE, timeRuleMap); + ruleMap.put(KEY_THRESHOLD_RULE, thresholdRuleMap); + ObjectMapper map2Json = BDPJettyServerHelper.jacksonJson(); + String rules = map2Json.writeValueAsString(ruleMap); + + return rules; + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml index cc92785262..78035f139d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml @@ -47,7 +47,17 @@ + + @@ -57,6 +67,17 @@ #{labelKey}, #{stringValue}, #{feature}, #{labelValueSize}, now(), now()) + + + INSERT IGNORE INTO linkis_cg_manager_label( + label_key, label_value,label_feature, label_value_size, update_time, create_time) + VALUES + + #{item.labelKey}, #{item.stringValue}, #{item.feature}, #{item.labelValueSize}, now(), now() + + + + DELETE FROM linkis_cg_manager_label WHERE id IN diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/AcrossClusterRuleMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/AcrossClusterRuleMapper.xml new file mode 100644 index 0000000000..2d6c1898a9 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/AcrossClusterRuleMapper.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + id,cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + + + + + + INSERT INTO + linkis_ps_configutation_lm_across_cluster_rule () + VALUES + (#{acrossClusterRule.clusterName},#{acrossClusterRule.creator}, #{acrossClusterRule.username}, + + + #{acrossClusterRule.createTime} + + + now() + + , + #{acrossClusterRule.createBy}, + + + #{acrossClusterRule.updateTime} + + + now() + + , + #{acrossClusterRule.updateBy},#{acrossClusterRule.rules}, #{acrossClusterRule.isValid}) + + + + DELETE + FROM + `linkis_ps_configutation_lm_across_cluster_rule` + WHERE + creator = #{creator} AND username = #{username} + + + + UPDATE + `linkis_ps_configutation_lm_across_cluster_rule` + SET + cluster_name = #{acrossClusterRule.clusterName}, creator = #{acrossClusterRule.creator}, + username=#{acrossClusterRule.username}, create_time=#{acrossClusterRule.createTime}, + create_By=#{acrossClusterRule.createBy}, + + + update_time=#{acrossClusterRule.updateTime} + + + update_time = now() + + , + update_By=#{acrossClusterRule.updateBy}, rules=#{acrossClusterRule.rules}, + is_valid=#{acrossClusterRule.isValid} + WHERE + id = #{acrossClusterRule.id} + + + + UPDATE + `linkis_ps_configutation_lm_across_cluster_rule` + SET + is_valid = #{isValid} + WHERE + id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigKeyLimitForUserMapper.xml 
b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigKeyLimitForUserMapper.xml new file mode 100644 index 0000000000..74d1749105 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigKeyLimitForUserMapper.xml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + id, user_name, combined_label_value, key_id, config_value, max_value, min_value, latest_update_template_uuid, is_valid, + create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_key_limit_for_user ( + id, user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + is_valid, create_by, create_time, update_by, + update_time) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, + #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.isValid,jdbcType=VARCHAR}, #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, + now()) + + + + + update linkis_ps_configuration_key_limit_for_user + set user_name = #{userName,jdbcType=VARCHAR}, + combined_label_value = #{combinedLabelValue,jdbcType=VARCHAR}, + key_id = #{keyId,jdbcType=BIGINT}, + config_value = #{configValue,jdbcType=VARCHAR}, + max_value = #{maxValue,jdbcType=VARCHAR}, + min_value = #{minValue,jdbcType=VARCHAR}, + latest_update_template_uuid = #{latestUpdateTemplateUuid,jdbcType=VARCHAR}, + is_valid = #{isValid,jdbcType=VARCHAR}, + create_by = #{createBy,jdbcType=VARCHAR}, + update_by = #{updateBy,jdbcType=VARCHAR}, + update_time = now() + where id = #{id,jdbcType=BIGINT} + + + + + + insert into linkis_ps_configuration_key_limit_for_user (user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + create_by, create_time, update_by, + update_time) + values + + ( + #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + max_value =values(max_value), + latest_update_template_uuid =values(latest_update_template_uuid), + update_by =values(update_by), + update_time= now() + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml index d282d1cb27..1fb4d179e6 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml @@ -24,7 +24,6 @@ - @@ -32,8 +31,7 @@ - - + @@ -51,8 +49,9 @@ - - + + + @@ -78,9 +77,20 @@ + + + + + + + + + + + - `id`, `key`, `description`, `name`, `engine_conn_type`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName` + `id`, `key`, `description`, `name`, `engine_conn_type`, `default_value`, 
`validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type` @@ -93,7 +103,7 @@ - k.id, k.key,COALESCE(k.en_description, k.description) as description, + k.id, k.`key`,COALESCE(k.en_description, k.description) as description, COALESCE(k.en_name, k.name) as name, k.engine_conn_type, k.default_value, k.validate_type, k.validate_range, k.is_hidden, k.is_advanced, k.level,COALESCE(k.en_treeName, k.treeName) as treeName, @@ -113,9 +123,19 @@ INSERT INTO linkis_ps_configuration_config_key ( - id, `key`, description, name, engine_conn_type, default_value, validate_type, validate_range, is_hidden, is_advanced, level, treeName) + `id`, `key`, `description`, + `name`, `engine_conn_type`, `default_value`, + `validate_type`, `validate_range`, `is_hidden`, + `is_advanced`, `level`, `treeName`, + `boundary_type`, `en_name`, `en_treeName`, + `en_description`) VALUES ( - #{id}, #{key}, #{description}, #{name}, #{engineType}, #{defaultValue}, #{validateType}, #{validateRange}, #{isHidden}, #{isAdvanced}, #{level}, #{treeName}) + #{id}, #{key}, #{description}, + #{name}, #{engineType}, #{defaultValue}, + #{validateType}, #{validateRange}, #{isHidden}, + #{isAdvanced}, #{level}, #{treeName}, + #{boundaryType}, #{enName}, #{enTreeName}, + #{enDescription}) + + + + + + + + + + + + + + SELECT * FROM linkis_ps_configuration_config_value WHERE id = #{id} @@ -257,4 +365,121 @@ WHERE id = #{categoryId} + + DELETE FROM linkis_ps_configuration_config_key + WHERE id = #{id} + + + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_key ( + `key`, `description`, `name`, + `engine_conn_type`, `default_value`, `validate_type`, + `validate_range`, `is_hidden`, `is_advanced`, + `level`, `treeName`, `boundary_type`, + `en_name`, `en_treeName`, `en_description`, + `template_required` + ) + VALUES ( + #{key}, #{description}, #{name}, + #{engineType}, #{defaultValue}, #{validateType}, + #{validateRange}, #{isHidden}, #{isAdvanced}, + #{level}, #{treeName}, #{boundaryType}, + #{enName}, #{enTreeName}, #{enDescription}, + #{templateRequired} + ) + + + + UPDATE linkis_ps_configuration_config_key + + `key` = #{key}, + `name` = #{name}, + `description` = #{description}, + `engine_conn_type` = #{engineType}, + `default_value` = #{defaultValue}, + `validate_type` = #{validateType}, + `validate_range` = #{validateRange}, + `validate_range` = #{validateRange}, + `treeName` = #{treeName}, + `boundary_type` = #{boundaryType}, + `en_name` = #{enName}, + `en_treeName` = #{enTreeName}, + `en_description` = #{enDescription}, + `template_required` = #{templateRequired}, + + WHERE id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/TemplateConfigKeyMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/TemplateConfigKeyMapper.xml new file mode 100644 index 0000000000..50a11607c6 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/TemplateConfigKeyMapper.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, template_name, template_uuid, key_id, config_value, max_value, min_value, validate_range, + is_valid, create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_template_config_key (id, template_name, template_uuid, + key_id, config_value, max_value, + min_value, validate_range, is_valid, + create_by, create_time, update_by, update_time + ) + values + + ( + 
#{item.id,jdbcType=BIGINT}, #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.validateRange,jdbcType=VARCHAR}, #{item.isValid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, now() + ) + + + + + + + delete + from linkis_ps_configuration_template_config_key + where template_uuid = #{templateUuid,jdbcType=VARCHAR} + and key_id in + + #{item} + + + + + insert into linkis_ps_configuration_template_config_key (template_name, template_uuid, + key_id, config_value, max_value, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + template_name =values(template_name), + config_value =values(config_value), + max_value =values(max_value), + update_by=values(update_by), + update_time= now() + + + + + + + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml new file mode 100644 index 0000000000..2245dbae35 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + id,cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + + + + + + INSERT INTO + linkis_ps_configutation_lm_across_cluster_rule () + VALUES + (#{acrossClusterRule.clusterName},#{acrossClusterRule.creator}, #{acrossClusterRule.username}, + + + #{acrossClusterRule.createTime} + + + now() + + , + #{acrossClusterRule.createBy}, + + + #{acrossClusterRule.updateTime} + + + now() + + , + #{acrossClusterRule.updateBy},#{acrossClusterRule.rules}, #{acrossClusterRule.isValid}) + + + + DELETE + FROM + "linkis_ps_configutation_lm_across_cluster_rule" + WHERE + creator = #{creator} AND username = #{username} + + + + UPDATE + "linkis_ps_configutation_lm_across_cluster_rule" + SET + cluster_name = #{acrossClusterRule.clusterName}, creator = #{acrossClusterRule.creator}, + username=#{acrossClusterRule.username}, create_time=#{acrossClusterRule.createTime}, + create_By=#{acrossClusterRule.createBy}, + + + update_time=#{acrossClusterRule.updateTime} + + + update_time = now() + + , + update_By=#{acrossClusterRule.updateBy}, rules=#{acrossClusterRule.rules}, + is_valid=#{acrossClusterRule.isValid} + WHERE + id = #{acrossClusterRule.id} + + + + UPDATE + "linkis_ps_configutation_lm_across_cluster_rule" + SET + is_valid = #{isValid} + WHERE + id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml new file mode 100644 index 0000000000..74d1749105 --- 
/dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + id, user_name, combined_label_value, key_id, config_value, max_value, min_value, latest_update_template_uuid, is_valid, + create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_key_limit_for_user ( + id, user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + is_valid, create_by, create_time, update_by, + update_time) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, + #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.isValid,jdbcType=VARCHAR}, #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, + now()) + + + + + update linkis_ps_configuration_key_limit_for_user + set user_name = #{userName,jdbcType=VARCHAR}, + combined_label_value = #{combinedLabelValue,jdbcType=VARCHAR}, + key_id = #{keyId,jdbcType=BIGINT}, + config_value = #{configValue,jdbcType=VARCHAR}, + max_value = #{maxValue,jdbcType=VARCHAR}, + min_value = #{minValue,jdbcType=VARCHAR}, + latest_update_template_uuid = #{latestUpdateTemplateUuid,jdbcType=VARCHAR}, + is_valid = #{isValid,jdbcType=VARCHAR}, + create_by = #{createBy,jdbcType=VARCHAR}, + update_by = #{updateBy,jdbcType=VARCHAR}, + update_time = now() + where id = #{id,jdbcType=BIGINT} + + + + + + insert into linkis_ps_configuration_key_limit_for_user (user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + create_by, create_time, update_by, + update_time) + values + + ( + #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + max_value =values(max_value), + latest_update_template_uuid =values(latest_update_template_uuid), + update_by =values(update_by), + update_time= now() + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml index 45b51f969a..1a84794a05 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml @@ -1,4 +1,4 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_value( config_key_id, config_value, config_label_id, create_time, update_time) @@ -194,16 +273,40 @@ - - INSERT INTO linkis_ps_configuration_config_value( - config_key_id, config_value, config_label_id, create_time, update_time) + + REPLACE INTO linkis_ps_configuration_config_value( + id, config_key_id, config_value, config_label_id, create_time, update_time) VALUES ( - 
#{configKeyId},#{configValue}, #{configLabelId}, now(), now()) - on conflict(config_key_id, config_label_id) - do update set - config_value=EXCLUDED.config_value, - update_time=EXCLUDED.update_time, - create_time=EXCLUDED.create_time + #{id},#{configKeyId},#{configValue}, #{configLabelId}, now(), now()) + + + + INSERT INTO linkis_ps_configuration_config_value(config_key_id, config_value, config_label_id, create_time, update_time) + VALUES + + ( + #{item.configKeyId},#{item.configValue}, #{item.configLabelId}, + + + #{item.createTime}, + + + now(), + + + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + update_time= now() + SELECT + "id", "key", "description", "name" , + "engine_conn_type", "default_value", "validate_type", "validate_range", + "is_hidden", "is_advanced", "level", "treeName", "boundary_type", "template_required" + FROM linkis_ps_configuration_config_key + + "engine_conn_type" = #{engineType} + and "key" like concat('%',#{key},'%') + + ORDER BY engine_conn_type + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_key ( + "key", "description", "name", + "engine_conn_type", "default_value", "validate_type", + "validate_range", "is_hidden", "is_advanced", + "level", "treeName", "boundary_type", + "en_name", "en_treeName", "en_description", + "template_required" + ) + VALUES ( + #{key}, #{description}, #{name}, + #{engineType}, #{defaultValue}, #{validateType}, + #{validateRange}, #{isHidden}, #{isAdvanced}, + #{level}, #{treeName}, #{boundaryType}, + #{enName}, #{enTreeName}, #{enDescription}, + #{templateRequired} + ) + + + + UPDATE linkis_ps_configuration_config_key + + "key" = #{key}, + "name" = #{name}, + "description" = #{description}, + "engine_conn_type" = #{engineType}, + "default_value" = #{defaultValue}, + "validate_type" = #{validateType}, + "validate_range" = #{validateRange}, + "validate_range" = #{validateRange}, + "treeName" = #{treeName}, + "boundary_type" = #{boundaryType}, + "en_name" = #{enName}, + "en_treeName" = #{enTreeName}, + "en_description" = #{enDescription}, + "template_required" = #{templateRequired}, + + WHERE id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml new file mode 100644 index 0000000000..16fd6c014d --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, template_name, template_uuid, key_id, config_value, max_value, min_value, validate_range, + is_valid, create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_template_config_key (id, template_name, template_uuid, + key_id, config_value, max_value, + min_value, validate_range, is_valid, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.validateRange,jdbcType=VARCHAR}, #{item.isValid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, now() + ) + + + + + + + delete + from 
linkis_ps_configuration_template_config_key + where template_uuid = #{templateUuid,jdbcType=VARCHAR} + and key_id in + + #{item} + + + + + insert into linkis_ps_configuration_template_config_key (template_name, template_uuid, + key_id, config_value, max_value, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + template_name =values(template_name), + config_value =values(config_value), + max_value =values(max_value), + update_by=values(update_by), + update_time= now() + + + + + + + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala index 1a0f714522..3f86697254 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala +++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala @@ -17,9 +17,9 @@ package org.apache.linkis.configuration.service -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.configuration.conf.Configuration -import org.apache.linkis.configuration.dao.{ConfigMapper, LabelMapper} +import org.apache.linkis.configuration.dao.{ConfigKeyLimitForUserMapper, ConfigMapper, LabelMapper} import org.apache.linkis.configuration.entity._ import org.apache.linkis.configuration.exception.ConfigurationException import org.apache.linkis.configuration.util.{LabelEntityParser, LabelParameterParser} @@ -42,6 +42,7 @@ import org.springframework.stereotype.Service import org.springframework.transaction.annotation.Transactional import org.springframework.util.CollectionUtils +import java.text.MessageFormat import java.util import scala.collection.JavaConverters._ @@ -57,6 +58,8 @@ class ConfigurationService extends Logging { @Autowired private var validatorManager: ValidatorManager = _ + @Autowired private var configKeyLimitForUserMapper: ConfigKeyLimitForUserMapper = _ + private val combinedLabelBuilder: CombinedLabelBuilder = new CombinedLabelBuilder @Transactional @@ -93,12 +96,6 @@ class ConfigurationService extends Logging { } } - def insertCreator(creator: String): Unit = { - val creatorID: Long = configMapper.selectAppIDByAppName(creator) - if (creatorID > 0) configMapper.insertCreator(creator) - else logger.warn(s"creator${creator} exists") - } - def checkAndCreateUserLabel( settings: util.List[ConfigKeyValue], username: String, @@ -178,6 +175,33 @@ class ConfigurationService extends Logging { createList: util.List[ConfigValue], updateList: util.List[ConfigValue] ): Any = { + + val configLabel = labelMapper.getLabelById(setting.getConfigLabelId) + val combinedLabel = combinedLabelBuilder + .buildFromStringValue(configLabel.getLabelKey, configLabel.getStringValue) + .asInstanceOf[CombinedLabel] + val templateConfigKeyVo = + configKeyLimitForUserMapper.selectByLabelAndKeyId(combinedLabel.getStringValue, setting.getId) + if 
(templateConfigKeyVo != null && StringUtils.isNotBlank(templateConfigKeyVo.getMaxValue)) {
+ Utils.tryCatch {
+ val maxValue = Integer.valueOf(templateConfigKeyVo.getMaxValue.replaceAll("[^0-9]", ""))
+ val configValue = Integer.valueOf(setting.getConfigValue.replaceAll("[^0-9]", ""))
+ if (configValue > maxValue) {
+ throw new ConfigurationException(
+ s"Parameter key: ${setting.getKey}, config value: ${setting.getConfigValue} validation failed: exceeds the specified max value ${templateConfigKeyVo.getMaxValue}: " +
+ s"${setting.getValidateType}--${setting.getValidateRange}"
+ )
+ }
+ } {
+ case configurationException: ConfigurationException => throw configurationException
+ case _: Exception =>
+ logger.warn(
+ s"Failed to check special limit setting for key: ${setting.getKey}, config value: ${setting.getConfigValue}"
+ )
+ }
+ } paramCheck(setting) if (setting.getIsUserDefined) { val configValue = new ConfigValue @@ -259,6 +283,12 @@ combinedLabel.asInstanceOf[CombinedLabelImpl] }
+ /**
+ * Priority: configs > defaultConfigs (a key present in configs overrides its default value)
+ * @param configs the user-saved config values
+ * @param defaultConfigs the default config values
+ * @return the merged tree result
+ */
def buildTreeResult( configs: util.List[ConfigKeyValue], defaultConfigs: util.List[ConfigKeyValue] = new util.ArrayList[ConfigKeyValue]() @@ -269,9 +299,8 @@ defaultConfig.setIsUserDefined(false) configs.asScala.foreach(config => { if (config.getKey != null && config.getKey.equals(defaultConfig.getKey)) {
- if (StringUtils.isNotBlank(config.getConfigValue)) {
- defaultConfig.setConfigValue(config.getConfigValue)
- }
+ // configValue must be replaced even when the new value is empty
+ defaultConfig.setConfigValue(config.getConfigValue)
defaultConfig.setConfigLabelId(config.getConfigLabelId) defaultConfig.setValueId(config.getValueId) defaultConfig.setIsUserDefined(true) @@ -380,6 +409,35 @@ replaceCreatorToEngine(defaultCreatorConfigs, defaultEngineConfigs) } }
+
+ // add special config limit info
+ if (defaultEngineConfigs.size() > 0) {
+ val keyIdList = defaultEngineConfigs.asScala
+ .map(e => e.getId)
+ .toList
+ .asJava
+ val limitList =
+ configKeyLimitForUserMapper.selectByLabelAndKeyIds(combinedLabel.getStringValue, keyIdList)
+ defaultEngineConfigs.asScala.foreach(entity => {
+ val keyId = entity.getId
+ val res = limitList.asScala.filter(v => v.getKeyId == keyId).toList.asJava
+ if (res.size() > 0) {
+ val specialMap = new util.HashMap[String, String]()
+ val maxValue = res.get(0).getMaxValue
+ if (StringUtils.isNotBlank(maxValue)) {
+ specialMap.put("maxValue", maxValue)
+ entity.setSpecialLimit(specialMap)
+ }
+ }
+ })
+ } else {
+ logger.warn(
+ s"The configuration is empty. Please check whether the configuration information for label ${combinedLabel.getStringValue} in the database table is complete"
+ )
+ }
+
+ (configs, defaultEngineConfigs) }
diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java new file mode 100644 index 0000000000..a7ef5a1c14 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java @@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.configuration.dao;
+
+import org.apache.linkis.configuration.entity.ConfigKeyLimitForUser;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.List;
+import java.util.UUID;
+
+import org.instancio.Instancio;
+import org.instancio.Select;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+class ConfigKeyLimitForUserMapperTest extends BaseDaoTest {
+
+ @Autowired ConfigKeyLimitForUserMapper configKeyLimitForUserMapper;
+
+ String uuid = UUID.randomUUID().toString();
+ String name = "for-test";
+
+ private List<ConfigKeyLimitForUser> initData() {
+ List<ConfigKeyLimitForUser> list =
+ Instancio.ofList(ConfigKeyLimitForUser.class)
+ .generate(Select.field(ConfigKeyLimitForUser::getIsValid), gen -> gen.oneOf("Y", "N"))
+ .create();
+ ConfigKeyLimitForUser configKeyLimitForUser = new ConfigKeyLimitForUser();
+ configKeyLimitForUser.setUserName("testuser");
+ configKeyLimitForUser.setCombinedLabelValue("IDE-hadoop,spark-2.3.3");
+ configKeyLimitForUser.setKeyId(1L);
+ configKeyLimitForUser.setLatestUpdateTemplateUuid(uuid);
+ configKeyLimitForUser.setCreateBy("test");
+ configKeyLimitForUser.setUpdateBy("test");
+ list.add(configKeyLimitForUser);
+ configKeyLimitForUserMapper.batchInsertList(list);
+ return list;
+ }
+
+ @Test
+ void batchInsertOrUpdateListTest() {
+ List<ConfigKeyLimitForUser> list = initData();
+ list.get(1).setLatestUpdateTemplateUuid("123456");
+ int isOk = configKeyLimitForUserMapper.batchInsertOrUpdateList(list);
+ Assertions.assertTrue(isOk > 1);
+ }
+}
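A standalone sketch (class and method names invented for illustration, not part of the patch) of the digit-stripping comparison that the ConfigurationService max-value check above applies; unit suffixes such as "G" are dropped before comparing, so "300G" against a limit of "100G" compares 300 with 100:

public class MaxValueCheckSketch {

  // The service strips every non-digit character from both sides before comparing.
  // In the real code a NumberFormatException (e.g. a value with no digits at all)
  // is swallowed by Utils.tryCatch; here it simply propagates.
  static boolean exceedsMax(String configValue, String maxValue) {
    int max = Integer.parseInt(maxValue.replaceAll("[^0-9]", ""));
    int value = Integer.parseInt(configValue.replaceAll("[^0-9]", ""));
    return value > max;
  }

  public static void main(String[] args) {
    System.out.println(exceedsMax("300G", "100G")); // true: 300 > 100
    System.out.println(exceedsMax("8", "10"));      // false: 8 <= 10
  }
}

diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java index 619bd2be2c..a2bea5fd68 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java @@ 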
-17,23 +17,6 @@ package org.apache.linkis.configuration.dao; -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - import org.apache.linkis.configuration.entity.*; import org.springframework.beans.factory.annotation.Autowired; @@ -88,11 +71,6 @@ void testGetConfigKeyValueByLabelId() { assertEquals(7, configKeyValueList.size()); } - /** - * When using the h2 library for testing,if the function(on conflict) is not supported,an error - * will be reported, and the pg physical library will not guarantee an error pg使用h2库测试时不支持函数(on - * conflict)会报错,pg实体库不会报错 - */ @Test void testInsertValue() { ConfigValue result = insertConfigValue(); @@ -159,12 +137,6 @@ void testListKeyByStringValue() { // assertEquals(7, configKeyList.size()); } - @Test - void testInsertCreator() { - // mapper方法没有对应的实现类 - // configMapper.insertCreator("tom"); - } - @Test void testGetCategory() { List categoryLabelList = configMapper.getCategory(); @@ -208,6 +180,7 @@ void testUpdateCategory() { void testInsertKey() { ConfigKey configKey = new ConfigKey(); configKey.setKey("wds.linkis.rm.instance.max.max"); + configKey.setBoundaryType(3); configMapper.insertKey(configKey); ConfigKey result = configMapper.selectKeyByKeyID(8L); // assertEquals("wds.linkis.rm.instance.max.max", result.getKey()); diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java index 0046246818..4b7e69784e 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java @@ -17,23 +17,6 @@ package org.apache.linkis.configuration.dao; -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - import org.apache.linkis.configuration.entity.ConfigLabel; import org.springframework.beans.factory.annotation.Autowired; diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java new file mode 100644 index 0000000000..64b12ba7f4 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java @@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.configuration.dao;
+
+import org.apache.linkis.configuration.entity.TemplateConfigKey;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+import org.instancio.Instancio;
+import org.instancio.Select;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+class TemplateConfigKeyMapperTest extends BaseDaoTest {
+
+ @Autowired TemplateConfigKeyMapper templateConfigKeyMapper;
+ String uuid = UUID.randomUUID().toString();
+ String name = "for-test";
+
+ private List<TemplateConfigKey> initData() {
+ List<TemplateConfigKey> list =
+ Instancio.ofList(TemplateConfigKey.class)
+ .size(9)
+ .generate(Select.field(TemplateConfigKey::getIsValid), gen -> gen.oneOf("Y", "N"))
+ .create();
+
+ TemplateConfigKey templateConfigKey = new TemplateConfigKey();
+ templateConfigKey.setTemplateName(name);
+ templateConfigKey.setTemplateUuid(uuid);
+ templateConfigKey.setKeyId(1L);
+ templateConfigKey.setConfigValue("3");
+ templateConfigKey.setMaxValue("8");
+ templateConfigKey.setCreateBy("test");
+ templateConfigKey.setUpdateBy("test");
+ list.add(templateConfigKey);
+ templateConfigKeyMapper.batchInsertList(list);
+ return list;
+ }
+
+ @Test
+ void selectListByTemplateUuid() {
+ initData();
+ List<TemplateConfigKey> res = templateConfigKeyMapper.selectListByTemplateUuid(uuid);
+ assertEquals(1, res.size());
+ assertEquals(name, res.get(0).getTemplateName());
+ }
+
+ @Test
+ void deleteByTemplateUuidAndKeyIdList() {
+ List<TemplateConfigKey> list = initData();
+ List<Long> keyIdList = new ArrayList<>();
+ keyIdList.add(1L);
+ int num = templateConfigKeyMapper.deleteByTemplateUuidAndKeyIdList(uuid, keyIdList);
+ assertEquals(1, num);
+ }
+
+ @Test
+ void batchInsertOrUpdateList() {
+ List<TemplateConfigKey> list = initData();
+ list.get(1).setConfigValue("20");
+ int isOK = templateConfigKeyMapper.batchInsertOrUpdateList(list);
+ Assertions.assertTrue(isOK >= 1);
+ }
+
+ @Test
+ void selectListByTemplateUuidList() {
+ List<TemplateConfigKey> list = initData();
+ List<String> templateUuidList = new ArrayList<>();
+ 
templateUuidList.add(uuid); + templateUuidList.add("123456"); + List res = + templateConfigKeyMapper.selectListByTemplateUuidList(templateUuidList); + Assertions.assertTrue(res.size() == 1); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java deleted file mode 100644 index ef466be542..0000000000 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.configuration.dao; - -import org.apache.linkis.configuration.entity.UserIpVo; - -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.Date; -import java.util.List; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -class UserIpMapperTest extends BaseDaoTest { - - @Autowired UserIpMapper userIpMapper; - - UserIpVo insert() { - UserIpVo userIpVo = new UserIpVo(); - userIpVo.setUser("user"); - userIpVo.setBussinessUser("bussinessUser"); - userIpVo.setCreator("creator"); - userIpVo.setCreateTime(new Date()); - userIpVo.setUpdateTime(new Date()); - userIpVo.setDesc("desc"); - userIpVo.setIpList("ips"); - userIpMapper.createUserIP(userIpVo); - return userIpVo; - } - - @Test - void createUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - assertTrue(userIpVo != null); - } - - @Test - void deleteUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - userIpMapper.deleteUserIP(Integer.valueOf(userIpVo.getId())); - UserIpVo list = userIpMapper.queryUserIP("user", "creator"); - assertTrue(list == null); - } - - @Test - void updateUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - UserIpVo updateUserIpVo = new UserIpVo(); - updateUserIpVo.setId(userIpVo.getId()); - updateUserIpVo.setDesc("desc2"); - updateUserIpVo.setBussinessUser("bussinessUser2"); - userIpMapper.updateUserIP(updateUserIpVo); - UserIpVo userIpVo1 = userIpMapper.queryUserIP("user", "creator"); - assertTrue(userIpVo1.getDesc().equals("desc2")); - assertTrue(userIpVo1.getBussinessUser().equals("bussinessUser2")); - } - - @Test - void queryUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - assertTrue(userIpVo != null); - } - - @Test - void queryUserIPList() { - insert(); - List userIpVos = userIpMapper.queryUserIPList("user", "creator"); - assertTrue(userIpVos.size() > 0); - } -} diff --git 
a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java deleted file mode 100644 index 788409f2ed..0000000000 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.configuration.dao; - -import org.apache.linkis.configuration.entity.TenantVo; - -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.Date; -import java.util.List; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -class UserTenantMapperTest extends BaseDaoTest { - - @Autowired UserTenantMapper userTenantMapper; - - TenantVo insert() { - TenantVo tenantVo = new TenantVo(); - tenantVo.setUser("user"); - tenantVo.setCreateTime(new Date()); - tenantVo.setCreator("creator"); - tenantVo.setTenantValue("tenantValue"); - tenantVo.setUpdateTime(new Date()); - tenantVo.setBussinessUser("bussinessUser"); - tenantVo.setDesc("desc"); - userTenantMapper.createTenant(tenantVo); - return tenantVo; - } - - @Test - void createTenant() { - insert(); - List tenantVos = userTenantMapper.queryTenantList("user", "creator", "tenantValue"); - assertTrue(tenantVos.size() > 0); - } - - @Test - void deleteTenant() { - insert(); - TenantVo tenantVo = userTenantMapper.queryTenant("user", "creator"); - userTenantMapper.deleteTenant(Integer.valueOf(tenantVo.getId())); - List tenantVos = userTenantMapper.queryTenantList("user", "creator", "tenantValue"); - assertTrue(tenantVos.size() == 0); - } - - @Test - void updateTenant() { - insert(); - TenantVo tenantVo = userTenantMapper.queryTenant("user", "creator"); - TenantVo updateTenantVo = new TenantVo(); - updateTenantVo.setId(tenantVo.getId()); - updateTenantVo.setDesc("desc2"); - updateTenantVo.setBussinessUser("bussinessUser2"); - userTenantMapper.updateTenant(updateTenantVo); - TenantVo queryTenant = userTenantMapper.queryTenant("user", "creator"); - assertTrue(queryTenant.getDesc().equals("desc2")); - assertTrue(queryTenant.getBussinessUser().equals("bussinessUser2")); - } - - @Test - void queryTenant() { - insert(); - TenantVo tenantVo = userTenantMapper.queryTenant("user", "creator"); - assertTrue(tenantVo != null); - } -} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java 
b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java index eae854ca13..9bfee23682 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java @@ -17,21 +17,4 @@ package org.apache.linkis.configuration.exception; -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - public class ConfigurationExceptionTest {} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java index 0974743014..5170824281 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java @@ -57,7 +57,7 @@ public void TestAddKeyForEngine() throws Exception { paramsMap.add("engineType", "spark"); paramsMap.add("version", "3.2.1"); paramsMap.add("token", "e8724-e"); - paramsMap.add("keyJson", "{'engineType':'spark','version':'3.2.1'}"); + paramsMap.add("keyJson", "{'engineType':'spark','version':'3.2.1','boundaryType':'0'}"); String url = "/configuration/addKeyForEngine"; sendUrl(url, paramsMap, "get", null); } diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties b/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties index 1b9ac21258..1dd49b9917 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties @@ -55,6 +55,5 @@ eureka.client.enabled=false eureka.client.serviceUrl.registerWithEureka=false mybatis-plus.mapper-locations=classpath*:mapper/common/*.xml,classpath*:mapper/mysql/*.xml -#mybatis-plus.mapper-locations=classpath*:mapper/common/*.xml,classpath*:mapper/postgresql/*.xml mybatis-plus.type-aliases-package=org.apache.linkis.configuration.entity mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql b/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql index fb22c7a114..4d3c587601 100644 --- 
a/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql @@ -23,7 +23,7 @@ DROP TABLE IF EXISTS `linkis_cg_manager_label`; CREATE TABLE `linkis_cg_manager_label` ( `id` int(20) NOT NULL AUTO_INCREMENT,
- `label_key` varchar(50) NOT NULL,
+ `label_key` varchar(32) NOT NULL,
`label_value` varchar(255) NOT NULL, `label_feature` varchar(16) NOT NULL, `label_value_size` int(20) NOT NULL, @@ -48,6 +48,10 @@ CREATE TABLE `linkis_ps_configuration_config_key` `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType',
+ `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none / 1 with min / 2 with max / 3 with both min and max',
+ `en_description` varchar(200) DEFAULT NULL COMMENT 'english description',
+ `en_name` varchar(100) DEFAULT NULL COMMENT 'english name',
+ `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName',
PRIMARY KEY (`id`) ); @@ -88,65 +92,42 @@ CREATE TABLE `linkis_ps_configuration_category` UNIQUE INDEX (`label_id`) );
-DROP TABLE IF EXISTS `linkis_cg_user_ip_config`;
-CREATE TABLE `linkis_cg_user_ip_config` (
- `id` int(20) NOT NULL AUTO_INCREMENT,
- `user` varchar(50) NOT NULL,
- `creator` varchar(50) NOT NULL,
- `ip_list` text NOT NULL,
- `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
- `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
- `desc` varchar(100) NOT NULL,
- `bussiness_user` varchar(50) NOT NULL,
- PRIMARY KEY (`id`),
- UNIQUE KEY `uniq_user_creator_uic` (`user`,`creator`)
-);
-DROP TABLE IF EXISTS `linkis_cg_tenant_label_config`;
-CREATE TABLE `linkis_cg_tenant_label_config` (
- `id` int(20) NOT NULL AUTO_INCREMENT,
- `user` varchar(50) NOT NULL,
- `creator` varchar(50) NOT NULL,
- `tenant_value` varchar(128) NOT NULL,
- `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
- `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
- `desc` varchar(100) NOT NULL,
- `bussiness_user` varchar(50) NOT NULL,
- PRIMARY KEY (`id`),
- UNIQUE KEY `uniq_user_creator_tlc` (`user`,`creator`)
+DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`;
+CREATE TABLE `linkis_ps_configuration_template_config_key` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `template_name` varchar(200) NOT NULL COMMENT 'template name (redundantly stored)',
+ `template_uuid` varchar(36) NOT NULL COMMENT 'uuid, the template id recorded by the third party',
+ `key_id` int(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key',
+ `config_value` varchar(200) NULL DEFAULT NULL COMMENT 'config value',
+ `max_value` varchar(50) NULL DEFAULT NULL COMMENT 'upper limit value',
+ `min_value` varchar(50) NULL DEFAULT NULL COMMENT 'lower limit value (reserved)',
+ `validate_range` varchar(50) NULL DEFAULT NULL COMMENT 'validation regex (reserved)',
+ `is_valid` varchar(2) DEFAULT 'Y' COMMENT 'is valid, reserved, Y/N',
+ `create_by` varchar(50) NOT NULL COMMENT 'creator',
+ `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+ `update_by` varchar(50) NULL DEFAULT NULL COMMENT 'updater',
+ `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
+ PRIMARY KEY (`id`),
+ UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`)
);
-DELETE FROM linkis_cg_manager_label;
-
-insert into `linkis_cg_manager_label` 
(`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-*,*-*', 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); - -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); - -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (1,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (2,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (3,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) 
values (4,1);
-insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (5,1);
-insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (6,1);
-insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (7,1);
-
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (1,'1',1);
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (2,'1',1);
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (3,'1',1);
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (4,'1',1);
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (5,'1',1);
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (6,'1',1);
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (7,'1',1);
-insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (1, 1);
-insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (2, 1);
-insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (3, 1); \ No newline at end of file
+DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`;
+CREATE TABLE `linkis_ps_configuration_key_limit_for_user` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `user_name` varchar(50) NOT NULL COMMENT 'username',
+ `combined_label_value` varchar(200) NOT NULL COMMENT 'combined label combined_userCreator_engineType, e.g. hadoop-IDE,spark-2.4.3',
+ `key_id` int(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key',
+ `config_value` varchar(200) NULL DEFAULT NULL COMMENT 'config value',
+ `max_value` varchar(50) NULL DEFAULT NULL COMMENT 'upper limit value',
+ `min_value` varchar(50) NULL DEFAULT NULL COMMENT 'lower limit value (reserved)',
+ `latest_update_template_uuid` varchar(36) NOT NULL COMMENT 'uuid, the template id recorded by the third party',
+ `is_valid` varchar(2) DEFAULT 'Y' COMMENT 'is valid, reserved, Y/N',
+ `create_by` varchar(50) NOT NULL COMMENT 'creator',
+ `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+ `update_by` varchar(50) NULL DEFAULT NULL COMMENT 'updater',
+ `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time',
+ PRIMARY KEY (`id`),
+ UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`)
+); \ No newline at end of file
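For context, a hypothetical Java reading of the boundary_type codes declared in create.sql above; the enum is illustrative only and not part of the patch:

public enum BoundaryType {
  NONE,        // 0: neither bound is checked for the key
  WITH_MIN,    // 1: only min_value applies
  WITH_MAX,    // 2: only max_value applies
  MIN_AND_MAX; // 3: both bounds apply

  // Codes are stored as plain ints in linkis_ps_configuration_config_key.boundary_type.
  static BoundaryType fromCode(int code) {
    return values()[code]; // ordinal order matches the stored int
  }
}

diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql b/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql new file mode 100644 index 0000000000..4137dbbf16 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql @@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. 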
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +DELETE FROM linkis_cg_manager_label; + +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-*,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); + +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, 
`is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源',3); + +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (1,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (2,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (3,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (4,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (5,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (6,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (7,1); + +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (1,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (2,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (3,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (4,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (5,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (6,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (7,'1',1); + +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (1, 1); +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (2, 1); +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (3, 1); diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml index 8e44eefd8b..31bf7a38e5 100644 --- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml @@ -44,6 +44,11 @@ linkis-pes-rpc-client ${project.version} + + org.apache.linkis + linkis-ps-common-lock + ${project.version} + org.apache.linkis diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java index 4892d6b090..3cf7d67a01 100644 --- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java @@ -34,4 +34,7 @@ public class ContextServerConf { public static final long CS_SCHEDULER_JOB_WAIT_MILLS = CommonVars.apply("wds.linkis.cs.job.wait.mills", 10000).getValue(); + + public static final String CS_LABEL_SUFFIX = + CommonVars.apply("wds.linkis.cs.label.suffix", "").getValue(); } diff --git 
a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java new file mode 100644 index 0000000000..7e3b671385 --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.server.label; + +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.cs.server.conf.ContextServerConf; +import org.apache.linkis.instance.label.client.InstanceLabelClient; +import org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.protocol.label.InsLabelRefreshRequest; +import org.apache.linkis.protocol.label.InsLabelRemoveRequest; +import org.apache.linkis.publicservice.common.lock.entity.CommonLock; +import org.apache.linkis.publicservice.common.lock.service.CommonLockService; +import org.apache.linkis.rpc.Sender; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.availability.AvailabilityChangeEvent; +import org.springframework.boot.availability.AvailabilityState; +import org.springframework.boot.availability.ReadinessState; +import org.springframework.context.event.ContextClosedEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; + +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.springframework.boot.availability.ReadinessState.ACCEPTING_TRAFFIC; + +@Component +public class CSInstanceLabelClient { + private static final Logger logger = LoggerFactory.getLogger(CSInstanceLabelClient.class); + + @Autowired CommonLockService commonLockService; + + private String _LOCK = "_MASTER_PS_CS_LABEL_LOCK"; + CommonLock commonLock = new CommonLock(); + private boolean lock = false; + + @EventListener(classes = {AvailabilityChangeEvent.class}) + public void init(AvailabilityChangeEvent availabilityChangeEvent) { + AvailabilityState state = availabilityChangeEvent.getState(); + logger.info("CSInstanceLabelClient app state {}", state); + + if (state instanceof ReadinessState && state == ACCEPTING_TRAFFIC) { + Map labels = new HashMap<>(1); + commonLock.setLockObject(_LOCK); + commonLock.setCreateTime(new Date()); + commonLock.setUpdateTime(new Date()); + commonLock.setCreator(Utils.getJvmUser()); + commonLock.setLocker(Utils.getLocalHostname()); + commonLock.setUpdator(Utils.getJvmUser()); + lock = 
commonLockService.reentrantLock(commonLock, -1L);
+ String suffix = ContextServerConf.CS_LABEL_SUFFIX;
+ String confLabel;
+
+ if (lock) {
+ // the master node registers the cs_1_xxx route label
+ logger.info("The master ps-cs node acquired lock {}", _LOCK + "-" + commonLock.getLocker());
+ confLabel = "cs_1_" + suffix;
+ } else {
+ // all other ps-cs nodes register the cs_2_xxx route label
+ confLabel = "cs_2_" + suffix;
+ }
+ logger.info("Registering label {} for this ps-cs node.", confLabel);
+ labels.put(LabelKeyConstant.ROUTE_KEY, confLabel);
+ InsLabelRefreshRequest insLabelRefreshRequest = new InsLabelRefreshRequest();
+ insLabelRefreshRequest.setLabels(labels);
+ insLabelRefreshRequest.setServiceInstance(Sender.getThisServiceInstance());
+ InstanceLabelClient.getInstance().refreshLabelsToInstance(insLabelRefreshRequest);
+ }
+ }
+
+ @EventListener(classes = {ContextClosedEvent.class})
+ public void shutdown(ContextClosedEvent contextClosedEvent) {
+ logger.info("Removing labels from this instance");
+ InsLabelRemoveRequest insLabelRemoveRequest = new InsLabelRemoveRequest();
+ insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance());
+ InstanceLabelClient.getInstance().removeLabelsFromInstance(insLabelRemoveRequest);
+ logger.info("Succeeded in sending the clear-label RPC request");
+ if (lock) {
+ commonLockService.unlock(commonLock);
+ logger.info(
+ "The master ps-cs node has released lock {}.",
+ commonLock.getLockObject() + "-" + commonLock.getLocker());
+ }
+ }
}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/pom.xml similarity index 100% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/pom.xml rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/pom.xml diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/assembly/distribution.xml similarity index 100% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/assembly/distribution.xml rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/assembly/distribution.xml
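A condensed sketch of the election-and-label flow implemented by CSInstanceLabelClient above; the class and method here are invented and the lock wiring is simplified, but the CommonLock and CommonLockService calls are the ones the patch uses:

import org.apache.linkis.publicservice.common.lock.entity.CommonLock;
import org.apache.linkis.publicservice.common.lock.service.CommonLockService;

public class RouteLabelSketch {
  // Per the patch, only the node that wins the shared DB lock becomes master and
  // advertises cs_1_<suffix>; every other ps-cs instance advertises cs_2_<suffix>.
  static String electRouteLabel(CommonLockService lockService, String suffix) {
    CommonLock lock = new CommonLock();
    lock.setLockObject("_MASTER_PS_CS_LABEL_LOCK");
    boolean isMaster = lockService.reentrantLock(lock, -1L); // true only for the winner
    return (isMaster ? "cs_1_" : "cs_2_") + suffix; // e.g. "cs_1_prod" vs "cs_2_prod" (suffix is illustrative)
  }
}

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java similarity index 96% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java index 5f47452cd6..24aea200e5 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java +++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticConnection.java @@ 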
-111,11 +111,11 @@ public List<String> getTypes(String index) throws Exception { } public Map<String, Object> getProps(String index, String type) throws Exception {
- Request request = new Request("GET", index + "/_mappings/" + type);
+ Request request = new Request("GET", index + "/_mappings");
Response response = restClient.performRequest(request); Map<String, Map<String, Object>> result = Json.fromJson(response.getEntity().getContent(), Map.class);
- Map<String, Object> mappings = (Map<String, Object>) result.get(index).get("mappings");
+ Map<String, Object> mappings = (Map<String, Object>) result.get(index).get(DEFAULT_MAPPING_NAME);
Map<String, Object> propsMap = mappings; if (mappings.containsKey(type)) { Object typeMap = mappings.get(type); @@ -124,10 +124,10 @@ public Map<String, Object> getProps(String index, String type) throws Exception } } Object props = propsMap.get(FIELD_PROPS);
- if (props instanceof Map) {
+ if (null != props && props instanceof Map) {
return (Map<String, Object>) props; }
- return null;
+ return propsMap;
} public void ping() throws IOException { diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticParamsMapper.java similarity index 100% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticParamsMapper.java rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/ElasticParamsMapper.java diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java similarity index 100% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/elasticsearch/src/main/java/org/apache/linkis/metadata/query/service/EsMetaService.java diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/pom.xml similarity index 100% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/pom.xml rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/pom.xml diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/assembly/distribution.xml similarity index 100% rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/assembly/distribution.xml rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/assembly/distribution.xml
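A minimal sketch of the lookup order the patched getProps above now follows, assuming FIELD_PROPS resolves to the standard Elasticsearch "properties" key; with mapping types removed in newer Elasticsearch versions, GET <index>/_mappings nests the properties directly under "mappings", while older single-type indices nest them one level deeper under the type name:

import java.util.Map;

public class MappingsShapeSketch {
  // Mirrors the patched lookup: prefer the per-type sub-map (older, typed layout),
  // then fall back to the typeless layout; return propsMap itself when no
  // "properties" map is present instead of null.
  @SuppressWarnings("unchecked")
  static Map<String, Object> extractProps(Map<String, Object> mappings, String type) {
    Map<String, Object> propsMap = mappings;
    if (mappings.containsKey(type)) {
      Object typeMap = mappings.get(type);
      if (typeMap instanceof Map) {
        propsMap = (Map<String, Object>) typeMap;
      }
    }
    Object props = propsMap.get("properties");
    if (props instanceof Map) {
      return (Map<String, Object>) props;
    }
    return propsMap;
  }
}

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java 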
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/HdfsParamsMapper.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hdfs/src/main/java/org/apache/linkis/metadata/query/service/conf/ConfigurationUtils.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/pom.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/pom.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/assembly/distribution.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/assembly/distribution.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/assembly/distribution.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveParamsMapper.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveParamsMapper.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/hive/src/main/java/org/apache/linkis/metadata/query/service/HiveParamsMapper.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/pom.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/pom.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/pom.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/assembly/distribution.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/assembly/distribution.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/assembly/distribution.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/AbstractSqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/AbstractSqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/AbstractSqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/AbstractSqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/ClickhouseMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/ClickhouseMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/ClickhouseMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/ClickhouseMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/Db2MetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/DmMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/GreenplumMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/KingbaseMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/MysqlMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/OracleMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/PostgresqlMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/SqlserverMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/clickhouse/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/clickhouse/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/clickhouse/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/clickhouse/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/conf/SqlParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/conf/SqlParamsMapper.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/conf/SqlParamsMapper.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/conf/SqlParamsMapper.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java
similarity index 90%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java
index 3f61ac51f5..09201d58d8 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/db2/SqlConnection.java
@@ -18,6 +18,7 @@
 package org.apache.linkis.metadata.query.service.db2;

 import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.exception.LinkisSecurityException;
 import org.apache.linkis.metadata.query.service.AbstractSqlConnection;

 import org.apache.commons.collections.MapUtils;
@@ -42,6 +43,10 @@ public class SqlConnection extends AbstractSqlConnection {
   private static final CommonVars SQL_CONNECT_URL =
       CommonVars.apply("wds.linkis.server.mdm.service.db2.url", "jdbc:db2://%s:%s/%s");

+  /** clientRerouteServerListJNDIName */
+  private static final CommonVars DB2_SENSITIVE_PARAMS =
+      CommonVars.apply("linkis.db2.sensitive.params", "clientRerouteServerListJNDIName");
+
   public SqlConnection(
       String host,
       Integer port,
@@ -115,6 +120,9 @@ public Connection getDBConnection(ConnectMessage connectMessage, String database
               .collect(Collectors.joining("&"));
       url += "?" + extraParamString;
     }
+    if (url.toLowerCase().contains(DB2_SENSITIVE_PARAMS.getValue().toLowerCase())) {
+      throw new LinkisSecurityException(35000, "Invalid db2 connection params.");
+    }
     return DriverManager.getConnection(url, connectMessage.username, connectMessage.password);
   }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/dm/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/dm/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/dm/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/dm/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/greenplum/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/greenplum/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/greenplum/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/greenplum/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/kingbase/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/kingbase/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/kingbase/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/kingbase/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/mysql/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/mysql/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/mysql/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/mysql/SqlConnection.java
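Editor's note on the db2 `SqlConnection` hunk above: before handing the assembled JDBC URL to `DriverManager`, the patch rejects any URL containing a configured sensitive driver parameter (by default `clientRerouteServerListJNDIName`, read through `CommonVars`). A self-contained sketch of that validation pattern; `LinkisSecurityException` is swapped for `IllegalArgumentException` here so the snippet compiles on its own, and the keyword list is inlined rather than read from configuration:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

public class JdbcUrlGuard {
  // Inlined stand-in for the "linkis.db2.sensitive.params" CommonVars default.
  private static final List<String> SENSITIVE_PARAMS =
      Arrays.asList("clientRerouteServerListJNDIName");

  /** Rejects URLs that smuggle in sensitive driver parameters (case-insensitive). */
  static void checkUrl(String url) {
    String lower = url.toLowerCase();
    for (String param : SENSITIVE_PARAMS) {
      if (lower.contains(param.toLowerCase())) {
        throw new IllegalArgumentException("Invalid db2 connection params: " + param);
      }
    }
  }

  static Connection connect(String url, String user, String password) throws SQLException {
    checkUrl(url); // validate before the driver ever sees the URL
    return DriverManager.getConnection(url, user, password);
  }

  public static void main(String[] args) {
    checkUrl("jdbc:db2://host:50000/db"); // passes
    try {
      checkUrl("jdbc:db2://h:1/db?clientRerouteServerListJNDIName=x");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // rejected
    }
  }
}
```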
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/oracle/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/oracle/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/oracle/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/oracle/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/postgres/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/postgres/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/postgres/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/postgres/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/sqlserver/SqlConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/sqlserver/SqlConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/sqlserver/SqlConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/jdbc/src/main/java/org/apache/linkis/metadata/query/service/sqlserver/SqlConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/pom.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/pom.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/pom.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/assembly/distribution.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/assembly/distribution.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/assembly/distribution.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaParamsMapper.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaParamsMapper.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/kafka/src/main/java/org/apache/linkis/metadata/query/service/KafkaParamsMapper.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/pom.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/pom.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/pom.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/assembly/distribution.xml b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/assembly/distribution.xml
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/assembly/distribution.xml
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/assembly/distribution.xml
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbConnection.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbConnection.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbConnection.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbConnection.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbParamsMapper.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbParamsMapper.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbParamsMapper.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongoDbParamsMapper.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongodbMetaService.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongodbMetaService.java
similarity index 100%
rename from linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongodbMetaService.java
rename to linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/service/mongodb/src/main/java/org/apache/linkis/metadata/query/service/MongodbMetaService.java
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
index 3f683a33f5..9b61302d77 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
@@ -26,8 +26,6 @@
   linkis-metadata
   jar

-  linkis-metadata
-
   
     org.apache.linkis
diff --git a/linkis-public-enhancements/linkis-datasource/pom.xml b/linkis-public-enhancements/linkis-datasource/pom.xml
index c7d700a137..b0eae4131a 100644
--- a/linkis-public-enhancements/linkis-datasource/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/pom.xml
@@ -28,11 +28,12 @@
   pom

   linkis-datasource-manager/server
-  linkis-metadata-query/service/elasticsearch
-  linkis-metadata-query/service/hive
-  linkis-metadata-query/service/kafka
-  linkis-metadata-query/service/jdbc
-  linkis-metadata-query/service/hdfs
-  linkis-metadata-query/service/mongodb
+  linkis-datasource-manager/service/elasticsearch
+  linkis-datasource-manager/service/hive
+  linkis-datasource-manager/service/kafka
+  linkis-datasource-manager/service/jdbc
+  linkis-datasource-manager/service/hdfs
+  linkis-datasource-manager/service/mongodb
+  linkis-metadata
diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java
index 5b333cd6f6..db379f0853 100644
--- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java
+++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java
@@ -37,19 +37,14 @@
 import org.apache.commons.logging.LogFactory;

 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.cloud.client.discovery.DiscoveryClient;
+import org.springframework.web.bind.annotation.*;

 import javax.servlet.http.HttpServletRequest;

-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;

 import com.fasterxml.jackson.databind.JsonNode;
 import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport;
@@ -74,6 +69,8 @@ public class InstanceRestful {

   @Autowired private DefaultInsLabelService insLabelService;

+  @Autowired private DiscoveryClient discoveryClient;
+
   @ApiOperation(
       value = "listAllInstanceWithLabel",
       notes = "list all instance with label",
@@ -172,4 +169,31 @@ public Message getServiceRegistryURL(HttpServletRequest request) throws Exceptio
     ModuleUserUtils.getOperationUser(request, "getServiceRegistryURL");
     return Message.ok().data("url", serviceRegistryURL);
   }
+
+  @ApiOperation(value = "getServiceInstances", response = Message.class)
+  @ApiImplicitParams({
+    @ApiImplicitParam(name = "serviceName", required = false, dataType = "String"),
+    @ApiImplicitParam(name = "ip", required = false, dataType = "ip")
+  })
+  @RequestMapping(path = "/serviceInstances", method = RequestMethod.GET)
+  public Message getServiceInstance(
+      HttpServletRequest request,
+      @RequestParam(value = "serviceName", required = false) String serviceName,
+      @RequestParam(value = "ip", required = false) String ip) {
+    Stream serviceStream = discoveryClient.getServices().stream();
+    serviceStream = serviceStream.filter(s -> s.toUpperCase().contains("LINKIS"));
+    if (StringUtils.isNotBlank(serviceName)) {
+      serviceStream =
+          serviceStream.filter(s -> s.toUpperCase().contains(serviceName.toUpperCase()));
+    }
+    List instanceList =
+        serviceStream
+            .flatMap(serviceId -> discoveryClient.getInstances(serviceId).stream())
+            .collect(Collectors.toList());
+    if (StringUtils.isNotBlank(ip)) {
+      instanceList =
+          instanceList.stream().filter(s -> s.getHost().equals(ip)).collect(Collectors.toList());
+    }
+    return Message.ok().data("list", instanceList);
+  }
 }
diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java
index 5cc3bcc633..3b7aaf4c4b 100644
--- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java
+++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java
@@ -140,12 +140,6 @@ public void refreshLabelsToInstance(
       List> labels, ServiceInstance serviceInstance) throws InstanceErrorException {
     List insLabels = toInsPersistenceLabels(labels);
-    // Label candidate to be removed
-    List labelsCandidateRemoved =
-        insLabelRelationDao.searchLabelsByInstance(serviceInstance.getInstance());
-    if (!labelsCandidateRemoved.isEmpty()) {
-      labelsCandidateRemoved.removeAll(insLabels);
-    }
     LOG.info("Drop relationships related by instance: [" + serviceInstance.getInstance() + "]");
     insLabelRelationDao.dropRelationsByInstance(serviceInstance.getInstance());
     // Attach labels to instance
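Editor's note on the new `/serviceInstances` endpoint in `InstanceRestful` above: it filters the `DiscoveryClient` service list in two stages (always keep services whose name contains "LINKIS", then optionally narrow by a case-insensitive `serviceName` match) before flattening to instances. A standalone sketch of that filtering, with made-up service names; the real endpoint additionally filters instances by host IP:

```java
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

public class ServiceInstanceFilterSketch {
  /** Mirrors the endpoint: keep LINKIS services, then apply the optional name filter. */
  static List<String> filterServices(List<String> services, String serviceName) {
    return services.stream()
        .filter(s -> s.toUpperCase(Locale.ROOT).contains("LINKIS"))
        .filter(
            s ->
                serviceName == null
                    || s.toUpperCase(Locale.ROOT).contains(serviceName.toUpperCase(Locale.ROOT)))
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<String> services =
        Arrays.asList("linkis-cg-entrance", "linkis-ps-publicservice", "eureka");
    System.out.println(filterServices(services, null)); // all LINKIS services
    System.out.println(filterServices(services, "entrance")); // [linkis-cg-entrance]
  }
}
```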
diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml
index d3d0cfbe42..3263252d33 100644
--- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml
+++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml
@@ -197,7 +197,10 @@
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml
index 9f19cdd14b..6d984aa1d6 100644
--- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml
+++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml
@@ -197,6 +197,9 @@
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java
index c83d730b8b..7ff5aeb32d 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java
@@ -195,12 +195,7 @@ public void refreshUndoneTask() {
       Date sDate = DateUtils.addDays(eDate, -1);
       queryTasks =
           jobHistoryMapper.searchWithIdOrderAsc(
-              undoneTaskMinId,
-              null,
-              Arrays.asList("Running", "Inited", "Scheduled"),
-              sDate,
-              eDate,
-              null);
+              sDate, eDate, undoneTaskMinId, Arrays.asList("Running", "Inited", "Scheduled"));
     } finally {
       PageHelper.clearPage();
     }
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java
index 7bb7656346..300d00b989 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java
@@ -23,6 +23,7 @@

 import java.util.Date;
 import java.util.List;
+import java.util.Map;

 public interface JobHistoryMapper {

@@ -33,12 +34,10 @@ public interface JobHistoryMapper {
   void updateJobHistory(JobHistory jobReq);

   List searchWithIdOrderAsc(
-      @Param("id") Long id,
-      @Param("umUser") String username,
-      @Param("status") List status,
       @Param("startDate") Date startDate,
       @Param("endDate") Date endDate,
-      @Param("engineType") String engineType);
+      @Param("startId") Long startId,
+      @Param("status") List status);

   List search(
       @Param("id") Long id,
@@ -108,4 +107,27 @@ Integer countUndoneTaskWithCreatorOnly(

   void updateJobHistoryCancelById(
       @Param("idList") List idList, @Param("errorDesc") String errorDesc);
+
+  /**
+   * query wait for failover job
+   *
+   * <p>Sql example: SELECT a.* FROM linkis_ps_job_history_group_history a where (a.instances = ''
+   * or a.instances is null or a.instances not in ('192.168.1.123:9104','192.168.1.124:9104') or
+   * EXISTS ( select 1 from ( select '192.168.1.123:9104' as instances, 1697775054098 as
+   * registryTime union all select '192.168.1.124:9104' as instances, 1666239054098 as registryTime
+   * ) b where a.instances = b.instances and a.created_time < FROM_UNIXTIME(b.registryTime/1000) ) )
+   * and status in ('Inited','Running','Scheduled','WaitForRetry') and a.created_time >=
+   * FROM_UNIXTIME(1666239054098/1000) limit 10
+   *
+   * @param instancesMap
+   * @param statusList
+   * @param startTimestamp
+   * @param limit
+   * @return
+   */
+  List selectFailoverJobHistory(
+      @Param("instancesMap") Map instancesMap,
+      @Param("statusList") List statusList,
+      @Param("startTimestamp") Long startTimestamp,
+      @Param("limit") Integer limit);
 }
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java
index f627fb6b15..a18da3a042 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java
@@ -25,6 +25,7 @@
 import org.apache.linkis.jobhistory.conversions.TaskConversions;
 import org.apache.linkis.jobhistory.entity.*;
 import org.apache.linkis.jobhistory.service.JobHistoryQueryService;
+import org.apache.linkis.jobhistory.transitional.TaskStatus;
 import org.apache.linkis.jobhistory.util.QueryUtils;
 import org.apache.linkis.protocol.constants.TaskConstant;
 import org.apache.linkis.server.Message;
@@ -101,6 +102,10 @@ public Message getTaskByID(HttpServletRequest req, @PathVariable("id") Long jobI
       return Message.error(
           "The corresponding job was not found, or there may be no permission to view the job"
               + "(没有找到对应的job,也可能是没有查看该job的权限)");
+    } else if (taskVO.getStatus().equals(TaskStatus.Running.toString())) {
+      // 任务运行时不显示异常信息(Do not display exception information during task runtime)
+      taskVO.setErrCode(null);
+      taskVO.setErrDesc(null);
     }

     return Message.ok().data(TaskConstant.TASK, taskVO);
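Editor's note, referring back to the `selectFailoverJobHistory` declaration in `JobHistoryMapper` above: the mapper takes a map of alive instances to their registry timestamps plus a status list, a start timestamp, and a limit. A hedged sketch of how a caller might assemble those arguments; the interface comes from the patch, the values are taken from its javadoc SQL example, and the mapper call itself is commented out since the entity classes are not available standalone:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FailoverQuerySketch {
  public static void main(String[] args) {
    // Alive entrance instances and the time each one registered.
    Map<String, Long> instancesMap = new HashMap<>();
    instancesMap.put("192.168.1.123:9104", 1697775054098L);
    instancesMap.put("192.168.1.124:9104", 1666239054098L);

    List<String> statusList = Arrays.asList("Inited", "Running", "Scheduled", "WaitForRetry");
    Long startTimestamp = 1666239054098L; // only jobs created after this instant
    Integer limit = 10;

    // With a Spring-managed mapper this would be:
    // List<JobHistory> jobs =
    //     jobHistoryMapper.selectFailoverJobHistory(instancesMap, statusList, startTimestamp, limit);
    System.out.printf("querying %d instances, statuses=%s%n", instancesMap.size(), statusList);
  }
}
```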
@@ -227,14 +232,30 @@ public Message list(

   /** Method list should not contain subjob, which may cause performance problems. */
   @ApiOperation(value = "listundonetasks", notes = "list undone tasks", response = Message.class)
   @ApiImplicitParams({
-    @ApiImplicitParam(name = "startDate", dataType = "long"),
-    @ApiImplicitParam(name = "endDate", required = false, dataType = "long", value = "end date"),
+    @ApiImplicitParam(
+        name = "startDate",
+        required = false,
+        dataType = "Long",
+        value = "start date"),
+    @ApiImplicitParam(name = "endDate", required = false, dataType = "Long", value = "end date"),
     @ApiImplicitParam(name = "status", required = false, dataType = "String", value = "status"),
     @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", value = "page now"),
-    @ApiImplicitParam(name = "pageSize", dataType = "Integer"),
-    @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"),
-    @ApiImplicitParam(name = "engineType", dataType = "String"),
-    @ApiImplicitParam(name = "startTaskID", dataType = "long"),
+    @ApiImplicitParam(
+        name = "pageSize",
+        required = false,
+        dataType = "Integer",
+        value = "page size"),
+    @ApiImplicitParam(
+        name = "startTaskID",
+        required = false,
+        dataType = "Long",
+        value = "start task id"),
+    @ApiImplicitParam(
+        name = "engineType",
+        required = false,
+        dataType = "String",
+        value = "engine type"),
+    @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator")
   })
   @RequestMapping(path = "/listundonetasks", method = RequestMethod.GET)
   public Message listundonetasks(
@@ -321,13 +342,29 @@

   @ApiOperation(value = "listundone", notes = "list undone", response = Message.class)
   @ApiImplicitParams({
-    @ApiImplicitParam(name = "startDate", dataType = "long"),
-    @ApiImplicitParam(name = "endDate", required = false, dataType = "long", value = "end date"),
-    @ApiImplicitParam(name = "status", required = false, dataType = "String", value = "status"),
+    @ApiImplicitParam(
+        name = "startDate",
+        required = false,
+        dataType = "Long",
+        value = "start date"),
+    @ApiImplicitParam(name = "endDate", required = false, dataType = "Long", value = "end date"),
     @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", value = "page now"),
-    @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"),
-    @ApiImplicitParam(name = "engineType", dataType = "String"),
-    @ApiImplicitParam(name = "startTaskID", dataType = "long"),
+    @ApiImplicitParam(
+        name = "pageSize",
+        required = false,
+        dataType = "Integer",
+        value = "page size"),
+    @ApiImplicitParam(
+        name = "startTaskID",
+        required = false,
+        dataType = "Long",
+        value = "startTaskID"),
+    @ApiImplicitParam(
+        name = "engineType",
+        required = false,
+        dataType = "String",
+        value = "engineType"),
+    @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator")
   })
   /** Method list should not contain subjob, which may cause performance problems. */
   @RequestMapping(path = "/listundone", method = RequestMethod.GET)
@@ -350,17 +387,13 @@ public Message listundone(
     }
     if (StringUtils.isEmpty(creator)) {
       creator = null;
-    } else {
-      if (!QueryUtils.checkNameValid(creator)) {
-        return Message.error("Invalid creator : " + creator);
-      }
+    } else if (!QueryUtils.checkNameValid(creator)) {
+      return Message.error("Invalid creator : " + creator);
     }
     if (StringUtils.isEmpty(engineType)) {
       engineType = null;
-    } else {
-      if (!QueryUtils.checkNameValid(engineType)) {
-        return Message.error("Invalid engienType: " + engineType);
-      }
+    } else if (!QueryUtils.checkNameValid(engineType)) {
+      return Message.error("Invalid engienType: " + engineType);
     }
     Date sDate = new Date(startDate);
     Date eDate = new Date(endDate);
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml
index 7a81b6c87a..a5b769f2d3 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml
@@ -98,13 +98,11 @@
+    SELECT a.* FROM linkis_ps_job_history_group_history a
+    WHERE (
+        a.instances = ''
+        OR a.instances IS NULL
+        OR a.instances NOT IN #{key}
+        OR EXISTS (
+            SELECT 1 FROM
+            (
+                SELECT #{key} AS instances, #{val} AS registryTime
+            ) b
+            WHERE a.instances = b.instances AND a.created_time < FROM_UNIXTIME(b.registryTime/1000)
+        )
+    )
+    AND
+        status IN #{status}
+    AND a.created_time >= FROM_UNIXTIME(#{startTimestamp}/1000)
+    LIMIT #{limit}
+
+
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/postgresql/JobHistoryMapper.xml b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/postgresql/JobHistoryMapper.xml
index 30e4e85b34..f7e75dea0e 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/postgresql/JobHistoryMapper.xml
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/postgresql/JobHistoryMapper.xml
@@ -229,4 +229,26 @@
         #{id}

+
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/JobHistoryQueryService.java b/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/JobHistoryQueryService.java
index 433cbe0474..b8731554d4 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/JobHistoryQueryService.java
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/JobHistoryQueryService.java
@@ -38,6 +38,8 @@ public interface JobHistoryQueryService {

   JobRespProtocol query(JobReqQuery jobReqQuery);

+  JobRespProtocol queryFailoverJobs(RequestFailoverJob requestFailoverJob);
+
   JobHistory getJobHistoryByIdAndName(Long jobID, String userName);

   List search(Long jobId, String username, String creator, String status, Date sDate, Date eDate, String engineType, Long startJobId, String instance);
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala b/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
index f00abc5568..5c49be89b7 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
+++ b/linkis-public-enhancements/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
@@ -113,7 +113,7 @@ class JobHistoryQueryServiceImpl extends JobHistoryQueryService with Logging {
         logger.info(s"${jobReq.getErrorDesc}")
       }
     }
-    if (jobReq.getStatus != null) {
+    if (jobReq.getUpdateOrderFlag && jobReq.getStatus != null) {
       val oldStatus: String = jobHistoryMapper.selectJobHistoryStatusForUpdate(jobReq.getId)
       if (oldStatus != null && !shouldUpdate(oldStatus, jobReq.getStatus)) {
         throw new QueryException(
@@ -178,7 +178,7 @@ class JobHistoryQueryServiceImpl extends JobHistoryQueryService with Logging {
         logger.info(s"${jobReq.getErrorDesc}")
       }
     }
-    if (jobReq.getStatus != null) {
+    if (jobReq.getUpdateOrderFlag && jobReq.getStatus != null) {
       val oldStatus: String = jobHistoryMapper.selectJobHistoryStatusForUpdate(jobReq.getId)
       if (oldStatus != null && !shouldUpdate(oldStatus, jobReq.getStatus)) {
         throw new QueryException(
@@ -247,6 +247,30 @@ class JobHistoryQueryServiceImpl extends JobHistoryQueryService with Logging {
     jobResp
   }

+  @Receiver
+  override def queryFailoverJobs(requestFailoverJob: RequestFailoverJob): JobRespProtocol = {
+    val reqMap = requestFailoverJob.reqMap
+    val statusList = requestFailoverJob.statusList
+    val startTimestamp = requestFailoverJob.startTimestamp
+    val limit = requestFailoverJob.limit
+    logger.info(s"query failover jobs, start timestamp:${startTimestamp}, limit:${limit}")
+    val jobResp = new JobRespProtocol
+    Utils.tryCatch {
+      val jobList =
+        jobHistoryMapper.selectFailoverJobHistory(reqMap, statusList, startTimestamp, limit)
+      val jobReqList = jobList.asScala.map(jobHistory2JobRequest).toList
+      val map = new util.HashMap[String, Object]()
+      map.put(JobRequestConstants.JOB_HISTORY_LIST, jobReqList)
+      jobResp.setStatus(0)
+      jobResp.setData(map)
+    } { case e: Exception =>
+      logger.error(s"Failed to query failover job, instances ${reqMap.keySet()}", e)
+      jobResp.setStatus(1)
+      jobResp.setMsg(ExceptionUtils.getRootCauseMessage(e))
+    }
+    jobResp
+  }
+
   override def getJobHistoryByIdAndName(jobId: java.lang.Long, userName: String): JobHistory = {
     val jobReq = new JobHistory
     jobReq.setId(jobId)
@@ -353,10 +377,9 @@ class JobHistoryQueryServiceImpl extends JobHistoryQueryService with Logging {
       startJobId: lang.Long
   ): Integer = {
     val cacheKey =
-      if (StringUtils.isNoneBlank(username, creator, engineType)) ""
-      else {
+      if (StringUtils.isNoneBlank(username, creator, engineType)) {
         s"${username}_${creator}_${engineType}"
-      }
+      } else ""
     if (StringUtils.isBlank(cacheKey)) {
       getCountUndoneTasks(username, creator, sDate, eDate, engineType, startJobId)
     } else {
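Editor's note on the two `getUpdateOrderFlag` hunks above: the status-ordering check (via `shouldUpdate`) is now only enforced when the request sets that flag. A hypothetical Java illustration of the guard's shape; the `shouldUpdate` rule below (never leave a terminal state) is an assumption for illustration, not taken from the patch:

```java
public class StatusUpdateGuardSketch {
  // Assumed rule: never move a job out of a terminal state.
  static boolean shouldUpdate(String oldStatus, String newStatus) {
    boolean terminal =
        oldStatus.equals("Succeed") || oldStatus.equals("Failed") || oldStatus.equals("Cancelled");
    return !terminal || oldStatus.equals(newStatus);
  }

  /** Mirrors the patched condition: enforce ordering only when the flag is set. */
  static void applyStatus(boolean updateOrderFlag, String oldStatus, String newStatus) {
    if (updateOrderFlag && newStatus != null && !shouldUpdate(oldStatus, newStatus)) {
      throw new IllegalStateException(oldStatus + " -> " + newStatus + " is not allowed");
    }
    System.out.println("status updated to " + newStatus);
  }

  public static void main(String[] args) {
    applyStatus(true, "Running", "Succeed"); // allowed
    applyStatus(false, "Succeed", "Running"); // flag off: ordering not enforced
  }
}
```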
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java
index a2bded49f6..c26ea51c6d 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java
+++ b/linkis-public-enhancements/linkis-jobhistory/src/test/java/org/apache/linkis/jobhistory/dao/JobHistoryMapperTest.java
@@ -19,6 +19,8 @@

 import org.apache.linkis.jobhistory.entity.JobHistory;

+import org.apache.commons.lang3.time.DateUtils;
+
 import org.springframework.beans.factory.annotation.Autowired;

 import java.util.ArrayList;
@@ -98,8 +100,9 @@ public void searchWithIdOrderAscTest() {
     List status = new ArrayList<>();
     status.add("Succeed");
-    List histories =
-        jobHistoryMapper.searchWithIdOrderAsc(1L, "hadoop", status, null, null, "spark");
+    Date eDate = new Date(System.currentTimeMillis());
+    Date sDate = DateUtils.addDays(eDate, -1);
+    List histories = jobHistoryMapper.searchWithIdOrderAsc(sDate, eDate, 1L, status);
     Assertions.assertTrue(histories.size() > 0);
   }
diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
index 8d7bd16fa1..14d86348c4 100644
--- a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
+++ b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql
@@ -60,5 +60,5 @@ CREATE TABLE linkis_ps_job_history_group_history (
   KEY submit_user (submit_user)
 ) ;

-INSERT INTO linkis_ps_job_history_group_history (job_req_id,submit_user,execute_user,source,labels,params,progress,status,log_path,error_code,error_desc,created_time,updated_time,instances,metrics,engine_type,execution_code,result_location) VALUES
-    ('LINKISCLI_hadoop_spark_0','hadoop','hadoop','{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"}','{"userCreator":"hadoop-LINKISCLI","engineType":"spark-3.0.1","codeType":"sql","executeOnce":""}','{"configuration":{"startup":{},"runtime":{"hive.resultset.use.unique.column.names":true,"wds.linkis.resultSet.store.path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1","source":{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"},"job":{"resultsetIndex":0,"#rt_rs_store_path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"}}},"variable":{}}','1.0','Succeed','hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/1.log',0,'','2022-07-14 18:58:39.019000000','2022-07-14 18:59:51.589000000','127.0.0.1:9104','{"scheduleTime":"2022-07-14T18:58:40+0800","timeToOrchestrator":"2022-07-14T18:58:41+0800","submitTime":"2022-07-14T18:58:39+0800","yarnResource":{"application_1657595967414_0003":{"queueMemory":1073741824,"queueCores":1,"queueInstances":0,"jobStatus":"RUNNING","queue":"default"}},"completeTime":"2022-07-14T18:59:51+0800"}','spark','show databases;','hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1');
+INSERT INTO linkis_ps_job_history_group_history (job_req_id,submit_user,execute_user,source,labels,params,progress,status,log_path,error_code,error_desc,instances,metrics,engine_type,execution_code,result_location) VALUES
+    ('LINKISCLI_hadoop_spark_0','hadoop','hadoop','{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"}','{"userCreator":"hadoop-LINKISCLI","engineType":"spark-3.0.1","codeType":"sql","executeOnce":""}','{"configuration":{"startup":{},"runtime":{"hive.resultset.use.unique.column.names":true,"wds.linkis.resultSet.store.path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1","source":{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"},"job":{"resultsetIndex":0,"#rt_rs_store_path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"}}},"variable":{}}','1.0','Succeed','hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/1.log',0,'','127.0.0.1:9104','{"scheduleTime":"2022-07-14T18:58:40+0800","timeToOrchestrator":"2022-07-14T18:58:41+0800","submitTime":"2022-07-14T18:58:39+0800","yarnResource":{"application_1657595967414_0003":{"queueMemory":1073741824,"queueCores":1,"queueInstances":0,"jobStatus":"RUNNING","queue":"default"}},"completeTime":"2022-07-14T18:59:51+0800"}','spark','show databases;','hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1');
diff --git a/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFInfo.java b/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFInfo.java
index 502047ac6a..19d1793608 100644
--- a/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFInfo.java
+++ b/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFInfo.java
@@ -33,6 +33,7 @@ public class UDFInfo {
   private String clusterName;

   private Boolean isLoad;
+  private String description;

   public UDFInfo() {};

@@ -61,6 +62,7 @@ public UDFInfo(
     this.createTime = createTime;
     this.updateTime = updateTime;
     this.isLoad = isLoad;
+    this.description = description;
   }

   public Long getId() {
@@ -158,4 +160,12 @@ public String getClusterName() {
   public void setClusterName(String clusterName) {
     this.clusterName = clusterName;
   }
+
+  public String getDescription() {
+    return description;
+  }
+
+  public void setDescription(String description) {
+    this.description = description;
+  }
 }
diff --git a/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFManager.java b/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFManager.java
index 6859fdb91b..0591313ae8 100644
--- a/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFManager.java
+++ b/linkis-public-enhancements/linkis-pes-common/src/main/java/org/apache/linkis/udf/entity/UDFManager.java
@@ -17,9 +17,13 @@

 package org.apache.linkis.udf.entity;

+import java.util.Date;
+
 public class UDFManager {
   private Integer id;
   private String userName;
+  private Date createTime;
+  private Date updateTime;

   public Integer getId() {
     return id;
@@ -36,4 +40,20 @@ public String getUserName() {
   public void setUserName(String userName) {
     this.userName = userName;
   }
+
+  public Date getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(Date createTime) {
+    this.createTime = createTime;
+  }
+
+  public Date getUpdateTime() {
+    return updateTime;
+  }
+
+  public void setUpdateTime(Date updateTime) {
+    this.updateTime = updateTime;
+  }
 }
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java
new file mode 100644
index 0000000000..ded1d7e434
--- /dev/null
+++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.basedatamanager.server.conf;
+
+import org.apache.linkis.common.conf.CommonVars;
+
+public class UdfTreeConf {
+
+  public static final CommonVars UDF_FUN_SYSTEM_CATEGORY =
+      CommonVars.apply("linkis.udf.fun.system.category", "user_name,sys,expire,share,bdp");
+}
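Editor's note on the new `UdfTreeConf` above: it exposes the UDF tree's system categories as a comma-separated `CommonVars` default. A hedged sketch of how a consumer might split and check it; the default string is inlined here rather than read through `CommonVars.getValue()`, and the "user-defined" interpretation is an assumption:

```java
import java.util.Arrays;
import java.util.List;

public class UdfTreeCategorySketch {
  public static void main(String[] args) {
    // In the service this would come from UdfTreeConf.UDF_FUN_SYSTEM_CATEGORY.getValue().
    String configured = "user_name,sys,expire,share,bdp";
    List<String> categories = Arrays.asList(configured.split(","));
    // A tree node whose category is absent from this list would be treated as user-defined.
    System.out.println(categories.contains("sys")); // true
    System.out.println(categories.contains("custom")); // false
  }
}
```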
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java
new file mode 100644
index 0000000000..c2d585993b
--- /dev/null
+++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.basedatamanager.server.dao;
+
+import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+
+/**
+ * @description Database operation Mapper for the linkis_PS_UDF_tree table
+ * @createDate 2022-08-13 15:13:27 @Entity
+ *     org.apache.linkis.basedatamanager.server.domain.LinkisPsUdfTree
+ */
+public interface UdfBaseInfoMapper extends BaseMapper {}
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java
new file mode 100644
index 0000000000..0c8776ed26
--- /dev/null
+++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.basedatamanager.server.domain;
+
+import java.io.Serializable;
+import java.util.Date;
+import java.util.Objects;
+
+import com.baomidou.mybatisplus.annotation.IdType;
+import com.baomidou.mybatisplus.annotation.TableField;
+import com.baomidou.mybatisplus.annotation.TableId;
+import com.baomidou.mybatisplus.annotation.TableName;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+/** @TableName linkis_ps_udf_baseinfo */
+@TableName(value = "linkis_ps_udf_baseinfo")
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class UdfBaseInfoEntity implements Serializable {
+
+  @TableId(type = IdType.AUTO)
+  private Long id;
+
+  private String createUser;
+  private String udfName;
+  private Integer udfType;
+  private Boolean isExpire;
+  private Boolean isShared;
+  private Long treeId;
+  private Date createTime;
+  private Date updateTime;
+  private String sys;
+  private String clusterName;
+
+  @TableField(exist = false)
+  private static final long serialVersionUID = 1L;
+
+  public Long getId() {
+    return id;
+  }
+
+  public void setId(Long id) {
+    this.id = id;
+  }
+
+  public String getCreateUser() {
+    return createUser;
+  }
+
+  public void setCreateUser(String createUser) {
+    this.createUser = createUser;
+  }
+
+  public String getUdfName() {
+    return udfName;
+  }
+
+  public void setUdfName(String udfName) {
+    this.udfName = udfName;
+  }
+
+  public Integer getUdfType() {
+    return udfType;
+  }
+
+  public void setUdfType(Integer udfType) {
+    this.udfType = udfType;
+  }
+
+  public Boolean getExpire() {
+    return isExpire;
+  }
+
+  public void setExpire(Boolean expire) {
+    isExpire = expire;
+  }
+
+  public Boolean getShared() {
+    return isShared;
+  }
+
+  public void setShared(Boolean shared) {
+    isShared = shared;
+  }
+
+  public Long getTreeId() {
+    return treeId;
+  }
+
+  public void setTreeId(Long treeId) {
+    this.treeId = treeId;
+  }
+
+  public Date getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(Date createTime) {
+    this.createTime = createTime;
+  }
+
+  public Date getUpdateTime() {
+    return updateTime;
+  }
+
+  public void setUpdateTime(Date updateTime) {
+    this.updateTime = updateTime;
+  }
+
+  public String getSys() {
+    return sys;
+  }
+
+  public void setSys(String sys) {
+    this.sys = sys;
+  }
+
+  public String getClusterName() {
+    return clusterName;
+  }
+
+  public void setClusterName(String clusterName) {
+    this.clusterName = clusterName;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    UdfBaseInfoEntity that = (UdfBaseInfoEntity) o;
+    return Objects.equals(id, that.id)
+        && Objects.equals(createUser, that.createUser)
+        && Objects.equals(udfName, that.udfName)
+        && Objects.equals(udfType, that.udfType)
+        && Objects.equals(isExpire, that.isExpire)
+        && Objects.equals(isShared, that.isShared)
+        && Objects.equals(treeId, that.treeId)
+        && Objects.equals(createTime, that.createTime)
+        && Objects.equals(updateTime, that.updateTime)
+        && Objects.equals(sys, that.sys)
+        && Objects.equals(clusterName, that.clusterName);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        id,
+        createUser,
+        udfName,
+        udfType,
+        isExpire,
+        isShared,
+        treeId,
+        createTime,
+        updateTime,
+        sys,
+        clusterName);
+  }
+
+  @Override
+  public String toString() {
+    return "UdfBaseInfoEntity{"
+        + "id="
+        + id
+        + ", createUser='"
+        + createUser
+        + '\''
+        + ", udfName='"
+        + udfName
+        + '\''
+        + ", udfType="
+        + udfType
+        + ", isExpire="
+        + isExpire
+        + ", isShared="
+        + isShared
+        + ", treeId="
+        + treeId
+        + ", createTime="
+        + createTime
+        + ", updateTime="
+        + updateTime
+        + ", sys='"
+        + sys
+        + '\''
+        + ", clusterName='"
+        + clusterName
+        + '\''
+        + '}';
+  }
+}
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java
index fa6db837e1..8c5abbeec1 100644
--- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java
+++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java
@@ -18,6 +18,7 @@
 package org.apache.linkis.basedatamanager.server.domain;

 import java.io.Serializable;
+import java.util.Date;

 import com.baomidou.mybatisplus.annotation.IdType;
 import com.baomidou.mybatisplus.annotation.TableField;
@@ -36,6 +37,12 @@ public class UdfManagerEntity implements Serializable {
   /** */
   private String userName;

+  /** */
+  private Date createTime;
+
+  /** */
+  private Date updateTime;
+
   @TableField(exist = false)
   private static final long serialVersionUID = 1L;

@@ -59,6 +66,22 @@ public void setUserName(String userName) {
     this.userName = userName;
   }

+  public Date getCreateTime() {
+    return createTime;
+  }
+
+  public void setCreateTime(Date createTime) {
+    this.createTime = createTime;
+  }
+
+  public Date getUpdateTime() {
+    return updateTime;
+  }
+
+  public void setUpdateTime(Date updateTime) {
+    this.updateTime = updateTime;
+  }
+
   @Override
   public boolean equals(Object that) {
     if (this == that) {
diff 
--git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java index 9d2a51d471..32615f095f 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java @@ -19,6 +19,7 @@ import java.io.Serializable; import java.util.Date; +import java.util.List; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; @@ -58,6 +59,9 @@ public class UdfTreeEntity implements Serializable { @TableField(exist = false) private static final long serialVersionUID = 1L; + @TableField(exist = false) + private List<UdfTreeEntity> childrenList; + /** */ public Long getId() { return id; @@ -138,6 +142,14 @@ public void setCategory(String category) { this.category = category; } + public List<UdfTreeEntity> getChildrenList() { + return childrenList; + } + + public void setChildrenList(List<UdfTreeEntity> childrenList) { + this.childrenList = childrenList; + } + @Override public boolean equals(Object that) { if (this == that) { diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java index 3c215a831b..1a24b36fd4 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java @@ -89,8 +89,12 @@ public Message add(HttpServletRequest request, @RequestBody ErrorCodeEntity erro @ApiOperation(value = "remove", notes = "Remove an Error Code by id", httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser( - request, "Remove a Datasource Code Record,id:" + id.toString()); + String username = + ModuleUserUtils.getOperationUser( + request, "Try to remove error code record with id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = errorCodeService.removeById(id); return Message.ok("").data("result", result); }
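// The delete endpoints touched in this module (error code, gateway auth token, external resource provider, UDF manager, UDF tree) now share one admin-gate shape: resolve the operating user via ModuleUserUtils.getOperationUser(request, ...), refuse with Message.error(...) unless Configuration.isAdmin(username), then delegate to the MyBatis-Plus service's removeById(id).
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java index 2b86e5bb43..7d5668c074 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java @@ -127,7 +127,12 @@ public Message update(HttpServletRequest request, @RequestBody GatewayAuthTokenE httpMethod = "DELETE")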
@RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser(request, "Remove a Gateway Auth Token Record,id:" + id); + String username = + ModuleUserUtils.getOperationUser( + request, "Try to remove gateway auth token record with id:" + id); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = gatewayAuthTokenService.removeById(id); return Message.ok("").data("result", result); } diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java index 5575eca20b..8b06b2e63f 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java @@ -112,8 +112,13 @@ public Message add( httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser( - request, "Remove a Resource manager External Resource Provider Record,id:" + id.toString()); + String username = + ModuleUserUtils.getOperationUser( + request, + "Try to remove external resource provider record with id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = rmExternalResourceProviderService.removeById(id); return Message.ok("").data("result", result); } diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java index f6e684f837..588b0b6a26 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java @@ -32,6 +32,8 @@ import javax.servlet.http.HttpServletRequest; +import java.util.Date; + import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.github.pagehelper.PageInfo; import io.swagger.annotations.Api; @@ -89,10 +91,12 @@ public Message add(HttpServletRequest request, @RequestBody UdfManagerEntity udf new QueryWrapper<>(udfManagerEntity).eq("user_name", udfManagerEntity.getUserName()); UdfManagerEntity udfManager = udfManagerService.getOne(queryWrapper); if (udfManager == null) { + udfManagerEntity.setCreateTime(new Date()); + udfManagerEntity.setUpdateTime(new Date()); boolean result = udfManagerService.save(udfManagerEntity); return Message.ok("").data("result", result); } else { - return Message.error("The username already exists,Please add again!"); + return Message.error("The user " + udfManager.getUserName() + " already exists, please do not add it again!"); } } @@ -103,7 +107,12 @@
public Message add(HttpServletRequest request, @RequestBody UdfManagerEntity udf httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser(request, "Remove a UDF Manager Record,id:" + id.toString()); + String username = + ModuleUserUtils.getOperationUser( + request, "Remove a UDF Manager Record,id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = udfManagerService.removeById(id); return Message.ok("").data("result", result); } @@ -121,7 +130,16 @@ public Message update( if (!Configuration.isAdmin(username)) { return Message.error("User '" + username + "' is not admin user[非管理员用户]"); } - boolean result = udfManagerService.updateById(udfManagerEntity); - return Message.ok("").data("result", result); + QueryWrapper<UdfManagerEntity> queryWrapper = new QueryWrapper<>(); + queryWrapper.eq("user_name", udfManagerEntity.getUserName()); + UdfManagerEntity udfManager = udfManagerService.getOne(queryWrapper); + if (udfManager == null) { + udfManagerEntity.setUpdateTime(new Date()); + boolean result = udfManagerService.updateById(udfManagerEntity); + return Message.ok("").data("result", result); + } else { + return Message.error( + "The user " + udfManager.getUserName() + " already exists, please check and update again!"); + } } } diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java index e5c9cbc442..7b8f434236 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java @@ -17,12 +17,18 @@ package org.apache.linkis.basedatamanager.server.restful; +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; import org.apache.linkis.basedatamanager.server.domain.UdfTreeEntity; +import org.apache.linkis.basedatamanager.server.service.UdfBaseInfoService; import org.apache.linkis.basedatamanager.server.service.UdfTreeService; +import org.apache.linkis.basedatamanager.server.utils.UdfTreeUtils; import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; @@ -32,8 +38,10 @@ import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; import java.util.List; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.github.pagehelper.PageInfo; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -47,6 +55,8 @@ public class UdfTreeRestfulApi { @Autowired UdfTreeService udfTreeService; + @Autowired UdfBaseInfoService udfBaseinfoService; + @ApiImplicitParams({ @ApiImplicitParam(paramType = "query", dataType = "string", name = "searchName"), @ApiImplicitParam(paramType = "query",
dataType = "int", name = "currentPage"), @@ -62,12 +72,26 @@ public Message list( return Message.ok("").data("list", pageList); } + @ApiImplicitParams({ + @ApiImplicitParam(paramType = "query", dataType = "string", name = "searchName"), + @ApiImplicitParam(paramType = "query", dataType = "string", name = "category") + }) @ApiOperation(value = "all", notes = "Query all data of UDF Tree", httpMethod = "GET") @RequestMapping(path = "/all", method = RequestMethod.GET) - public Message all(HttpServletRequest request, String searchName) { + public Message all(HttpServletRequest request, String searchName, String category) { ModuleUserUtils.getOperationUser( request, "Query all data of UDF Tree,search name:" + searchName); - List<UdfTreeEntity> udfTreeEntityList = udfTreeService.list(); + List<UdfTreeEntity> udfTreeEntityList = new ArrayList<>(); + if (StringUtils.isNotBlank(searchName) && StringUtils.isNotBlank(category)) { + UdfTreeEntity entity = new UdfTreeEntity(); + entity.setCategory(category); + entity.setUserName(searchName); + QueryWrapper<UdfTreeEntity> queryWrapper = + new QueryWrapper<>(entity) + .eq("user_name", entity.getUserName()) + .eq("category", entity.getCategory()); + udfTreeEntityList = new UdfTreeUtils(udfTreeService.list(queryWrapper)).buildTree(); + } return Message.ok("").data("list", udfTreeEntityList); } @@ -100,9 +124,27 @@ public Message add(HttpServletRequest request, @RequestBody UdfTreeEntity udfTre @ApiOperation(value = "remove", notes = "Remove a UDF Tree Record by id", httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser(request, "Remove a UDF Tree Record,id:" + id.toString()); - boolean result = udfTreeService.removeById(id); - return Message.ok("").data("result", result); + String username = + ModuleUserUtils.getOperationUser(request, "Remove a UDF Tree Record,id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } + UdfTreeEntity entity = udfTreeService.getById(id); + if (null != entity && entity.getParent() == -1) { + return Message.error("The root directory is forbidden to delete[\"根目录禁止删除\"]"); + } + QueryWrapper<UdfTreeEntity> queryWrapper = + new QueryWrapper<>(new UdfTreeEntity()).eq("parent", id); + List<UdfTreeEntity> folderList = udfTreeService.list(queryWrapper); + QueryWrapper<UdfBaseInfoEntity> udfQueryWrapper = + new QueryWrapper<>(new UdfBaseInfoEntity()).eq("tree_id", id); + List<UdfBaseInfoEntity> functionList = udfBaseinfoService.list(udfQueryWrapper); + if (CollectionUtils.isEmpty(folderList) && CollectionUtils.isEmpty(functionList)) { + boolean result = udfTreeService.removeById(id); + return Message.ok("").data("result", result); + } else { + return Message.error("Please delete the subdirectory first[请先删除子目录]"); + } } @ApiImplicitParams({
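// Deletion guard implemented above: the root row (parent == -1) is always refused, and a non-root node is removed only when no child folders (parent == id) and no UDF functions (tree_id == id) remain under it.
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java new file mode 100644 index 0000000000..032c818b43 --- /dev/null +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.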
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.basedatamanager.server.service; + +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; + +import com.baomidou.mybatisplus.extension.service.IService; + +/** + * @description Database operation Service for the [linkis_ps_udf_baseinfo] table + * @createDate 2022-08-13 15:13:27 + */ +public interface UdfBaseInfoService extends IService<UdfBaseInfoEntity> {} diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServiceImpl.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServiceImpl.java new file mode 100644 index 0000000000..c3c62854a1 --- /dev/null +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServiceImpl.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.basedatamanager.server.service.impl; + +import org.apache.linkis.basedatamanager.server.dao.UdfBaseInfoMapper; +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; +import org.apache.linkis.basedatamanager.server.service.UdfBaseInfoService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +@Service +public class UdfBaseInfoServiceImpl extends ServiceImpl<UdfBaseInfoMapper, UdfBaseInfoEntity> + implements UdfBaseInfoService {} diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java new file mode 100644 index 0000000000..f7f2b19fca --- /dev/null +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.basedatamanager.server.utils; + +import org.apache.linkis.basedatamanager.server.domain.UdfTreeEntity; + +import java.util.ArrayList; +import java.util.List; + +public class UdfTreeUtils { + + /** All tree nodes to build the tree structure from */ + public List<UdfTreeEntity> udfTreeList = new ArrayList<>(); + + /** Constructor */ + public UdfTreeUtils(List<UdfTreeEntity> udfTreeList) { + this.udfTreeList = udfTreeList; + } + + /** + * Obtain all root nodes (top-level nodes) that need to be built + * + * @return All Root Node List Collection + */ + public List<UdfTreeEntity> getRootNode() { + // Save all root nodes (data for all root nodes) + List<UdfTreeEntity> rootUdfTreeList = new ArrayList<>(); + // entity: each node row found by the query + for (UdfTreeEntity entity : udfTreeList) { + // Determine whether the current node is a root node; note that if the parentId type were + // String, equals() would have to be used for the comparison.
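// Root detection relies on the parent-id sentinel -1 used for top-level UDF tree rows (the same convention UdfTreeRestfulApi applies when it refuses to delete the root directory); getParent() returns a Long, so the check below auto-unboxes it against the int literal -1.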
+ if (-1 == entity.getParent()) { + rootUdfTreeList.add(entity); + } + } + return rootUdfTreeList; + } + + /** + * Build a tree structure according to each top-level node (root node) + * + * @return Build the entire tree + */ + public List<UdfTreeEntity> buildTree() { + // udfTreeEntities: saves the complete tree structures constructed from the top-level nodes + List<UdfTreeEntity> udfTreeEntities = new ArrayList<>(); + // getRootNode(): get all root nodes + for (UdfTreeEntity treeRootNode : getRootNode()) { + // Build subtrees from top-level nodes + treeRootNode = buildChildTree(treeRootNode); + // Complete the tree structure constructed by a top-level node and add it in + udfTreeEntities.add(treeRootNode); + } + return udfTreeEntities; + } + + /** + * Recursion ----- construct sub tree structure + * + * @param udfTreeEntity Root node (top-level node) + * @return Whole tree + */ + public UdfTreeEntity buildChildTree(UdfTreeEntity udfTreeEntity) { + List<UdfTreeEntity> childTree = new ArrayList<>(); + // udfTreeList: all node sets (all data) + for (UdfTreeEntity entity : udfTreeList) { + // Determine whether the parent node ID of the current node is equal to the ID of the root + // node, that is, if the current node is a child node under it + if (entity.getParent().equals(udfTreeEntity.getId())) { + // Recurse into the child node by calling this method on it + childTree.add(buildChildTree(entity)); + } + } + // Attach the collected children to the current node + udfTreeEntity.setChildrenList(childTree); + return udfTreeEntity; + } +} diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApi.java index 64d872f907..4aea974802 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/BMLFsRestfulApi.java @@ -21,6 +21,7 @@ import org.apache.linkis.filesystem.bml.BMLHelper; import org.apache.linkis.filesystem.exception.WorkSpaceException; import org.apache.linkis.filesystem.exception.WorkspaceExceptionManager; +import org.apache.linkis.server.BDPJettyServerHelper; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; import org.apache.linkis.storage.script.*; @@ -87,7 +88,8 @@ public Message openScriptFromBML( Pair> collect = fileSource.collect()[0]; Message message; try { - message = new Gson().fromJson(collect.getSecond().get(0)[0], Message.class); + message = + BDPJettyServerHelper.gson().fromJson(collect.getSecond().get(0)[0], Message.class); if (message == null) throw WorkspaceExceptionManager.createException(80019); } catch (Exception e) { return Message.ok()
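// In FsRestfulApi below, checkIsUsersDirectory gains a withAdmin flag: with the owner check disabled in configuration, the default overload (withAdmin = true) still short-circuits to true; rename() passes withAdmin = false to always enforce the per-user path check, and the jobhistory-admin widening of workspacePath/enginconnPath to the bare root prefixes now applies only when withAdmin is true.
diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/FsRestfulApi.java b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/FsRestfulApi.java index ae7e19106c..189ab711fd 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/FsRestfulApi.java +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/main/java/org/apache/linkis/filesystem/restful/api/FsRestfulApi.java @@ -20,6 +20,7 @@ import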
org.apache.linkis.common.conf.Configuration; import org.apache.linkis.common.io.FsPath; import org.apache.linkis.common.io.FsWriter; +import org.apache.linkis.common.utils.ResultSetUtils; import org.apache.linkis.filesystem.conf.WorkSpaceConfiguration; import org.apache.linkis.filesystem.entity.DirFileTree; import org.apache.linkis.filesystem.entity.LogLevel; @@ -42,6 +43,7 @@ import org.apache.linkis.storage.utils.StorageUtils; import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.BOMInputStream; import org.apache.commons.math3.util.Pair; import org.apache.http.Consts; @@ -88,16 +90,15 @@ public class FsRestfulApi { * @param userName * @return */ - private boolean checkIsUsersDirectory(String requestPath, String userName) { + private boolean checkIsUsersDirectory(String requestPath, String userName, Boolean withAdmin) { + // The owner check is disabled by default in the configuration; withAdmin defaults to true, + // and callers pass false in special cases to enable the permission check + // (配置文件默认关闭检查,withAdmin默认true,特殊情况传false开启权限检查) boolean ownerCheck = WorkSpaceConfiguration.FILESYSTEM_PATH_CHECK_OWNER.getValue(); - if (!ownerCheck) { + if (!ownerCheck && withAdmin) { LOGGER.debug("not check filesystem owner."); return true; } - if (requestPath.contains(WorkspaceUtil.suffixTuning(HDFS_USER_ROOT_PATH_PREFIX.getValue())) || Configuration.isAdmin(userName)) { - return true; - } requestPath = requestPath.toLowerCase().trim() + "/"; String hdfsUserRootPathPrefix = WorkspaceUtil.suffixTuning(HDFS_USER_ROOT_PATH_PREFIX.getValue()); @@ -106,7 +107,11 @@ private boolean checkIsUsersDirectory(String requestPath, String userName) { String workspacePath = hdfsUserRootPathPrefix + userName + hdfsUserRootPathSuffix; String enginconnPath = localUserRootPath + userName; - if (Configuration.isJobHistoryAdmin(userName)) { + // When an administrator modifies another user's file directory, the user can no longer use + // the file, so administrators are no longer allowed to modify it + // (管理员修改其他用户文件目录时,会导致用户无法使用文件,故此优化管理员不能修改) + if (withAdmin && Configuration.isJobHistoryAdmin(userName)) { workspacePath = hdfsUserRootPathPrefix; enginconnPath = localUserRootPath; } @@ -117,6 +122,10 @@ private boolean checkIsUsersDirectory(String requestPath, String userName) { return (requestPath.contains(workspacePath)) || (requestPath.contains(enginconnPath)); } + private boolean checkIsUsersDirectory(String requestPath, String userName) { + return checkIsUsersDirectory(requestPath, userName, true); + } + @ApiOperation(value = "getUserRootPath", notes = "get user root path", response = Message.class) @ApiImplicitParams({ @ApiImplicitParam(name = "pathType", required = false, dataType = "String", value = "path type") @@ -233,7 +242,7 @@ public Message rename(HttpServletRequest req, @RequestBody JsonNode json) PathValidator$.MODULE$.validate(oldDest, userName); PathValidator$.MODULE$.validate(newDest, userName); } - if (!checkIsUsersDirectory(newDest, userName)) { + if (!checkIsUsersDirectory(newDest, userName, false)) { throw WorkspaceExceptionManager.createException(80010, userName, newDest); } if (StringUtils.isEmpty(oldDest)) { @@ -561,6 +570,7 @@ public Message openFile( @RequestParam(value = "page", defaultValue = "1") Integer page, @RequestParam(value = "pageSize", defaultValue = "5000") Integer pageSize, @RequestParam(value = "charset", defaultValue = "utf-8") String charset, + @RequestParam(value = "nullValue", defaultValue = "") String nullValue, @RequestParam(value =
"limitBytes", defaultValue = "0") Long limitBytes, @RequestParam(value = "limitColumnLength", defaultValue = "0") Integer limitColumnLength) throws IOException, WorkSpaceException { @@ -582,7 +592,13 @@ public Message openFile( FileSource fileSource = null; try { fileSource = FileSource.create(fsPath, fileSystem); + if (nullValue != null && BLANK.equalsIgnoreCase(nullValue)) { + nullValue = ""; + } if (FileSource.isResultSet(fsPath.getPath())) { + if (!StringUtils.isEmpty(nullValue)) { + fileSource.addParams("nullValue", nullValue); + } fileSource = fileSource.page(page, pageSize); } if (limitBytes > 0) { @@ -853,7 +869,12 @@ public void resultsetsToExcel( if (fsPathListWithError == null) { throw WorkspaceExceptionManager.createException(80029); } - FsPath[] fsPaths = fsPathListWithError.getFsPaths().toArray(new FsPath[] {}); + + List fsPathList = fsPathListWithError.getFsPaths(); + // sort asc by _num.dolphin of num + ResultSetUtils.sortByNameNum(fsPathList); + FsPath[] fsPaths = fsPathList.toArray(new FsPath[] {}); + boolean isLimitDownloadSize = RESULT_SET_DOWNLOAD_IS_LIMIT.getValue(); Integer excelDownloadSize = RESULT_SET_DOWNLOAD_MAX_SIZE_EXCEL.getValue(); if (limit > 0) { @@ -867,7 +888,9 @@ public void resultsetsToExcel( response.setCharacterEncoding(StandardCharsets.UTF_8.name()); outputStream = response.getOutputStream(); // 前台传""会自动转为null - if (nullValue != null && BLANK.equalsIgnoreCase(nullValue)) nullValue = ""; + if (nullValue != null && BLANK.equalsIgnoreCase(nullValue)) { + nullValue = ""; + } fileSource = FileSource.create(fsPaths, fileSystem).addParams("nullValue", nullValue); if (!FileSource.isTableResultSet(fileSource)) { throw WorkspaceExceptionManager.createException(80024); @@ -952,7 +975,10 @@ public Message formate( res.put("sheetName", info.get(0)); } else { String[][] column = null; - BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding)); + // fix csv file with utf-8 with bom chart[] + BOMInputStream bomIn = new BOMInputStream(in, false); // don't include the BOM + BufferedReader reader = new BufferedReader(new InputStreamReader(bomIn, encoding)); + String header = reader.readLine(); if (StringUtils.isEmpty(header)) { throw WorkspaceExceptionManager.createException(80016); diff --git a/linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create.sql b/linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create.sql index fd297f9d1c..3dd9dcbca0 100644 --- a/linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create.sql +++ b/linkis-public-enhancements/linkis-pes-publicservice/src/test/resources/basedata_manager_create.sql @@ -128,12 +128,13 @@ CREATE TABLE `linkis_cg_rm_external_resource_provider` DROP TABLE IF EXISTS `linkis_ps_udf_manager`; -CREATE TABLE `linkis_ps_udf_manager` -( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `user_name` varchar(20) DEFAULT NULL, - PRIMARY KEY (`id`) -); +CREATE TABLE `linkis_ps_udf_manager` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_tree`; CREATE TABLE `linkis_ps_udf_tree` @@ -233,6 +234,22 @@ CREATE TABLE `linkis_cg_engine_conn_plugin_bml_resources` PRIMARY KEY (`id`) ); +DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; +CREATE TABLE `linkis_ps_udf_baseinfo` ( + 
`id` bigint(20) NOT NULL AUTO_INCREMENT, + `create_user` varchar(50) NOT NULL, + `udf_name` varchar(255) NOT NULL, + `udf_type` int(11) DEFAULT '0', + `tree_id` bigint(20) NOT NULL, + `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `sys` varchar(255) NOT NULL DEFAULT 'ide' COMMENT 'source system', + `cluster_name` varchar(255) NOT NULL, + `is_expire` bit(1) DEFAULT NULL, + `is_shared` bit(1) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + DELETE FROM linkis_ps_datasource_access; INSERT INTO `linkis_ps_datasource_access` (`id`, `table_id`, `visitor`, `fields`, `application_id`, `access_time`) VALUES (1, 1, 'test', 'test', 1, '2022-12-20 22:54:36'); diff --git a/linkis-public-enhancements/linkis-pes-rpc-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala b/linkis-public-enhancements/linkis-pes-rpc-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala index efaf43818c..677898352a 100644 --- a/linkis-public-enhancements/linkis-pes-rpc-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala +++ b/linkis-public-enhancements/linkis-pes-rpc-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala @@ -62,41 +62,36 @@ class InstanceLabelClient extends Logging { def getLabelFromInstance(serviceInstance: ServiceInstance): util.List[Label[_]] = { val request = new InsLabelQueryRequest(serviceInstance) - Utils.tryAndError { - getSender().ask(request) match { + Utils.tryAndWarn { + val respObj = getSender().ask(request) + respObj match { case resp: InsLabelQueryResponse => val labelList = new util.ArrayList[Label[_]]() resp.getLabelList.asScala.foreach(pair => labelList.add(labelBuilderFactory.createLabel[Label[_]](pair.getKey, pair.getValue)) ) labelList - case o => - logger.error(s"Invalid response ${BDPJettyServerHelper.gson - .toJson(o)} from request : ${BDPJettyServerHelper.gson.toJson(request)}") + case _ => + logger.warn(s"Invalid resp :$respObj from request : $request") new util.ArrayList[Label[_]] } } } def getInstanceFromLabel(labels: util.List[Label[_]]): util.List[ServiceInstance] = { - Utils.tryAndError { + Utils.tryAndWarn { val request = new LabelInsQueryRequest() val labelMap = LabelUtils.labelsToMap(labels) request.setLabels(labelMap.asInstanceOf[util.HashMap[String, Object]]) - Sender.getSender(PUBLIC_SERVICE_APPLICATION_NAME.getValue).ask(request) match { + val respObj = getSender().ask(request) + respObj match { case resp: LabelInsQueryResponse => if (null == resp.getInsList || resp.getInsList.isEmpty) { return new util.ArrayList[ServiceInstance]() } - if (resp.getInsList.size() != 1) { - logger.warn( - s"Instance num ${resp.getInsList.size()} with labels ${BDPJettyServerHelper.gson.toJson(labelMap)} is not single one." 
- ) - } resp.getInsList - case o => - logger.error(s"Invalid resp : ${JsonUtils.jackson - .writeValueAsString(o)} from request : ${BDPJettyServerHelper.gson.toJson(request)}") + case _ => + logger.warn(s"Invalid resp :$respObj from request : $request") new util.ArrayList[ServiceInstance]() } } diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/CommonLockSpringConfiguration.java b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/CommonLockSpringConfiguration.java index 07ec34dbb8..92f11730bc 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/CommonLockSpringConfiguration.java +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/CommonLockSpringConfiguration.java @@ -30,7 +30,7 @@ public class CommonLockSpringConfiguration { @Bean @ConditionalOnMissingBean - public CommonLockService getDefaultLockManagerPersistence(CommonLockMapper commonLockMapper) { + public CommonLockService getCommonLockService(CommonLockMapper commonLockMapper) { DefaultCommonLockService defaultCommonLockService = new DefaultCommonLockService(); defaultCommonLockService.setLockManagerMapper(commonLockMapper); return defaultCommonLockService; diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapper.java b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapper.java index 6fb435ab62..162341efd4 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapper.java +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapper.java @@ -26,9 +26,11 @@ @Mapper public interface CommonLockMapper { - void lock(@Param("jsonObject") String jsonObject, @Param("timeOut") Long timeOut); + void lock(@Param("commonLock") CommonLock commonLock, @Param("timeOut") Long timeOut); - void unlock(@Param("jsonObject") String jsonObject); + void unlock(@Param("commonLock") CommonLock commonLock); List<CommonLock> getAll(); + + CommonLock getLockByLocker(@Param("lockObject") String lockObject, @Param("locker") String host); } diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/entity/CommonLock.java b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/entity/CommonLock.java index 919c3ecc0f..49db1a710e 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/entity/CommonLock.java +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/entity/CommonLock.java @@ -28,6 +28,7 @@ public class CommonLock { private Date createTime; private String updator; private String creator; + private String locker; public Integer getId() { return id; @@ -84,4 +85,12 @@ public String getCreator() { public void setCreator(String creator) { this.creator = creator; } + + public String getLocker() { + return locker; + } + + public void setLocker(String locker) { + this.locker = locker; + } }
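// The new locker column records which host holds the row-backed lock; together with lock_object it is what getLockByLocker matches on, and it is what makes the reentrantLock implementation below reentrant per host: the same host re-acquires immediately, while a different host keeps waiting.
diff --git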
a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/CommonLockService.java b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/CommonLockService.java index caa78aa066..c93b4f6652 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/CommonLockService.java +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/CommonLockService.java @@ -24,6 +24,8 @@ public interface CommonLockService { Boolean lock(CommonLock commonLock, Long timeOut); + Boolean reentrantLock(CommonLock commonLock, Long timeOut); + void unlock(CommonLock commonLock); List<CommonLock> getAll(); diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/impl/DefaultCommonLockService.java b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/impl/DefaultCommonLockService.java index 6ea3ce7217..1fc6a293cc 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/impl/DefaultCommonLockService.java +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/main/java/org/apache/linkis/publicservice/common/lock/service/impl/DefaultCommonLockService.java @@ -57,9 +57,29 @@ public Boolean lock(CommonLock commonLock, Long timeOut) { return isLocked; } + @Override + public Boolean reentrantLock(CommonLock commonLock, Long timeOut) { + CommonLock oldLock = + commonLockMapper.getLockByLocker(commonLock.getLockObject(), commonLock.getLocker()); + if (oldLock != null) { + return true; + } + long startTime = System.currentTimeMillis(); + Boolean isLocked = tryLock(commonLock, timeOut); + while (!isLocked && System.currentTimeMillis() - startTime < timeOut) { + try { + Thread.sleep(1000); + isLocked = tryLock(commonLock, timeOut); + } catch (InterruptedException e) { + logger.warn("lock waiting interrupted", e); + } + } + return isLocked; + } + private boolean tryLock(CommonLock commonLock, Long timeOut) { try { - commonLockMapper.lock(commonLock.getLockObject(), timeOut); + commonLockMapper.lock(commonLock, timeOut); return true; } catch (DataAccessException e) { logger.warn("Failed to obtain lock:" + commonLock.getLockObject()); @@ -69,7 +89,7 @@ private boolean tryLock(CommonLock commonLock, Long timeOut) { @Override public void unlock(CommonLock commonLock) { - commonLockMapper.unlock(commonLock.getLockObject()); + commonLockMapper.unlock(commonLock); } @Override diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/main/resources/mapper/common/CommonLockMapper.xml b/linkis-public-enhancements/linkis-ps-common-lock/src/main/resources/mapper/common/CommonLockMapper.xml index 46bfdef960..a21818fc14 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/main/resources/mapper/common/CommonLockMapper.xml +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/main/resources/mapper/common/CommonLockMapper.xml @@ -21,17 +21,18 @@ - INSERT INTO linkis_ps_common_lock (lock_object, time_out, update_time, create_time) - VALUES (#{jsonObject}, #{timeOut}, now(), now()) + insert into linkis_ps_common_lock (lock_object, locker, time_out, update_time, create_time) + values(#{commonLock.lockObject}, #{commonLock.locker}, #{timeOut}, now(), now()) - DELETE FROM
linkis_ps_common_lock - WHERE lock_object = #{jsonObject} + delete from linkis_ps_common_lock where lock_object = #{commonLock.lockObject} and locker = #{commonLock.locker} + diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/test/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapperTest.java b/linkis-public-enhancements/linkis-ps-common-lock/src/test/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapperTest.java index 860ea76a7f..bcae6889b8 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/test/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapperTest.java +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/test/java/org/apache/linkis/publicservice/common/lock/dao/CommonLockMapperTest.java @@ -38,11 +38,45 @@ public void getAllTest() { Assertions.assertTrue(locks.size() == 1); } + public Boolean reentrantLock(CommonLock commonLock) { + CommonLock oldLock = + commonLockMapper.getLockByLocker(commonLock.getLockObject(), commonLock.getLocker()); + if (oldLock != null) { + return true; + } + + try { + commonLockMapper.lock(commonLock, -1L); + } catch (Exception e) { + return false; + } + return true; + } + + @Test + @DisplayName("reentrantLockTest") + public void reentrantLockTest() { + String lockObject = "hadoop-warehouse4"; + CommonLock commonLock = new CommonLock(); + commonLock.setLockObject(lockObject); + commonLock.setLocker("test"); + Boolean lock = reentrantLock(commonLock); + Assertions.assertTrue(lock); + lock = reentrantLock(commonLock); + Assertions.assertTrue(lock); + commonLock.setLocker("test1"); + lock = reentrantLock(commonLock); + Assertions.assertFalse(lock); + } + @Test @DisplayName("unlockTest") public void unlockTest() { String lockObject = "hadoop-warehouse"; - commonLockMapper.unlock(lockObject); + CommonLock commonLock = new CommonLock(); + commonLock.setLockObject(lockObject); + commonLock.setLocker("test"); + commonLockMapper.unlock(commonLock); List<CommonLock> locks = commonLockMapper.getAll(); Assertions.assertTrue(locks.size() == 0); @@ -53,7 +87,14 @@ public void lockTest() { String lockObject = "hadoop-warehouse2"; Long timeOut = 10000L; - commonLockMapper.lock(lockObject, timeOut); + CommonLock commonLock = new CommonLock(); + commonLock.setLockObject(lockObject); + + Assertions.assertThrows( + RuntimeException.class, () -> commonLockMapper.lock(commonLock, timeOut)); + + commonLock.setLocker("test"); + commonLockMapper.lock(commonLock, timeOut); List<CommonLock> locks = commonLockMapper.getAll(); Assertions.assertTrue(locks.size() == 2); } diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java index 9d6144ba07..ac3b90b128 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java @@ -113,6 +113,8 @@ public long addUDF(UDFAddVo udfVo, String userName) throws Exception { // 锁同一用户 CommonLock commonLock = new CommonLock(); commonLock.setLockObject(userName + _LOCK); + commonLock.setCreator(userName); + commonLock.setLocker(Utils.getLocalHostname()); commonLock.setCreateTime(new Date()); commonLock.setUpdateTime(new Date()); try {
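// UDFServiceImpl now fills creator and locker (the local hostname) on the per-user lock before acquiring it, so a retry from the same host can be treated by reentrantLock/getLockByLocker as already held, while a different host must wait until the (lock_object, locker) row is released.
@@ -322,6 +324,8 @@ public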
void updateUDF(UDFUpdateVo udfUpdateVo, String userName) throws Exception // udfInfo.setPath(StringUtils.replace(udfInfo.getPath(), "file://", "")); CommonLock persistenceLock = new CommonLock(); persistenceLock.setLockObject(userName + _LOCK); + persistenceLock.setCreator(userName); + persistenceLock.setLocker(Utils.getLocalHostname()); persistenceLock.setCreateTime(new Date()); persistenceLock.setUpdateTime(new Date()); try { diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml index ae27759e52..f04e5db482 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml @@ -31,6 +31,7 @@ + @@ -102,7 +103,7 @@ - INSERT INTO linkis_ps_udf_user_load (`udf_id`,`user_name`) VALUES (#{arg0},#{arg1}) + INSERT INTO linkis_ps_udf_user_load (`udf_id`,`user_name`,`create_time`,`update_time`) VALUES (#{arg0},#{arg1},now(),now()) SELECT - - FROM linkis_ps_udf_baseinfo - WHERE create_user in + info.id,info.`create_user`,info.`udf_name`,info.`udf_type`,info.`is_expire`,info.`is_shared`,info.`tree_id`,info.`create_time`,info.`update_time`, + info.`sys`,info.`cluster_name`,udf_version.description + FROM + linkis_ps_udf_baseinfo info , + (SELECT + udf_version.* + FROM + linkis_ps_udf_version udf_version , ( + SELECT + udf_id , MAX(bml_resource_version) AS bml_resource_version + FROM + linkis_ps_udf_version + GROUP BY + udf_id) version_tmp + WHERE + version_tmp.udf_id = udf_version.udf_id + AND version_tmp.bml_resource_version = udf_version.bml_resource_version) udf_version + WHERE + info.id = udf_version.udf_id + AND info.create_user in #{item} @@ -124,10 +142,29 @@ @@ -118,9 +118,26 @@ SELECT - - from linkis_ps_udf_baseinfo - where id in - (select udf_id from linkis_ps_udf_shared_info where user_name=#{userName}) + info.id,info."create_user",info."udf_name",info."udf_type",info."is_expire",info."is_shared",info."tree_id",info."create_time",info."update_time", + info."sys",info."cluster_name", version_tmp.description + FROM + linkis_ps_udf_baseinfo info , + ( + SELECT + version_info.* + FROM + linkis_ps_udf_version version_info , ( + SELECT + udf_id , MAX(bml_resource_version) AS bml_resource_version + FROM + linkis_ps_udf_version + GROUP BY + udf_id + ) version_max + WHERE + version_max.udf_id = version_info.udf_id + AND version_max.bml_resource_version = version_info.bml_resource_version + ) version_tmp + WHERE + info.id = version_tmp.udf_id + AND info.id in (SELECT udf_id FROM linkis_ps_udf_shared_info WHERE user_name = #{userName}) - update linkis_ps_udf_user_load set user_name=#{newUser} where udf_id=#{udfId} and user_name=#{oldUser} + update linkis_ps_udf_user_load set user_name=#{newUser}, update_time =now() where udf_id=#{udfId} and user_name=#{oldUser} - update linkis_ps_udf_version set is_published=#{isPublished} where udf_id=#{udfId} and + update linkis_ps_udf_version set is_published=#{isPublished},update_time =now() where udf_id=#{udfId} and bml_resource_version=#{version} diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql index f865efd51b..6262d1e86e 100644 --- 
a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql @@ -20,11 +20,13 @@ SET REFERENTIAL_INTEGRITY FALSE; DROP TABLE IF EXISTS linkis_ps_udf_user_load CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_user_load ( - id bigint(20) NOT NULL AUTO_INCREMENT, - udf_id bigint(20) NOT NULL, - user_name varchar(50) NOT NULL, - PRIMARY KEY (id) -) ; + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `udf_id` bigint(20) NOT NULL, + `user_name` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS linkis_ps_udf_baseinfo CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_baseinfo ( @@ -67,6 +69,7 @@ CREATE TABLE IF NOT EXISTS linkis_ps_udf_version ( use_format varchar(255) DEFAULT NULL, description varchar(255) NOT NULL COMMENT 'version desc', create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, md5 varchar(100) DEFAULT NULL, PRIMARY KEY (id) ) ; @@ -82,10 +85,12 @@ CREATE TABLE IF NOT EXISTS linkis_ps_udf_shared_info ( DROP TABLE IF EXISTS linkis_ps_udf_manager CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_manager ( - id bigint(20) NOT NULL AUTO_INCREMENT, - user_name varchar(20) DEFAULT NULL, - PRIMARY KEY (id) -) ; + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DELETE FROM linkis_ps_udf_user_load; -- ---------------------------- diff --git a/linkis-public-enhancements/pom.xml b/linkis-public-enhancements/pom.xml index 760ea05698..7b9385ddb4 100644 --- a/linkis-public-enhancements/pom.xml +++ b/linkis-public-enhancements/pom.xml @@ -34,7 +34,6 @@ linkis-pes-publicservice linkis-bml linkis-context-service - linkis-datasource/linkis-metadata linkis-datasource linkis-udf/linkis-udf-service linkis-jobhistory diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala index f7558fcad6..9ab75c74ce 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala @@ -108,4 +108,14 @@ object GatewayConfiguration { val IS_DOWNLOAD = CommonVars("linkis.web.result.set.export.enable", true) + val LINKIS_CLUSTER_NAME = CommonVars("linkis.cluster.name", "") + + val ACCESS_CONTROL_ENABLED = CommonVars("linkis.client.access.control.enable", false) + + val ACCESS_CONTROL_URL = CommonVars("linkis.client.access.control.url", "") + + val ACCESS_CONTROL_IP = CommonVars("linkis.client.access.control.ip", "") + + val ACCESS_CONTROL_USER_ENABLED = CommonVars("linkis.client.access.control.user.enable", false) + } diff --git 
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala index 2eb458beb2..150ae565ef 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/SecurityFilter.scala @@ -17,7 +17,7 @@ package org.apache.linkis.gateway.security -import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.conf.{CommonVars, Configuration} import org.apache.linkis.common.exception.LinkisException import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.gateway.config.GatewayConfiguration @@ -43,6 +43,7 @@ object SecurityFilter extends Logging { private val refererValidate = ServerConfiguration.BDP_SERVER_SECURITY_REFERER_VALIDATE.getValue private val referers = ServerConfiguration.BDP_SERVER_ADDRESS.getValue protected val testUser: String = ServerConfiguration.BDP_TEST_USER.getValue + private val ACCESS_CONTROL_USER_PREFIX = "linkis.client.access.control.user." private val ipSet = new util.HashSet[String]() @@ -104,11 +105,12 @@ object SecurityFilter extends Logging { val isPassAuthRequest = GatewayConfiguration.PASS_AUTH_REQUEST_URI.exists(r => !r.equals("") && gatewayContext.getRequest.getRequestURI.startsWith(r) ) - if ( - gatewayContext.getRequest.getRequestURI.startsWith( - ServerConfiguration.BDP_SERVER_USER_URI.getValue - ) - ) { + + val isUserRestful = gatewayContext.getRequest.getRequestURI.startsWith( + ServerConfiguration.BDP_SERVER_USER_URI.getValue + ) + + if (isUserRestful) { Utils.tryCatch(userRestful.doUserRequest(gatewayContext)) { t => val message = t match { case dwc: LinkisException => dwc.getMessage @@ -120,10 +122,9 @@ object SecurityFilter extends Logging { Message.error(message).<<(gatewayContext.getRequest.getRequestURI) ) } - false + return false } else if (isPassAuthRequest && !GatewayConfiguration.ENABLE_SSO_LOGIN.getValue) { logger.info("No login needed for proxy uri: " + gatewayContext.getRequest.getRequestURI) - true } else if (TokenAuthentication.isTokenRequest(gatewayContext)) { TokenAuthentication.tokenAuth(gatewayContext) } else { @@ -142,22 +143,20 @@ object SecurityFilter extends Logging { throw t } if (userName.isDefined) { - true + logger.info(s"User ${userName.get} has logged in.") } else if (Configuration.IS_TEST_MODE.getValue) {
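// After this refactor the login branches no longer each yield the filter's true/false result: failure paths return false immediately, successful paths fall through to the access-control checks appended at the end of the method, and only then does it produce true.
logger.info("test mode!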
login for uri: " + gatewayContext.getRequest.getRequestURI) GatewaySSOUtils.setLoginUser(gatewayContext, testUser) - true } else if (GatewayConfiguration.ENABLE_SSO_LOGIN.getValue) { val user = SSOInterceptor.getSSOInterceptor.getUser(gatewayContext) if (StringUtils.isNotBlank(user)) { GatewaySSOUtils.setLoginUser(gatewayContext.getRequest, user) - true } else if (isPassAuthRequest) { gatewayContext.getResponse.redirectTo( SSOInterceptor.getSSOInterceptor.redirectTo(gatewayContext.getRequest.getURI) ) gatewayContext.getResponse.sendResponse() - false + return false } else { filterResponse( gatewayContext, @@ -169,7 +168,7 @@ object SecurityFilter extends Logging { SSOInterceptor.getSSOInterceptor.redirectTo(gatewayContext.getRequest.getURI) ) << gatewayContext.getRequest.getRequestURI ) - false + return false } } else if ( gatewayContext.getRequest.getRequestURI.matches( @@ -179,7 +178,6 @@ object SecurityFilter extends Logging { logger.info( "Not logged in, still let it pass (GATEWAY_NO_AUTH_URL): " + gatewayContext.getRequest.getRequestURI ) - true } else { filterResponse( gatewayContext, @@ -187,9 +185,56 @@ object SecurityFilter extends Logging { "You are not logged in, please login first(您尚未登录,请先登录)!" ) << gatewayContext.getRequest.getRequestURI ) - false + return false + } + } + + // 访问控制, 先判断当前用户是否可以在当前IP执行,再判断当前IP是否有权限调用当前接口 + // Access control + // first determine whether the current user can perform operations from the current IP address, + // and then determine whether the current IP address has permission to call the current interface. + if ( + GatewayConfiguration.ACCESS_CONTROL_USER_ENABLED.getValue && !isPassAuthRequest && !isUserRestful + ) { + val userName = GatewaySSOUtils.getLoginUsername(gatewayContext) + val userIps = + CommonVars.apply(ACCESS_CONTROL_USER_PREFIX + userName, "").getValue + val host = + gatewayContext.getRequest.getRemoteAddress.getAddress.toString.replaceAll("/", "") + if (StringUtils.isNotEmpty(userIps)) { + if (!userIps.contains(host)) { + val message = + Message.error( + s"Unauthorized access! User $userName is prohibited from accessing from the current IP $host. (未授权的访问!用户${userName}禁止在当前IP${host}访问。)" + ) + filterResponse(gatewayContext, message) + return false + } + } + } + if ( + GatewayConfiguration.ACCESS_CONTROL_ENABLED.getValue && !isPassAuthRequest && !isUserRestful + ) { + if ( + StringUtils.isNotEmpty(GatewayConfiguration.ACCESS_CONTROL_IP.getValue) && StringUtils + .isNotEmpty(GatewayConfiguration.ACCESS_CONTROL_URL.getValue) + ) { + val host = + gatewayContext.getRequest.getRemoteAddress.getAddress.toString.replaceAll("/", "") + if (GatewayConfiguration.ACCESS_CONTROL_IP.getValue.contains(host)) { + val requestUrl = gatewayContext.getRequest.getRequestURI + if (!GatewayConfiguration.ACCESS_CONTROL_URL.getValue.contains(requestUrl)) { + val message = + Message.error( + s"Unauthorized access! IP $host is prohibited from accessing this URL. 
s"Unauthorized access! IP $host is prohibited from accessing this URL. (未授权的访问！当前IP${host}禁止访问此URL。)" + ) + filterResponse(gatewayContext, message) + return false + } + } } } + true } private var userRestful: UserRestful = _ diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala index b0f42f9ad0..40b0630706 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala @@ -73,7 +73,7 @@ abstract class AbstractUserRestful extends UserRestful with Logging { Utils.tryCatch { val loginUser = GatewaySSOUtils.getLoginUsername(gatewayContext) Message - .ok(loginUser + "Already logged in, please log out before signing in(已经登录，请先退出再进行登录)!") + .ok(loginUser + " already logged in, please log out before signing in(已经登录，请先退出再进行登录)!") .data("userName", loginUser) }(_ => login(gatewayContext)) case "token-login" => @@ -146,6 +146,8 @@ abstract class AbstractUserRestful extends UserRestful with Logging { Message .ok("get baseinfo success(获取成功)!") .data("resultSetExportEnable", GatewayConfiguration.IS_DOWNLOAD.getValue) + .data("linkisClusterName", GatewayConfiguration.LINKIS_CLUSTER_NAME.getValue) + } def publicKey(gatewayContext: GatewayContext): Message = { diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml index b089c4bfe9..344fce259c 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml @@ -81,6 +81,13 @@ <version>${project.version}</version> </dependency> + + <dependency> + <groupId>org.apache.linkis</groupId> + <artifactId>linkis-jobhistory</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> <groupId>com.fasterxml.jackson.core</groupId> <artifactId>jackson-databind</artifactId> diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceRequestGatewayParser.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceRequestGatewayParser.scala index 2ee0f4b023..04f206d6f6 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceRequestGatewayParser.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/ujes/parser/EntranceRequestGatewayParser.scala @@ -22,12 +22,25 @@ import org.apache.linkis.gateway.config.GatewayConfiguration import org.apache.linkis.gateway.http.GatewayContext import org.apache.linkis.gateway.parser.AbstractGatewayParser import org.apache.linkis.gateway.ujes.parser.EntranceExecutionGatewayParser._ +import org.apache.linkis.jobhistory.service.JobHistoryQueryService +import org.apache.linkis.protocol.engine.JobInstance import org.apache.linkis.protocol.utils.ZuulEntranceUtils +import org.apache.linkis.rpc.interceptor.ServiceInstanceUtils +import org.apache.linkis.server.BDPJettyServerHelper +import org.apache.linkis.server.conf.ServerConfiguration +
+import org.apache.commons.lang3.StringUtils import org.springframework.stereotype.Component +import javax.annotation.Resource + @Component class EntranceRequestGatewayParser extends AbstractGatewayParser { + + @Resource + private var jobHistoryQueryService: JobHistoryQueryService = _ + override def shouldContainRequestBody(gatewayContext: GatewayContext): Boolean = false override def parse(gatewayContext: GatewayContext): Unit = @@ -50,13 +63,57 @@ class EntranceRequestGatewayParser extends AbstractGatewayParser { } else { ServiceInstance(GatewayConfiguration.ENTRANCE_SPRING_NAME.getValue, null) } - } else { + } else if (execId.startsWith(ZuulEntranceUtils.EXEC_ID)) { + // parse by execId ZuulEntranceUtils.parseServiceInstanceByExecID(execId)(0) + } else { + // build JobInstance by taskId + val jobInstance = buildJobInstance(execId.toLong, gatewayContext) + if (jobInstance == null) return + val str = BDPJettyServerHelper.gson.toJson(jobInstance) + gatewayContext.getRequest.addHeader( + ServerConfiguration.LINKIS_SERVER_ENTRANCE_HEADER_KEY.getValue, + Array(str) + ) + + ServiceInstance(GatewayConfiguration.ENTRANCE_SPRING_NAME.getValue, jobInstance.instances) } gatewayContext.getGatewayRoute.setServiceInstance(serviceInstance) case _ => } + def buildJobInstance(taskId: Long, gatewayContext: GatewayContext): JobInstance = { + val histories = + jobHistoryQueryService.search(taskId, null, null, null, null, null, null, null, null) + if (histories.isEmpty) { + sendErrorResponse(s"taskId $taskId does not exist.", gatewayContext) + return null + } + val history = histories.get(0) + if (StringUtils.isEmpty(history.getInstances)) { + return JobInstance( + history.getStatus, + null, + history.getJobReqId, + history.getCreatedTime.getTime, + Long.MaxValue + ) + } + val activeInstances = ServiceInstanceUtils.getRPCServerLoader.getServiceInstances( + GatewayConfiguration.ENTRANCE_SPRING_NAME.getValue + ) + val instance = activeInstances + .find(_.getInstance.equals(history.getInstances)) + .getOrElse(ServiceInstance(null, null, Long.MaxValue)) + JobInstance( + history.getStatus, + instance.getInstance, + history.getJobReqId, + history.getCreatedTime.getTime, + instance.getRegistryTimestamp + ) + } + } object EntranceRequestGatewayParser { diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/http/SpringCloudGatewayHttpRequest.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/http/SpringCloudGatewayHttpRequest.scala index d591e5ce94..929ed6ae62 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/http/SpringCloudGatewayHttpRequest.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/scala/org/apache/linkis/gateway/springcloud/http/SpringCloudGatewayHttpRequest.scala @@ -87,8 +87,10 @@ class SpringCloudGatewayHttpRequest(request: AbstractServerHttpRequest) extends override def getHeaders: JMap[String, Array[String]] = headers - override def addHeader(headerName: String, headers: Array[String]): Unit = + override def addHeader(headerName: String, headers: Array[String]): Unit = { + this.headers.put(headerName, headers) addHeaders.put(headerName, headers) + } override def addCookie(cookieName: String, cookies: Array[Cookie]): Unit = { this.cookies.put(cookieName, cookies) diff --git
a/linkis-web-next/package-lock.json b/linkis-web-next/package-lock.json index 967a4091e6..ff48c85670 100644 --- a/linkis-web-next/package-lock.json +++ b/linkis-web-next/package-lock.json @@ -11,7 +11,7 @@ "@fesjs/fes-design": "^0.7.31", "@types/lodash": "^4.14.198", "@vitejs/plugin-vue": "^4.1.0", - "axios": "^1.5.0", + "axios": "^1.6.0", "dayjs": "^1.11.10", "dexie": "^3.2.4", "md5": "^2.3.0", @@ -1553,9 +1553,9 @@ } }, "node_modules/axios": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.5.0.tgz", - "integrity": "sha512-D4DdjDo5CY50Qms0qGQTTw6Q44jl7zRwY7bthds06pUGfChBCTcQs+N743eFWGEd6pRTMd6A+I87aWyFV5wiZQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.0.tgz", + "integrity": "sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==", "dependencies": { "follow-redirects": "^1.15.0", "form-data": "^4.0.0", @@ -2900,6 +2900,19 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -4785,9 +4798,9 @@ } }, "node_modules/postcss": { - "version": "8.4.29", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.29.tgz", - "integrity": "sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "funding": [ { "type": "opencollective", @@ -7452,9 +7465,9 @@ "dev": true }, "axios": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.5.0.tgz", - "integrity": "sha512-D4DdjDo5CY50Qms0qGQTTw6Q44jl7zRwY7bthds06pUGfChBCTcQs+N743eFWGEd6pRTMd6A+I87aWyFV5wiZQ==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.0.tgz", + "integrity": "sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==", "requires": { "follow-redirects": "^1.15.0", "form-data": "^4.0.0", @@ -8484,6 +8497,12 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, + "fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "optional": true + }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -9828,9 +9847,9 @@ "optional": true }, "postcss": { - "version": "8.4.29", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.29.tgz", - "integrity": "sha512-cbI+jaqIeu/VGqXEarWkRCCffhjgXc0qjBtXpqJhTBohMUjUQnbBr0xqX3vEKudc4iviTewcJo5ajcec5+wdJw==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": 
"sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "requires": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", diff --git a/linkis-web-next/package.json b/linkis-web-next/package.json index 72b2c462c1..b42a2ebe9a 100644 --- a/linkis-web-next/package.json +++ b/linkis-web-next/package.json @@ -13,7 +13,7 @@ "@fesjs/fes-design": "^0.7.31", "@types/lodash": "^4.14.198", "@vitejs/plugin-vue": "^4.1.0", - "axios": "^1.5.0", + "axios": "^1.6.0", "dayjs": "^1.11.10", "dexie": "^3.2.4", "md5": "^2.3.0", diff --git a/linkis-web/.env b/linkis-web/.env index 371b8faf75..d62b9326f3 100644 --- a/linkis-web/.env +++ b/linkis-web/.env @@ -2,4 +2,4 @@ VUE_APP_HOST= BACKEND_URL=http://127.0.0.1:9001 VUE_APP_MN_CONFIG_PREFIX= VUE_APP_MN_CONFIG_SOCKET=/ws/api/entrance/connect -VUE_APP_VERSION=1.4.0 +VUE_APP_VERSION=1.5.0 diff --git a/linkis-web/package.json b/linkis-web/package.json index 280927e572..b43a05d183 100644 --- a/linkis-web/package.json +++ b/linkis-web/package.json @@ -1,6 +1,6 @@ { "name": "linkis", - "version": "1.4.0", + "version": "1.5.0", "private": true, "scripts": { "serve": "vue-cli-service serve", @@ -23,7 +23,7 @@ }, "dependencies": { "@form-create/iview": "2.5.27", - "axios": "0.21.4", + "axios": "1.6.0", "babel-polyfill": "6.26.0", "core-js": "3.27.2", "dexie": "3.2.3", diff --git a/linkis-web/release-docs/licenses/LICENSE-hint.css.txt b/linkis-web/release-docs/licenses/LICENSE-hint.css.txt new file mode 100644 index 0000000000..9961a4fb36 --- /dev/null +++ b/linkis-web/release-docs/licenses/LICENSE-hint.css.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Kushagra Gour + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/linkis-web/src/apps/linkis/assets/styles/console.scss b/linkis-web/src/apps/linkis/assets/styles/console.scss index 45a7a4a6d7..008a4b7686 100644 --- a/linkis-web/src/apps/linkis/assets/styles/console.scss +++ b/linkis-web/src/apps/linkis/assets/styles/console.scss @@ -17,8 +17,29 @@ @charset "UTF-8"; @import '@/common/style/variables.scss'; + @import './hint.min.css'; +// * { +// font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt"; +// } +body { + font-family: "Helvetica Neue",Helvetica,"PingFang SC","Hiragino Sans GB","Microsoft YaHei","微软雅黑",Arial,sans-serif, "JinbiaoSong", "JinbiaoSongExt"; +} .console-page{ + .ivu-input { + font-family: Arial, -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt"; + } + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt" !important; + .monaco-editor { + --monaco-monospace-font: "SF Mono", Monaco, Menlo, Consolas, "Ubuntu Mono", "Liberation Mono", "DejaVu Sans Mono", "Courier New", monospace, "JinbiaoSong", "JinbiaoSongExt"; + } + .monaco-mouse-cursor-text { + font-family: Consolas, "Courier New", monospace, "JinbiaoSong", "JinbiaoSongExt" !important; + } + .ivu-tooltip-inner { + overflow-wrap: break-word; + white-space: normal; + } position: $relative; width: $percent-all; height: $percent-all; diff --git a/linkis-web/src/apps/linkis/assets/styles/hint.min.css b/linkis-web/src/apps/linkis/assets/styles/hint.min.css new file mode 100644 index 0000000000..0c0d22d322 --- /dev/null +++ b/linkis-web/src/apps/linkis/assets/styles/hint.min.css @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/*! 
Hint.css - v2.7.0 - 2021-10-01 +* https://kushagra.dev/lab/hint/ +* Copyright (c) 2021 Kushagra Gour */ + +[class*=hint--]{position:relative ; width: 80;}[class*=hint--]:after,[class*=hint--]:before{position:absolute;-webkit-transform:translate3d(0,0,0);-moz-transform:translate3d(0,0,0);transform:translate3d(0,0,0);visibility:hidden;opacity:0;z-index:1000000;pointer-events:none;-webkit-transition:.3s ease;-moz-transition:.3s ease;transition:.3s ease;-webkit-transition-delay:0s;-moz-transition-delay:0s;transition-delay:0s}[class*=hint--]:hover:after,[class*=hint--]:hover:before{visibility:visible;opacity:1;-webkit-transition-delay:.1s;-moz-transition-delay:.1s;transition-delay:.1s}[class*=hint--]:before{content:'';position:absolute;background:0 0;border:6px solid transparent;z-index:1000001}[class*=hint--]:after{background:#383838;color:#fff;padding:8px 10px;font-size:12px;font-family:-apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt";line-height:12px;white-space:pre-line;word-break: normal;text-align:left;text-shadow:0 -1px 0 #000;box-shadow:4px 4px 8px rgba(0,0,0,.3)}[class*=hint--][aria-label]:after{content:attr(aria-label)}[class*=hint--][data-hint]:after{content:attr(data-hint)}[aria-label='']:after,[aria-label='']:before,[data-hint='']:after,[data-hint='']:before{display:none!important}.hint--top-left:before,.hint--top-right:before,.hint--top:before{border-top-color:#383838}.hint--bottom-left:before,.hint--bottom-right:before,.hint--bottom:before{border-bottom-color:#383838}.hint--top:after,.hint--top:before{bottom:100%;left:50%}.hint--top:before{margin-bottom:-11px;left:calc(50% - 6px)}.hint--top:after{-webkit-transform:translateX(-50%);-moz-transform:translateX(-50%);transform:translateX(-50%)}.hint--top:hover:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--top:hover:after{-webkit-transform:translateX(-50%) translateY(-8px);-moz-transform:translateX(-50%) translateY(-8px);transform:translateX(-50%) translateY(-8px)}.hint--bottom:after,.hint--bottom:before{top:100%;left:50%}.hint--bottom:before{margin-top:-11px;left:calc(50% - 6px)}.hint--bottom:after{-webkit-transform:translateX(-50%);-moz-transform:translateX(-50%);transform:translateX(-50%)}.hint--bottom:hover:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--bottom:hover:after{-webkit-transform:translateX(-50%) translateY(8px);-moz-transform:translateX(-50%) translateY(8px);transform:translateX(-50%) translateY(8px)}.hint--right:before{border-right-color:#383838;margin-left:-11px;margin-bottom:-6px}.hint--right:after{margin-bottom:-14px}.hint--right:after,.hint--right:before{left:100%;bottom:50%}.hint--right:hover:after,.hint--right:hover:before{-webkit-transform:translateX(8px);-moz-transform:translateX(8px);transform:translateX(8px)}.hint--left:before{border-left-color:#383838;margin-right:-11px;margin-bottom:-6px}.hint--left:after{margin-bottom:-14px}.hint--left:after,.hint--left:before{right:100%;bottom:50%}.hint--left:hover:after,.hint--left:hover:before{-webkit-transform:translateX(-8px);-moz-transform:translateX(-8px);transform:translateX(-8px)}.hint--top-left:after,.hint--top-left:before{bottom:100%;left:50%}.hint--top-left:before{margin-bottom:-11px;left:calc(50% - 
6px)}.hint--top-left:after{-webkit-transform:translateX(-100%);-moz-transform:translateX(-100%);transform:translateX(-100%);margin-left:12px}.hint--top-left:hover:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--top-left:hover:after{-webkit-transform:translateX(-100%) translateY(-8px);-moz-transform:translateX(-100%) translateY(-8px);transform:translateX(-100%) translateY(-8px)}.hint--top-right:after,.hint--top-right:before{bottom:100%;left:50%}.hint--top-right:before{margin-bottom:-11px;left:calc(50% - 6px)}.hint--top-right:after{-webkit-transform:translateX(0);-moz-transform:translateX(0);transform:translateX(0);margin-left:-12px}.hint--top-right:hover:after,.hint--top-right:hover:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--bottom-left:after,.hint--bottom-left:before{top:100%;left:50%}.hint--bottom-left:before{margin-top:-11px;left:calc(50% - 6px)}.hint--bottom-left:after{-webkit-transform:translateX(-100%);-moz-transform:translateX(-100%);transform:translateX(-100%);margin-left:12px}.hint--bottom-left:hover:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--bottom-left:hover:after{-webkit-transform:translateX(-100%) translateY(8px);-moz-transform:translateX(-100%) translateY(8px);transform:translateX(-100%) translateY(8px)}.hint--bottom-right:after,.hint--bottom-right:before{top:100%;left:50%}.hint--bottom-right:before{margin-top:-11px;left:calc(50% - 6px)}.hint--bottom-right:after{-webkit-transform:translateX(0);-moz-transform:translateX(0);transform:translateX(0);margin-left:-12px}.hint--bottom-right:hover:after,.hint--bottom-right:hover:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--large:after,.hint--medium:after,.hint--small:after{white-space:normal;line-height:1.4em;word-wrap:break-word}.hint--small:after{width:80px}.hint--medium:after{width:150px}.hint--large:after{width:300px}.hint--error:after{background-color:#b34e4d;text-shadow:0 -1px 0 #592726}.hint--error.hint--top-left:before,.hint--error.hint--top-right:before,.hint--error.hint--top:before{border-top-color:#b34e4d}.hint--error.hint--bottom-left:before,.hint--error.hint--bottom-right:before,.hint--error.hint--bottom:before{border-bottom-color:#b34e4d}.hint--error.hint--left:before{border-left-color:#b34e4d}.hint--error.hint--right:before{border-right-color:#b34e4d}.hint--warning:after{background-color:#c09854;text-shadow:0 -1px 0 #6c5328}.hint--warning.hint--top-left:before,.hint--warning.hint--top-right:before,.hint--warning.hint--top:before{border-top-color:#c09854}.hint--warning.hint--bottom-left:before,.hint--warning.hint--bottom-right:before,.hint--warning.hint--bottom:before{border-bottom-color:#c09854}.hint--warning.hint--left:before{border-left-color:#c09854}.hint--warning.hint--right:before{border-right-color:#c09854}.hint--info:after{background-color:#3986ac;text-shadow:0 -1px 0 #1a3c4d}.hint--info.hint--top-left:before,.hint--info.hint--top-right:before,.hint--info.hint--top:before{border-top-color:#3986ac}.hint--info.hint--bottom-left:before,.hint--info.hint--bottom-right:before,.hint--info.hint--bottom:before{border-bottom-color:#3986ac}.hint--info.hint--left:before{border-left-color:#3986ac}.hint--info.hint--right:before{border-right-color:#3986ac}.hint--success:after{background-color:#458746;text-shadow:0 -1px 0 
#1a321a}.hint--success.hint--top-left:before,.hint--success.hint--top-right:before,.hint--success.hint--top:before{border-top-color:#458746}.hint--success.hint--bottom-left:before,.hint--success.hint--bottom-right:before,.hint--success.hint--bottom:before{border-bottom-color:#458746}.hint--success.hint--left:before{border-left-color:#458746}.hint--success.hint--right:before{border-right-color:#458746}.hint--always:after,.hint--always:before{opacity:1;visibility:visible}.hint--always.hint--top:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--always.hint--top:after{-webkit-transform:translateX(-50%) translateY(-8px);-moz-transform:translateX(-50%) translateY(-8px);transform:translateX(-50%) translateY(-8px)}.hint--always.hint--top-left:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--always.hint--top-left:after{-webkit-transform:translateX(-100%) translateY(-8px);-moz-transform:translateX(-100%) translateY(-8px);transform:translateX(-100%) translateY(-8px)}.hint--always.hint--top-right:after,.hint--always.hint--top-right:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--always.hint--bottom:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--always.hint--bottom:after{-webkit-transform:translateX(-50%) translateY(8px);-moz-transform:translateX(-50%) translateY(8px);transform:translateX(-50%) translateY(8px)}.hint--always.hint--bottom-left:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--always.hint--bottom-left:after{-webkit-transform:translateX(-100%) translateY(8px);-moz-transform:translateX(-100%) translateY(8px);transform:translateX(-100%) translateY(8px)}.hint--always.hint--bottom-right:after,.hint--always.hint--bottom-right:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--always.hint--left:after,.hint--always.hint--left:before{-webkit-transform:translateX(-8px);-moz-transform:translateX(-8px);transform:translateX(-8px)}.hint--always.hint--right:after,.hint--always.hint--right:before{-webkit-transform:translateX(8px);-moz-transform:translateX(8px);transform:translateX(8px)}.hint--rounded:after{border-radius:4px}.hint--no-animate:after,.hint--no-animate:before{-webkit-transition-duration:0s;-moz-transition-duration:0s;transition-duration:0s}.hint--bounce:after,.hint--bounce:before{-webkit-transition:opacity .3s ease,visibility .3s ease,-webkit-transform .3s cubic-bezier(.71,1.7,.77,1.24);-moz-transition:opacity .3s ease,visibility .3s ease,-moz-transform .3s cubic-bezier(.71,1.7,.77,1.24);transition:opacity .3s ease,visibility .3s ease,transform .3s cubic-bezier(.71,1.7,.77,1.24)}.hint--no-shadow:after,.hint--no-shadow:before{text-shadow:initial;box-shadow:initial}.hint--no-arrow:before{display:none} diff --git a/linkis-web/src/apps/linkis/components/variable/index.scss b/linkis-web/src/apps/linkis/components/variable/index.scss index 9140e16a35..fd603d45fb 100644 --- a/linkis-web/src/apps/linkis/components/variable/index.scss +++ b/linkis-web/src/apps/linkis/components/variable/index.scss @@ -89,6 +89,11 @@ border: 1px solid red; } } + .two-lines { + height: 60px; + resize: none; + overflow-y: scroll; + } .we-variable-content-input[disabled] { background-color: #f3f3f3; opacity: 1; diff --git a/linkis-web/src/apps/linkis/components/variable/index.vue 
b/linkis-web/src/apps/linkis/components/variable/index.vue index 662a3fef27..0d0ba9aedf 100644 --- a/linkis-web/src/apps/linkis/components/variable/index.vue +++ b/linkis-web/src/apps/linkis/components/variable/index.vue @@ -36,7 +36,7 @@
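A note on the access-control block added to SecurityFilter above: every request that is neither a pass-auth URI nor a user-restful URI is gated twice, first against a per-user IP allowlist looked up under linkis.client.access.control.user.<username>, then against a global IP-to-URL allowlist. Below is a minimal sketch of the per-user check in isolation; the Map stands in for the CommonVars-backed configuration, and the comma-separated allowlist format is an assumption (the filter itself only does a substring test):

object AccessControlSketch {
  // Key prefix taken from the diff; the full key is prefix + username.
  private val AccessControlUserPrefix = "linkis.client.access.control.user."

  // True when `user` may call the gateway from `host`. An empty allowlist
  // means the user is not IP-restricted, mirroring the isNotEmpty guard above.
  def isHostAllowed(conf: Map[String, String], user: String, host: String): Boolean = {
    val userIps = conf.getOrElse(AccessControlUserPrefix + user, "")
    userIps.isEmpty || userIps.split(",").map(_.trim).contains(host)
  }

  def main(args: Array[String]): Unit = {
    val conf = Map("linkis.client.access.control.user.alice" -> "10.0.0.1, 10.0.0.2")
    println(isHostAllowed(conf, "alice", "10.0.0.1")) // true
    println(isHostAllowed(conf, "alice", "10.0.0.9")) // false
    println(isHostAllowed(conf, "bob", "10.0.0.9"))   // true: no allowlist configured
  }
}

Splitting on commas is deliberately stricter than the userIps.contains(host) substring test in the filter, which would also accept host 10.0.0.1 when the allowlist only contains 10.0.0.11.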
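The taskId fallback added to EntranceRequestGatewayParser above resolves a bare numeric execId through job history, serializes the resulting JobInstance into the LINKIS_SERVER_ENTRANCE_HEADER_KEY header, and routes the request to the entrance instance that recorded the job; Long.MaxValue serves as a sentinel registry timestamp when the job never reached an entrance or that entrance is no longer registered (presumably so the receiving side can detect entrance failover). A simplified sketch of that resolution; History, Active, and JobInst are illustrative stand-ins, not the real Linkis classes:

object TaskIdRoutingSketch {
  // Stand-ins for the jobhistory record, an active entrance registration,
  // and the JobInstance payload; field names follow how the diff reads them.
  case class History(status: String, instances: String, jobReqId: String, createdTime: Long)
  case class Active(instance: String, registryTimestamp: Long)
  case class JobInst(
      status: String,
      instances: String,
      jobReqId: String,
      createTimestamp: Long,
      instanceRegistryTimestamp: Long
  )

  def build(history: History, active: Seq[Active]): JobInst =
    if (history.instances == null || history.instances.isEmpty) {
      // The job never reached an entrance: no route, sentinel timestamp.
      JobInst(history.status, null, history.jobReqId, history.createdTime, Long.MaxValue)
    } else {
      // Prefer the still-registered entrance that recorded the job; if it is
      // gone, fall back to the sentinel values.
      val inst = active
        .find(_.instance == history.instances)
        .getOrElse(Active(null, Long.MaxValue))
      JobInst(history.status, inst.instance, history.jobReqId, history.createdTime, inst.registryTimestamp)
    }

  def main(args: Array[String]): Unit = {
    val active = Seq(Active("entrance-1:9104", 1700000000000L))
    println(build(History("Running", "entrance-1:9104", "job-1", 1699999000000L), active))
    println(build(History("Failed", "entrance-2:9104", "job-2", 1699999000000L), active))
  }
}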