Skip to content

Commit 37b3b26

Browse files
authored
[Improve] spark-app run state style improvement (#4100)
1 parent e378ea2 commit 37b3b26

File tree

4 files changed

+24
-18
lines changed
  • streampark-console/streampark-console-webapp/src
  • streampark-spark/streampark-spark-client/streampark-spark-client-api/src/main/scala/org/apache/streampark/spark/client/proxy

4 files changed

+24
-18
lines changed

streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -211,7 +211,7 @@ export default {
211211
startLog: 'View Spark Start Log',
212212
abort: 'Terminate Job',
213213
copy: 'Copy Job',
214-
remapping: 'Remap Job',
214+
remapping: 'Remapping Job',
215215
deleteTip: 'Are you sure you want to delete this job?',
216216
canceling: 'Current job is stopping',
217217
starting: 'Current job is starting',

streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -43,19 +43,19 @@ export const stateMap = {
4343
class: 'status-processing-restarting',
4444
},
4545
[AppStateEnum.ACCEPTED]: {
46-
color: '#52c41a',
46+
color: '#13c2c2',
4747
title: t('spark.app.runState.accept'),
48-
class: 'status-processing-running',
48+
class: 'status-processing-restarting',
4949
},
5050
[AppStateEnum.SUCCEEDED]: {
51-
color: '#52c41a',
51+
color: '#1890ff',
5252
title: t('spark.app.runState.success'),
5353
class: 'status-processing-success',
5454
},
5555
[AppStateEnum.RUNNING]: {
56-
color: '#1890ff',
56+
color: '#52c41a',
5757
title: t('spark.app.runState.running'),
58-
class: 'status-processing-failing',
58+
class: 'status-processing-running',
5959
},
6060
[AppStateEnum.FINISHED]: { color: '#52c41a', title: t('spark.app.runState.finished') },
6161
[AppStateEnum.FAILED]: { color: '#f5222d', title: t('spark.app.runState.failed') },
@@ -73,9 +73,9 @@ export const stateMap = {
7373
},
7474

7575
[AppStateEnum.STOPPING]: {
76-
color: '#f5222d',
76+
color: '#faad14',
7777
title: t('spark.app.runState.stopping'),
78-
class: 'status-processing-initializing',
78+
class: 'status-processing-cancelling',
7979
},
8080
[AppStateEnum.KILLED]: { color: '#8E50FF', title: t('spark.app.runState.killed') },
8181
};

streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -253,8 +253,8 @@ export const useSparkAction = (optionApps: Recordable) => {
253253
<Form class="!pt-50px">
254254
<Form.Item
255255
label="Job Name"
256-
labelCol={{ lg: { span: 7 }, sm: { span: 7 } }}
257-
wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
256+
layout="vertical"
257+
baseColProps={{ span: 22, offset: 1 }}
258258
validateStatus={unref(validateStatus)}
259259
help={help}
260260
rules={[{ required: true }]}
@@ -343,8 +343,8 @@ export const useSparkAction = (optionApps: Recordable) => {
343343
class="!pt-40px"
344344
ref={mappingRef}
345345
name="mappingForm"
346-
labelCol={{ lg: { span: 7 }, sm: { span: 7 } }}
347-
wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
346+
layout="vertical"
347+
baseColProps={{ span: 22, offset: 1 }}
348348
v-model:model={formValue}
349349
>
350350
<Form.Item label="Job Name">

streampark-spark/streampark-spark-client/streampark-spark-client-api/src/main/scala/org/apache/streampark/spark/client/proxy/SparkShimsProxy.scala

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -76,11 +76,10 @@ object SparkShimsProxy extends Logger {
7676
logInfo(s"Add verify sql lib,spark version: $sparkVersion")
7777
VERIFY_SQL_CLASS_LOADER_CACHE.getOrElseUpdate(
7878
s"${sparkVersion.fullVersion}", {
79-
val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars", f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
79+
val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars")
8080
val shimsUrls = ListBuffer[URL](libUrl: _*)
8181

8282
// TODO If there are compatibility issues with different versions
83-
8483
addShimsUrls(
8584
sparkVersion,
8685
file => {
@@ -151,9 +150,8 @@ object SparkShimsProxy extends Logger {
151150
SHIMS_CLASS_LOADER_CACHE.getOrElseUpdate(
152151
s"${sparkVersion.fullVersion}", {
153152
// 1) spark/lib
154-
val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars", f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
153+
val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars")
155154
val shimsUrls = ListBuffer[URL](libUrl: _*)
156-
157155
// 2) add all shims jar
158156
addShimsUrls(
159157
sparkVersion,
@@ -174,10 +172,18 @@ object SparkShimsProxy extends Logger {
174172
private[this] def getSparkHomeLib(
175173
sparkHome: String,
176174
childDir: String,
177-
filterFun: File => Boolean): List[URL] = {
175+
filterFun: File => Boolean = null): List[URL] = {
178176
val file = new File(sparkHome, childDir)
179177
require(file.isDirectory, s"SPARK_HOME $file does not exist")
180-
file.listFiles.filter(filterFun).map(_.toURI.toURL).toList
178+
file.listFiles
179+
.filter(f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
180+
.filter(f => {
181+
if (filterFun != null) {
182+
filterFun(f)
183+
} else {
184+
true
185+
}
186+
}).map(_.toURI.toURL).toList
181187
}
182188

183189
@throws[Exception]

0 commit comments

Comments (0)