Skip to content
This repository was archived by the owner on Nov 28, 2020. It is now read-only.

Commit 77a8b67

Browse files
committed
HDP-3.1
1 parent dedb948 commit 77a8b67

3 files changed

Lines changed: 27 additions & 24 deletions

File tree

build.sbt

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,14 @@ import scala.util.Properties
22

33
name := """bdg-sequila"""
44

5-
version := "0.5.3-spark-2.3.3-SNAPSHOT"
5+
version := "0.5.3-spark-2.3.2.3.1.0.0-78-SNAPSHOT"
66

77
organization := "org.biodatageeks"
88

99
scalaVersion := "2.11.8"
1010

11-
val DEFAULT_SPARK_2_VERSION = "2.3.3"
12-
val DEFAULT_HADOOP_VERSION = "2.6.5"
11+
val DEFAULT_SPARK_2_VERSION = "2.3.2.3.1.0.0-78"
12+
val DEFAULT_HADOOP_VERSION = "3.1.1"
1313

1414

1515
lazy val sparkVersion = Properties.envOrElse("SPARK_VERSION", DEFAULT_SPARK_2_VERSION)
@@ -30,14 +30,14 @@ libraryDependencies += "com.holdenkarau" % "spark-testing-base_2.11" % "2.3.2_0.
3030

3131
//libraryDependencies += "org.apache.spark" %% "spark-hive" % "2.0.0" % "test"
3232

33-
libraryDependencies += "org.bdgenomics.adam" %% "adam-core-spark2" % "0.24.0"
34-
libraryDependencies += "org.bdgenomics.adam" %% "adam-apis-spark2" % "0.24.0"
35-
libraryDependencies += "org.bdgenomics.adam" %% "adam-cli-spark2" % "0.24.0"
33+
libraryDependencies += "org.bdgenomics.adam" %% "adam-core-spark2" % "0.24.0" excludeAll (ExclusionRule("org.apache.hadoop"))
34+
libraryDependencies += "org.bdgenomics.adam" %% "adam-apis-spark2" % "0.24.0" excludeAll (ExclusionRule("org.apache.hadoop"))
35+
libraryDependencies += "org.bdgenomics.adam" %% "adam-cli-spark2" % "0.24.0" excludeAll (ExclusionRule("org.apache.hadoop"))
3636
libraryDependencies += "org.scala-lang" % "scala-library" % "2.11.8"
3737
libraryDependencies += "org.rogach" %% "scallop" % "3.1.2"
3838

3939

40-
libraryDependencies += "org.hammerlab.bdg-utils" %% "cli" % "0.3.0"
40+
libraryDependencies += "org.hammerlab.bdg-utils" %% "cli" % "0.3.0" excludeAll (ExclusionRule("org.apache.hadoop"))
4141

4242
libraryDependencies += "com.github.samtools" % "htsjdk" % "2.18.2"
4343

@@ -51,15 +51,15 @@ libraryDependencies += "org.apache.logging.log4j" % "log4j-api" % "2.11.0"
5151
libraryDependencies += "com.intel.gkl" % "gkl" % "0.8.5-1-darwin-SNAPSHOT"
5252
libraryDependencies += "com.intel.gkl" % "gkl" % "0.8.5-1-linux-SNAPSHOT"
5353

54-
libraryDependencies += "org.hammerlab.bam" %% "load" % "1.2.0-M1"
54+
libraryDependencies += "org.hammerlab.bam" %% "load" % "1.2.0-M1" excludeAll (ExclusionRule("org.apache.hadoop"))
5555

5656
libraryDependencies += "de.ruedigermoeller" % "fst" % "2.57"
5757
libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.7"
5858
libraryDependencies += "org.eclipse.jetty" % "jetty-servlet" % "9.3.24.v20180605"
5959
libraryDependencies += "org.apache.derby" % "derbyclient" % "10.14.2.0"
6060

6161

62-
libraryDependencies += "org.biodatageeks" % "bdg-performance_2.11" % "0.2-spark-2.3.3-SNAPSHOT" excludeAll (ExclusionRule("org.apache.hadoop"))
62+
//libraryDependencies += "org.biodatageeks" % "bdg-performance_2.11" % "0.2-spark-2.3.3-SNAPSHOT" excludeAll (ExclusionRule("org.apache.hadoop"))
6363

6464

6565

@@ -90,7 +90,7 @@ resolvers ++= Seq(
9090
"Job Server Bintray" at "https://dl.bintray.com/spark-jobserver/maven",
9191
"zsibio-snapshots" at "http://zsibio.ii.pw.edu.pl/nexus/repository/maven-snapshots/",
9292
"spring" at "http://repo.spring.io/libs-milestone/",
93-
"Cloudera" at "https://repository.cloudera.com/content/repositories/releases/",
93+
"confluent" at "http://packages.confluent.io/maven/",
9494
"Hortonworks" at "http://repo.hortonworks.com/content/repositories/releases/"
9595
)
9696

@@ -102,10 +102,12 @@ assemblyMergeStrategy in assembly := {
102102
case PathList("com", xs@_*) => MergeStrategy.first
103103
case PathList("shadeio", xs@_*) => MergeStrategy.first
104104

105+
case PathList(ps @ _*) if ps.last endsWith ".class" => MergeStrategy.first
105106
case PathList("au", xs@_*) => MergeStrategy.first
106107
case ("META-INF/org/apache/logging/log4j/core/config/plugins/Log4j2Plugins.dat") => MergeStrategy.first
107108
case ("images/ant_logo_large.gif") => MergeStrategy.first
108109

110+
109111
case "overview.html" => MergeStrategy.rename
110112
case "mapred-default.xml" => MergeStrategy.last
111113
case "about.html" => MergeStrategy.rename

src/main/scala/org/biodatageeks/hive/ThriftServerPageSeq.scala

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,10 +27,10 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
2727
{listener.getOnlineSessionNum} session(s) are online,
2828
running {listener.getTotalRunning} SQL statement(s)
2929
</h4> ++
30-
generateSessionStatsTable() ++
31-
generateSQLStatsTable()
30+
generateSessionStatsTable(request) ++
31+
generateSQLStatsTable(request)
3232
}
33-
UIUtils.headerSparkPage("JDBC/ODBC Server", content, parent, Some(5000))
33+
UIUtils.headerSparkPage(request,"JDBC/ODBC Server", content, parent, Some(5000))
3434
}
3535

3636
/** Generate basic stats of the thrift server program */
@@ -47,16 +47,16 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
4747
}
4848

4949
/** Generate stats of batch statements of the thrift server program */
50-
private def generateSQLStatsTable(): Seq[Node] = {
50+
private def generateSQLStatsTable(request: HttpServletRequest): Seq[Node] = {
5151
val numStatement = listener.getExecutionList.size
5252
val table = if (numStatement > 0) {
5353
val headerRow = Seq("User", "JobID", "GroupID", "Start Time", "Finish Time", "Duration",
5454
"Statement", "State", "Detail")
55-
val dataRows = listener.getExecutionList
55+
val dataRows = listener.getExecutionList.sortBy(_.startTimestamp).reverse
5656

5757
def generateDataRow(info: ExecutionInfo): Seq[Node] = {
5858
val jobLink = info.jobId.map { id: String =>
59-
<a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(parent.basePath), id)}>
59+
<a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(request,parent.basePath), id)}>
6060
[{id}]
6161
</a>
6262
}
@@ -118,16 +118,17 @@ private[ui] class ThriftServerPageSeq(parent: ThriftServerTabSeq) extends WebUIP
118118
}
119119

120120
/** Generate stats of batch sessions of the thrift server program */
121-
private def generateSessionStatsTable(): Seq[Node] = {
121+
private def generateSessionStatsTable(request: HttpServletRequest): Seq[Node] = {
122122
val sessionList = listener.getSessionList
123123
val numBatches = sessionList.size
124124
val table = if (numBatches > 0) {
125-
val dataRows = sessionList
125+
val dataRows = sessionList.sortBy(_.startTimestamp).reverse
126126
val headerRow = Seq("User", "IP", "Session ID", "Start Time", "Finish Time", "Duration",
127127
"Total Execute")
128+
128129
def generateDataRow(session: SessionInfo): Seq[Node] = {
129130
val sessionLink = "%s/%s/session?id=%s"
130-
.format(UIUtils.prependBaseUri(parent.basePath), parent.prefix, session.sessionId)
131+
.format(UIUtils.prependBaseUri(request,parent.basePath), parent.prefix, session.sessionId)
131132
<tr>
132133
<td> {session.userName} </td>
133134
<td> {session.ip} </td>

src/main/scala/org/biodatageeks/hive/ThriftServerSessionPageSeq.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -40,9 +40,9 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)
4040
Session created at {formatDate(sessionStat.startTimestamp)},
4141
Total run {sessionStat.totalExecution} SQL
4242
</h4> ++
43-
generateSQLStatsTable(sessionStat.sessionId)
43+
generateSQLStatsTable(request,sessionStat.sessionId)
4444
}
45-
UIUtils.headerSparkPage("JDBC/ODBC Session", content, parent, Some(5000))
45+
UIUtils.headerSparkPage(request,"JDBC/ODBC Session", content, parent, Some(5000))
4646
}
4747

4848
/** Generate basic stats of the thrift server program */
@@ -59,7 +59,7 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)
5959
}
6060

6161
/** Generate stats of batch statements of the thrift server program */
62-
private def generateSQLStatsTable(sessionID: String): Seq[Node] = {
62+
private def generateSQLStatsTable(request: HttpServletRequest, sessionID: String): Seq[Node] = {
6363
val executionList = listener.getExecutionList
6464
.filter(_.sessionId == sessionID)
6565
val numStatement = executionList.size
@@ -70,7 +70,7 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)
7070

7171
def generateDataRow(info: ExecutionInfo): Seq[Node] = {
7272
val jobLink = info.jobId.map { id: String =>
73-
<a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(parent.basePath), id)}>
73+
<a href={"%s/jobs/job?id=%s".format(UIUtils.prependBaseUri(request,parent.basePath), id)}>
7474
[{id}]
7575
</a>
7676
}
@@ -181,4 +181,4 @@ private[ui] class ThriftServerSessionPageSeq(parent: ThriftServerTabSeq)
181181
}
182182
UIUtils.listingTable(headers, generateDataRow, data, fixedWidth = true)
183183
}
184-
}
184+
}

0 commit comments

Comments (0)