
Commit

Code refactoring improvements.
Restored SmartConnectorExample.scala.
PradeepSurale committed Feb 18, 2019
1 parent 799d4ab commit e4b806b
Showing 5 changed files with 9 additions and 41 deletions.
2 changes: 2 additions & 0 deletions build.gradle
@@ -818,6 +818,8 @@ task product(type: Zip) {
dependsOn ":snappy-spark:snappy-spark-assembly_${scalaBinaryVersion}:sparkProduct"
dependsOn ':snappy-launcher:jar'
dependsOn ':jdbcJar'
// Commented out "v2connector" to exclude it from the build and test process.
// Uncomment if we decide to include it.
// dependsOn ":snappy-v2connector_${scalaBinaryVersion}:jar"

def clusterProject = project(":snappy-cluster_${scalaBinaryVersion}")
@@ -37,9 +37,7 @@ final class SmartConnectorRDDHelper {
def prepareScan(conn: Connection, txId: String, columnTable: String, projection: Array[Int],
serializedFilters: Array[Byte], partition: SmartExecutorBucketPartition,
catalogVersion: Long): (PreparedStatement, ResultSet) = {
// An additional (4th) parameter was added to support the v2 Connector;
// it decides whether to use the kryo serializer or the Java serializer.
// Here we use the kryo serializer.

val pstmt = conn.prepareStatement("call sys.COLUMN_TABLE_SCAN(?, ?, ?, 1)")
pstmt.setString(1, columnTable)
pstmt.setString(2, projection.mkString(","))
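For reference, here is a minimal sketch of how the remaining parameters of this call might be bound; the helper name prepareColumnScan and the setBytes binding for the serialized filters are illustrative assumptions, not the repository's actual code. Per the comment removed above, the hard-coded fourth argument (1) selects kryo serialization.

import java.sql.{Connection, PreparedStatement}

// Hedged sketch only: binds the visible COLUMN_TABLE_SCAN parameters.
def prepareColumnScan(conn: Connection, columnTable: String,
    projection: Array[Int], serializedFilters: Array[Byte]): PreparedStatement = {
  val pstmt = conn.prepareStatement("call sys.COLUMN_TABLE_SCAN(?, ?, ?, 1)")
  pstmt.setString(1, columnTable)              // target column table
  pstmt.setString(2, projection.mkString(",")) // projected column ordinals as a CSV string
  pstmt.setBytes(3, serializedFilters)         // serialized filter predicates (assumed binding)
  pstmt
}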
@@ -16,9 +16,7 @@
*/
package org.apache.spark.examples.snappydata

import java.util.{Properties, Random}

import org.apache.spark.sql.{SaveMode, SnappySession, SparkSession}
import org.apache.spark.sql.{SnappySession, SparkSession}

/**
* This example shows how an application can interact with SnappyStore in Split cluster mode.
@@ -72,40 +70,10 @@ object SmartConnectorExample {
builder.config(params(0), params(1))
})

builder.config("spark.snappydata.connection", "localhost:1527" )

val spark: SparkSession = builder.getOrCreate

val random = Math.abs(new Random().nextInt())

// scalastyle:off
println("Random Name "+random)

val prop = Map("user"->"debian-sys-maint", "password"->"8hQgx9JeFX7lfqAk",
"url"-> "jdbc:mysql://localhost:3306/inventory", "dbtable" -> "STAGING_AIRLINEREF")

val properties = new Properties()
properties.put("user", "debian-sys-maint")
properties.put("password", "8hQgx9JeFX7lfqAk")
properties.put( "url", "jdbc:mysql://localhost:3306/inventory")
properties.put("dbtable", "STAGING_AIRLINEREF")

val spark: SparkSession = builder
.getOrCreate
val snSession = new SnappySession(spark.sparkContext)

// val dataset = snSession.sql("select * from AIRLINEREF" )

val dataset = snSession.read.parquet("/home/pradeep/trunk/snappydata/examples/quickstart/data/airportcodeParquetData")

// val dataset = spark.read.parquet("/home/pradeep/" +
// "trunk/snappydata/examples/quickstart/data/airlineParquetData")

dataset.write.format("jdbc").options(prop).saveAsTable("STAGING_AIRLINEREF" )

// dataset.write.jdbc("jdbc:mysql://localhost:3306/inventory", "AIRLINEREF", properties)

/* val snSession = new SnappySession(spark.sparkContext)
// scalastyle:off
println("\n\n #### Reading from the SnappyStore table SNAPPY_COL_TABLE #### \n")
val colTable = snSession.table("SNAPPY_COL_TABLE")
colTable.show(10)
@@ -120,9 +88,7 @@ object SmartConnectorExample {

snSession.sql("create table TestColumnTable (id bigint not null, k bigint not null) using column")

dataFrame.write.insertInto("TestColumnTable") */

// scalastyle:off
dataFrame.write.insertInto("TestColumnTable")

println(" #### Write to table completed. ### \n\n" +
"Now you can query table TestColumnTable using $SNAPPY_HOME/bin/snappy-shell")
2 changes: 2 additions & 0 deletions settings.gradle
@@ -27,6 +27,8 @@ include 'dunit'
include ':snappy-dtests_' + scalaBinaryVersion
include ':snappy-compatibility-tests_' + scalaBinaryVersion
include ':snappy-encoders_' + scalaBinaryVersion
// Commented out "v2connector" to exclude it from the build and test process.
// Uncomment if we decide to include it.
// include ':snappy-v2connector_' + scalaBinaryVersion

project(':snappy-jdbc_' + scalaBinaryVersion).projectDir = "$rootDir/jdbc" as File
