From d099cb7fa54e2ecc9fca4d198233f79d80124759 Mon Sep 17 00:00:00 2001
From: ShubhamJaiswalRZP
Date: Thu, 7 Mar 2024 11:32:50 +0530
Subject: [PATCH] stage testing

---
 .../com/razorpay/spark/jdbc/JDBCImport.scala  | 22 +++++++++----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/sql-delta-import/src/main/scala/com/razorpay/spark/jdbc/JDBCImport.scala b/sql-delta-import/src/main/scala/com/razorpay/spark/jdbc/JDBCImport.scala
index ffa8161d2..2126a18b1 100644
--- a/sql-delta-import/src/main/scala/com/razorpay/spark/jdbc/JDBCImport.scala
+++ b/sql-delta-import/src/main/scala/com/razorpay/spark/jdbc/JDBCImport.scala
@@ -50,7 +50,7 @@ case class ImportConfig(
 
   val splitColumn: String = splitBy.getOrElse(null.asInstanceOf[String])
 
-  val dbType: String = credentials.getSecretValue("db_type_sqoop", "sqoop_test")
+  val dbType: String = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_TYPE", "credstash-stage-datahub")
 
   val escapeCharacter = if (dbType == Constants.MYSQL) {
     "`"
@@ -84,7 +84,7 @@ class JDBCImport(
   import spark.implicits._
 
   def createDbIfNotExists(outputDbName: String): Unit = {
-    val s3Bucket = credentials.getSecretValue("s3_bucket_sqoop", "sqoop_test")
+    val s3Bucket = credentials.getSecretValue("datum/datahub/SQOOP_S3_BUCKET", "credstash-stage-datahub")
     val baseS3Path = s"s3a://$s3Bucket/sqoop"
 
     if (!spark.catalog.databaseExists(outputDbName)) {
@@ -96,10 +96,10 @@ class JDBCImport(
   implicit def mapToProperties(m: Map[String, String]): Properties = {
     val properties = new Properties()
 
-    val jdbcUsername = credentials.getSecretValue("db_username_sqoop", "sqoop_test")
-    val jdbcPassword = credentials.getSecretValue("db_password_sqoop", "sqoop_test")
+    val jdbcUsername = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_USER", "credstash-stage-datahub")
+    val jdbcPassword = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_PASSWORD", "credstash-stage-datahub")
 
-    val dbType = credentials.getSecretValue("db_type_sqoop", "sqoop_test")
+    val dbType = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_TYPE", "credstash-stage-datahub")
 
     if (dbType == Constants.MYSQL) {
       properties.setProperty("driver", Constants.MYSQL_DRIVER)
@@ -124,9 +124,9 @@ class JDBCImport(
   }
 
   def buildJdbcUrl: String = {
-    val host = credentials.getSecretValue("db_host_sqoop", "sqoop_test")
-    val port = credentials.getSecretValue("db_port_sqoop", "sqoop_test")
-    val dbType = credentials.getSecretValue("db_type_sqoop", "sqoop_test")
+    val host = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_HOST", "credstash-stage-datahub")
+    val port = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_PORT", "credstash-stage-datahub")
+    val dbType = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_TYPE", "credstash-stage-datahub")
 
     val database = importConfig.database
     val schema = importConfig.schema
@@ -159,8 +159,8 @@ class JDBCImport(
       .map { case (a, b) => (a.getOrElse(defaultString), b.getOrElse(defaultString)) }
      .head
 
-    val jdbcUsername = credentials.getSecretValue("db_username_sqoop", "sqoop_test")
-    val jdbcPassword = credentials.getSecretValue("db_password_sqoop", "sqoop_test")
+    val jdbcUsername = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_USER", "credstash-stage-datahub")
+    val jdbcPassword = credentials.getSecretValue("datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_PASSWORD", "credstash-stage-datahub")
 
     spark.read
       .format("jdbc")
@@ -289,7 +289,7 @@ class JDBCImport(
     val s3BucketConf = importConfig.s3Bucket
 
     val s3Bucket = if (s3BucketConf.isDefined) { s3BucketConf.get }
-    else { credentials.getSecretValue("s3_bucket_sqoop", "sqoop_test") }
+    else { credentials.getSecretValue("datum/datahub/SQOOP_S3_BUCKET", "credstash-stage-datahub") }
 
     val dbtable = importConfig.outputTable.split("\\.")
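
Note: the patch swaps every secret lookup from the "sqoop_test" table to the "credstash-stage-datahub" table, repeating the same getSecretValue(key, table) call at each site (the TYPE key alone is fetched three times). Below is a minimal Scala sketch, not part of the patch, of how those lookups could be fetched once and reused; the SecretStore trait and DbSecrets case class are hypothetical names introduced here for illustration, and only the key/table strings and the getSecretValue call shape are taken from the diff itself.

// Hypothetical interface matching the call shape used in JDBCImport.scala.
trait SecretStore {
  def getSecretValue(key: String, table: String): String
}

final case class DbSecrets(
    dbType: String,
    host: String,
    port: String,
    user: String,
    password: String
)

object DbSecrets {
  // Table and key prefix as they appear in the patch.
  private val Table  = "credstash-stage-datahub"
  private val Prefix = "datum/datahub/MYSQL_SQOOP_PROD_DATUM_MYSQL_"

  // Fetch each secret once and carry the values around, instead of
  // re-reading TYPE, USER and PASSWORD at every call site as the
  // patched file does.
  def load(credentials: SecretStore): DbSecrets =
    DbSecrets(
      dbType   = credentials.getSecretValue(Prefix + "TYPE", Table),
      host     = credentials.getSecretValue(Prefix + "HOST", Table),
      port     = credentials.getSecretValue(Prefix + "PORT", Table),
      user     = credentials.getSecretValue(Prefix + "USER", Table),
      password = credentials.getSecretValue(Prefix + "PASSWORD", Table)
    )
}

Centralizing the lookups would also leave a single place to change when the stage table name ("credstash-stage-datahub") is swapped back out after the stage testing this commit describes.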