import com.datagaps.core.engine.dao.CommonDAO
import com.datagaps.core.engine.security.DecryptUtils
import com.datagaps.core.engine.service.DataNerveUtils
import com.datagaps.core.engine.utils.CodeUtils
import com.datagaps.core.engine.utils.FileHelper.fileSeparator
import org.apache.spark.sql.{Dataset, Row, SaveMode, SparkSession}

// Loads a CSV "manifest" (API_1.csv) from the local "CSVlocal" data source. Each data row
// describes one transfer: a source CSV file and a JDBC destination table.
// Expected columns (no quoting/escaping supported — plain comma-separated values):
//   0: source data-source name   1: file name          2: delimiter ("" -> ",")
//   3: header flag ("true"/"false")  4: JDBC data-source name
//   5: target schema             6: target table
val connection = CommonDAO.getDataSourceByName("CSVlocal") // local csv data source name
val manifestPath = connection.options
  .getOrElse("path", sys.error("""Data source "CSVlocal" has no "path" option""")) + "/" + "API_1.csv"

// Open the manifest and guarantee the file handle is released even if a row fails.
val manifestSource = scala.io.Source.fromFile(manifestPath)
try {
  // drop(1) skips the header row (the original loop ignored count == 0).
  for (line <- manifestSource.getLines().drop(1)) {
    // Limit -1 keeps trailing empty fields, so a blank last column still yields 7 entries
    // instead of throwing ArrayIndexOutOfBoundsException at record(6).
    // NOTE(review): this is a naive split — quoted fields containing commas are not handled.
    val record = line.split(",", -1)

    val datasource     = CommonDAO.getDataSourceByName(record(0))
    val jdbcDatasource = CommonDAO.getDataSourceByName(record(4))
    val location = datasource.options
      .getOrElse("path", sys.error(s"""Data source "${record(0)}" has no "path" option""")) +
      fileSeparator + record(1)

    // Empty delimiter column falls back to a comma.
    val delimiter = if (record(2) == "") "," else record(2)

    val dataframe = spark.read
      .format(datasource.format)
      .option("delimiter", delimiter)
      .option("header", record(3))
      .load(location)

    // Renamed from "connection" to avoid shadowing the outer CSVlocal connection.
    val jdbcOptions = jdbcDatasource.options
    val password = CodeUtils.getDecryptedPassword(jdbcOptions("password")) // DataNerve method to get clear-text password

    // Overwrite replaces the target table's contents on every run.
    dataframe.write
      .format("jdbc")
      .mode(SaveMode.Overwrite)
      .option("url", jdbcOptions("url"))
      .option("driver", jdbcOptions("driver"))
      .option("user", jdbcOptions("userName"))
      .option("password", password)
      .option("dbtable", record(5) + "." + record(6))
      .save()
  }
} finally {
  manifestSource.close() // fixes the leak: the original never closed the Source
}