// Configure the S3A connector so Spark can talk to the MinIO endpoint.
val hadoopConf = sparkSession.sparkContext.hadoopConfiguration
hadoopConf.set("fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem")
// Use static access/secret key credentials instead of the default provider chain.
hadoopConf.set(
  "fs.s3a.aws.credentials.provider",
  "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"
)
// MinIO is addressed with path-style URLs rather than virtual-hosted buckets.
hadoopConf.set("fs.s3a.path.style.access", "true")
hadoopConf.set("fs.s3a.access.key", params.minioAccessKey.get)
hadoopConf.set("fs.s3a.secret.key", params.minioSecretKey.get)
hadoopConf.set(
  "fs.s3a.connection.ssl.enabled",
  params.minioSSL.get.toString
)
hadoopConf.set("fs.s3a.endpoint", params.minioUrl.get)
// Recursively read all Excel files under s3a://first/, keeping only files
// last modified between 2020-06-01T05:30:00 and 2020-07-01T05:30:00.
val FilterDF = sparkSession.read
  .format("com.crealytics.spark.excel")
  .option("recursiveFileLookup", "true")
  .option("modifiedBefore", "2020-07-01T05:30:00")
  .option("modifiedAfter", "2020-06-01T05:30:00")
  .option("header", "true")
  .load("s3a://first/")
// Display the filtered rows (println on a DataFrame only prints its schema string).
FilterDF.show()
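The snippet assumes a `sparkSession` and a `params` object holding the MinIO settings are already in scope. The sketch below shows one way that surrounding setup could look; the `Params` case class, its default values, and the app name are hypothetical and only illustrate the shape the configuration code above expects, and the spark-excel and hadoop-aws artifacts must be on the classpath (for example via `--packages`).

import org.apache.spark.sql.SparkSession

// Hypothetical holder for the MinIO settings; field names mirror the
// params.* accessors used above and are not part of any library API.
case class Params(
  minioAccessKey: Option[String] = Some("minio-access-key"),
  minioSecretKey: Option[String] = Some("minio-secret-key"),
  minioSSL: Option[Boolean]      = Some(false),
  minioUrl: Option[String]       = Some("http://localhost:9000")
)

val params = Params()

// Local SparkSession for trying the example; requires the spark-excel and
// hadoop-aws dependencies to be available at runtime.
val sparkSession = SparkSession.builder()
  .master("local[*]")
  .appName("excel-modified-filter")
  .getOrCreate()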