Skip to content

Commit

Permalink
Upgrade za.co.absa.commons from v0.0.12 to v1.0.0 (#261)
Browse files Browse the repository at this point in the history
  • Loading branch information
jozefbakus authored Mar 9, 2022
1 parent 712fe9b commit ae99672
Show file tree
Hide file tree
Showing 21 changed files with 55 additions and 20 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.mockito.Mockito
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.driver.TerminationMethodEnum.AwaitTermination
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReader
import za.co.absa.hyperdrive.ingestor.api.transformer.StreamTransformer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, Produce
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.abris.avro.read.confluent.SchemaManagerFactory
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.abris.avro.registry.SchemaSubject
import za.co.absa.hyperdrive.ingestor.implementation.transformer.deduplicate.kafka.PrunedConsumerRecord
import za.co.absa.hyperdrive.ingestor.implementation.utils.KafkaUtil
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, Produce
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.abris.avro.read.confluent.SchemaManagerFactory
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase


/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, Produce
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.abris.avro.read.confluent.SchemaManagerFactory
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

/**
* This e2e test requires a Docker installation on the executing machine.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, Produce
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.abris.avro.read.confluent.SchemaManagerFactory
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

/**
* This e2e test requires a Docker installation on the executing machine.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import org.apache.spark.sql.SaveMode
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestParquetStreamReader extends FlatSpec with MockitoSugar with Matchers with SparkTestBase with BeforeAndAfterEach {
behavior of "ParquetStreamReader"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.scalatest.Suite
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

trait MemoryStreamFixture {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ import za.co.absa.abris.avro.parsing.utils.AvroSchemaUtils
import za.co.absa.abris.avro.read.confluent.SchemaManagerFactory
import za.co.absa.abris.avro.registry.ConfluentMockRegistryClient
import za.co.absa.abris.config.AbrisConfig
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.ingestor.implementation.reader.kafka.KafkaStreamReader.KEY_TOPIC
import za.co.absa.hyperdrive.ingestor.implementation.testutils.abris.AbrisTestUtil.getSchemaRegistryConf
import za.co.absa.hyperdrive.ingestor.implementation.transformer.avro.confluent.ConfluentAvroDecodingTransformer._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import za.co.absa.abris.avro.parsing.utils.AvroSchemaUtils
import za.co.absa.abris.avro.read.confluent.SchemaManagerFactory
import za.co.absa.abris.avro.registry.ConfluentMockRegistryClient
import za.co.absa.abris.config.AbrisConfig
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.ingestor.api.context.HyperdriveContext
import za.co.absa.hyperdrive.ingestor.implementation.HyperdriveContextKeys
import za.co.absa.hyperdrive.ingestor.implementation.testutils.HyperdriveMockSchemaRegistryClient
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.types.{ArrayType, IntegerType, StringType, StructType}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestColumnCopyStreamTransformer extends FlatSpec with SparkTestBase with Matchers with BeforeAndAfter {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.ingestor.api.writer.StreamWriterProperties
import za.co.absa.hyperdrive.ingestor.implementation.writer.parquet.ParquetStreamWriter

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.ingestor.api.writer.StreamWriterProperties
import za.co.absa.hyperdrive.ingestor.implementation.writer.parquet.ParquetStreamWriter

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.ingestor.api.writer.StreamWriterProperties
import za.co.absa.hyperdrive.ingestor.implementation.writer.parquet.ParquetStreamWriter

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.kafka010.KafkaSourceOffsetProxy
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestKafkaUtil extends FlatSpec with Matchers with BeforeAndAfter with SparkTestBase {
private var baseDir: TempDirectory = _
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{OutputMode, Trigger}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestMetadataLogUtil extends FlatSpec with Matchers with SparkTestBase with BeforeAndAfter {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestMongoDbStreamWriter extends FlatSpec with MockitoSugar with Matchers with SparkTestBase {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import org.apache.commons.configuration2.BaseConfiguration
import org.apache.spark.sql.DataFrame
import org.scalatest.{BeforeAndAfter, FlatSpec}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase
import za.co.absa.hyperdrive.ingestor.api.writer.{StreamWriter, StreamWriterCommonAttributes}
import za.co.absa.hyperdrive.ingestor.implementation.testutils.mongodb.ScalaMongoImplicits
import za.co.absa.hyperdrive.ingestor.implementation.testutils.MemoryStreamFixture
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import org.mockito.Mockito._
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.mockito.MockitoSugar
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestParquetStreamWriter extends FlatSpec with MockitoSugar with Matchers with SparkTestBase {

Expand Down
6 changes: 5 additions & 1 deletion parent-conf/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@
<!--Mockito-->
<mockito.version>2.25.0</mockito.version>

<absa.commons.version>0.0.12</absa.commons.version>
<absa.commons.version>1.0.0</absa.commons.version>
<!--Shade Maven plug-in-->
<maven.shade.plugin.version>3.2.1</maven.shade.plugin.version>

Expand Down Expand Up @@ -164,6 +164,10 @@
<artifactId>abris_${scala.compat.version}</artifactId>
<version>${abris.version}</version>
<exclusions>
<exclusion>
<groupId>za.co.absa.commons</groupId>
<artifactId>commons_${scala.compat.version}</artifactId>
</exclusion>
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
/*
* Copyright 2018 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package za.co.absa.hyperdrive.shared.utils

import org.apache.spark.sql.SparkSession

/**
 * Mix-in providing a shared local SparkSession for unit/integration tests.
 *
 * The session runs fully locally (`local[*]`, loopback driver address, local
 * filesystem as the default FS) so test suites need no cluster or network.
 * `getOrCreate` means all suites mixing this in share one session per JVM.
 */
trait SparkTestBase {
  implicit val spark: SparkSession = SparkSession.builder()
    .master("local[*]")
    // NOTE(review): name was copied from absa-commons ("Commons unit testing
    // SchemaUtils"); corrected to describe this project, and the unnecessary
    // `s` interpolator (no interpolation) removed.
    .appName("Hyperdrive unit testing")
    // Spark UI is not needed in tests and slows startup / can clash on ports.
    .config("spark.ui.enabled", "false")
    .config("spark.debug.maxToStringFields", 100)
    // Bind to loopback so tests run on machines/CI without resolvable hostnames.
    .config("spark.driver.bindAddress", "127.0.0.1")
    .config("spark.driver.host", "127.0.0.1")
    .config("spark.sql.hive.convertMetastoreParquet", false)
    // Use the local filesystem rather than any HDFS configured on the host.
    .config("fs.defaultFS", "file:/")
    .getOrCreate()
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import java.util.UUID
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import za.co.absa.commons.io.TempDirectory
import za.co.absa.commons.spark.SparkTestBase
import za.co.absa.hyperdrive.shared.utils.SparkTestBase

class TestFileUtils extends FlatSpec with Matchers with SparkTestBase with BeforeAndAfter {

Expand Down

0 comments on commit ae99672

Please sign in to comment.