#19 Naming conventions
ABMC831 committed Oct 4, 2024
1 parent f596158 commit 25f4f43
Showing 10 changed files with 45 additions and 45 deletions.
8 changes: 4 additions & 4 deletions build.sbt
@@ -38,7 +38,7 @@ lazy val models = project
.disablePlugins(sbtassembly.AssemblyPlugin)
.settings(
commonSettings ++ Seq(
-name := "KafkaCase-Models",
+name := "kafkacase-models",
libraryDependencies ++= modelsDependencies,
scalacOptions ++= { if (scalaVersion.value.startsWith("2.13")) Seq("-Ymacro-annotations") else Seq("-Xmacro-settings:enable-macro-paradise") },
),
@@ -50,7 +50,7 @@ lazy val reader = project
.disablePlugins(sbtassembly.AssemblyPlugin)
.settings(
commonSettings ++ Seq(
-name := "KafkaCase-Reader",
+name := "kafkacase-reader",
libraryDependencies ++= readerDependencies
)
)
@@ -60,7 +60,7 @@ lazy val writer = project
.disablePlugins(sbtassembly.AssemblyPlugin)
.settings(
commonSettings ++ Seq(
-name := "KafkaCase-Writer",
+name := "kafkacase-writer",
libraryDependencies ++= writerDependencies
)
)
@@ -69,7 +69,7 @@ lazy val examples = project
lazy val examples = project
.settings(
commonSettings ++ Seq(
-name := "KafkaCase-Examples",
+name := "kafkacase-examples",
libraryDependencies ++= examplesDependencies,
mergeStrategy
)
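With the artifact names now lowercased, a downstream sbt build would pull the modules in roughly as follows. This is only a sketch: the "za.co.absa" organization ID is inferred from the package names, and the version placeholder is not part of this commit.

libraryDependencies ++= Seq(
  "za.co.absa" %% "kafkacase-models" % "<version>",  // topic case classes (EdlaChange, SchemaRun)
  "za.co.absa" %% "kafkacase-reader" % "<version>",  // ReaderImpl
  "za.co.absa" %% "kafkacase-writer" % "<version>"   // WriterImpl
)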
@@ -14,27 +14,27 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Examples
+package za.co.absa.kafkacase.examples

import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerConfig
-import za.co.absa.KafkaCase.Models.EdlaChangeTopic
-import za.co.absa.KafkaCase.Reader.ReaderImpl
-import za.co.absa.KafkaCase.Writer.WriterImpl
+import za.co.absa.kafkacase.models.topics.EdlaChange
+import za.co.absa.kafkacase.reader.ReaderImpl
+import za.co.absa.kafkacase.writer.WriterImpl

import java.util.{Properties, UUID}

object KafkaCase {
private def writer_use_case(): Unit = {
// 0 -> HAVE SOMETHING TO WRITE
-val messageToWrite = EdlaChangeTopic(
+val messageToWrite = EdlaChange(
app_id_snow = "N/A",
data_definition_id = "TestingThis",
environment = "DEV",
format = "FooBar",
guid = "DebugId",
location = "ether",
-operation = EdlaChangeTopic.Operation.CREATE(),
+operation = EdlaChange.Operation.Create(),
schema_link = "http://not.here",
source_app = "ThisCode",
timestamp_event = 12345
@@ -47,7 +47,7 @@ object KafkaCase {
writerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

// 2 -> MAKE WRITER
-val writer = new WriterImpl[EdlaChangeTopic](writerProps, "KillMePleaseTopic")
+val writer = new WriterImpl[EdlaChange](writerProps, "KillMePleaseTopic")
try {
// 3 -> WRITE
writer.Write("sampleMessageKey", messageToWrite)
@@ -68,7 +68,7 @@ object KafkaCase {
readerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

// 2 -> MAKE READER (should be in using block for newer versions of scala)
-val reader = new ReaderImpl[EdlaChangeTopic](readerProps, "KillMePleaseTopic")
+val reader = new ReaderImpl[EdlaChange](readerProps, "KillMePleaseTopic")
try {
for (item <- reader)
println(item)
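Per the Reader trait further down in this commit, each item the reader yields is a (key, Either) pair, so the read loop above can be unpacked along these lines. A sketch only: the pattern match is assumed, while the props and topic name reuse the example's values.

val reader = new ReaderImpl[EdlaChange](readerProps, "KillMePleaseTopic")
try {
  for ((key, value) <- reader) value match {
    case Right(message)   => println(s"$key -> $message")                 // payload decoded into EdlaChange
    case Left(rawOrError) => println(s"$key -> undecodable: $rawOrError") // string that failed decoding
  }
} finally {
  reader.close()
}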
@@ -14,43 +14,43 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Models
+package za.co.absa.kafkacase.models.topics

import io.circe.{Decoder, Encoder}
import io.circe.generic.JsonCodec

@JsonCodec
-case class EdlaChangeTopic(
+case class EdlaChange(
app_id_snow: String,
data_definition_id: String,
environment: String,
format: String,
guid: String,
location: String,
-operation: EdlaChangeTopic.Operation,
+operation: EdlaChange.Operation,
schema_link: String,
source_app: String,
timestamp_event: Long
)

-object EdlaChangeTopic {
+object EdlaChange {
sealed trait Operation

object Operation {
-case class CREATE() extends Operation
-case class UPDATE() extends Operation
-case class ARCHIVE() extends Operation
+case class Create() extends Operation
+case class Update() extends Operation
+case class Archive() extends Operation

implicit val operationEncoder: Encoder[Operation] = Encoder.encodeString.contramap[Operation] {
-case CREATE() => s"CREATE"
-case UPDATE() => s"UPDATE"
-case ARCHIVE() => s"ARCHIVE"
+case Create() => s"CREATE"
+case Update() => s"UPDATE"
+case Archive() => s"ARCHIVE"
}

implicit val operationDecoder: Decoder[Operation] = Decoder.decodeString.emap {
-case "CREATE" => Right(CREATE())
-case "UPDATE" => Right(UPDATE())
-case "ARCHIVE" => Right(ARCHIVE())
+case "CREATE" => Right(Create())
+case "UPDATE" => Right(Update())
+case "ARCHIVE" => Right(Archive())
}
}
}
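The Operation cases are now CamelCase in code while the encoder/decoder above keep the upper-case wire format, so a JSON round trip looks roughly like this (a sketch, not part of the commit):

import io.circe.jawn.decode
import io.circe.syntax.EncoderOps
import za.co.absa.kafkacase.models.topics.EdlaChange

val op: EdlaChange.Operation = EdlaChange.Operation.Create()
val wire = op.asJson.noSpaces                 // the JSON string "CREATE"
val back = decode[EdlaChange.Operation](wire) // Right(Create())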
@@ -14,26 +14,26 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Models
+package za.co.absa.kafkacase.models.topics

import io.circe.{Decoder, Encoder}
import io.circe.generic.JsonCodec

@JsonCodec
-case class SchemaRunTopic(
+case class SchemaRun(
app_id_snow: String,
data_definition_id: String,
environment: String,
guid: String,
job_ref: String,
message: String,
source_app: String,
-status: SchemaRunTopic.Status,
+status: SchemaRun.Status,
timestamp_end: Long,
timestamp_start: Long
)

-object SchemaRunTopic {
+object SchemaRun {
sealed trait Status

object Status {
@@ -14,21 +14,21 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Models
+package za.co.absa.kafkacase.models.topics

import io.circe.jawn.decode
import io.circe.syntax.EncoderOps
import org.scalatest.funsuite.AnyFunSuite

-class EdlaChangeTopicUnitTests extends AnyFunSuite {
-private val instance = EdlaChangeTopic(
+class EdlaChangeUnitTests extends AnyFunSuite {
+private val instance = EdlaChange(
app_id_snow = "N/A",
data_definition_id = "TestingThis",
environment = "DEV",
format = "FooBar",
guid = "DebugId",
location = "ether",
-operation = EdlaChangeTopic.Operation.CREATE(),
+operation = EdlaChange.Operation.Create(),
schema_link = "http://not.here",
source_app = "ThisCode",
timestamp_event = 12345
@@ -53,6 +53,6 @@ class EdlaChangeTopicUnitTests extends AnyFunSuite {
}

test("Deserializes from JSON properly") {
-assertResult(instance)(decode[EdlaChangeTopic](json).getOrElse(throw new Exception("Failed to parse JSON")))
+assertResult(instance)(decode[EdlaChange](json).getOrElse(throw new Exception("Failed to parse JSON")))
}
}
@@ -14,22 +14,22 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Models
+package za.co.absa.kafkacase.models.topics

import io.circe.jawn.decode
import io.circe.syntax.EncoderOps
import org.scalatest.funsuite.AnyFunSuite

-class SchemaRunTopicUnitTests extends AnyFunSuite {
-private val instance = SchemaRunTopic(
+class SchemaRunUnitTests extends AnyFunSuite {
+private val instance = SchemaRun(
app_id_snow = "N/A",
data_definition_id = "Foo",
environment = "TEST",
guid = "DebugId",
job_ref = "UnitTestJob",
message = "FooBar",
source_app = "ThisTest",
-status = SchemaRunTopic.Status.Killed(),
+status = SchemaRun.Status.Killed(),
timestamp_end = 67890,
timestamp_start = 12345
)
@@ -53,6 +53,6 @@ class SchemaRunTopicUnitTests extends AnyFunSuite {
}

test("Deserializes from JSON properly") {
-assertResult(instance)(decode[SchemaRunTopic](json).getOrElse(throw new Exception("Failed to parse JSON")))
+assertResult(instance)(decode[SchemaRun](json).getOrElse(throw new Exception("Failed to parse JSON")))
}
}
@@ -14,6 +14,6 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Reader
+package za.co.absa.kafkacase.reader

trait Reader[TType] extends Iterator[(String, Either[String, TType])] with AutoCloseable
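Because the renamed trait is both an Iterator and AutoCloseable, a caller on Scala 2.13 can manage it with scala.util.Using instead of the try/finally used in the examples module. A sketch under that assumption:

import scala.util.Using
import za.co.absa.kafkacase.reader.Reader

def printAll[T](reader: Reader[T]): Unit =
  Using.resource(reader) { r =>  // closes the reader even if the loop throws
    r.foreach { case (key, parsed) => println(s"$key -> $parsed") }
  }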
@@ -14,13 +14,13 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Reader
+package za.co.absa.kafkacase.reader

import io.circe.Decoder
import io.circe.jawn.decode
import org.slf4j.LoggerFactory
import org.apache.kafka.clients.consumer.{ConsumerRecord, KafkaConsumer}
-import za.co.absa.KafkaCase.Reader.ReaderImpl.{DEFAULT_TIMEOUT, log}
+import za.co.absa.kafkacase.reader.ReaderImpl.{DEFAULT_TIMEOUT, log}

import java.time.Duration
import java.util
@@ -14,7 +14,7 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Writer
+package za.co.absa.kafkacase.writer

trait Writer[TType] extends AutoCloseable {
def Write(key: String, value: TType): Unit
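The write side mirrors this: the trait exposes only Write plus the inherited close(), so a one-shot helper could look like the sketch below (assumed, not from the commit):

import za.co.absa.kafkacase.writer.Writer

def writeOne[T](writer: Writer[T], key: String, value: T): Unit =
  try writer.Write(key, value)
  finally writer.close()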
@@ -14,13 +14,13 @@
* limitations under the License.
*/

-package za.co.absa.KafkaCase.Writer
+package za.co.absa.kafkacase.writer

import io.circe.Encoder
import io.circe.syntax.EncoderOps
import org.slf4j.LoggerFactory
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
-import za.co.absa.KafkaCase.Writer.WriterImpl.log
+import za.co.absa.kafkacase.writer.WriterImpl.log

import java.util.Properties
