Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitmodules
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[submodule "sorting-hat"]
path = sorting-hat
url = git@github.com:the-sortinghat/backend.git
35 changes: 35 additions & 0 deletions app-async/build.gradle.kts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
// Dependency versions are declared in this module's gradle.properties and
// injected by Gradle via project-property delegation.
val mockk_version: String by project
val kafka_clients_version: String by project
val kafka_json_serializer_version: String by project


plugins {
    application
    kotlin("jvm")
    // Shadow builds a self-contained fat JAR for this runnable module.
    id("com.github.johnrengelman.shadow") version "7.0.0"
}

group = "com.usvision.async"
version = "0.0.1"

repositories {
    mavenCentral()
    // Confluent's repository hosts kafka-json-serializer, which is not on Maven Central.
    maven(url = "https://packages.confluent.io/maven/")
}

tasks.test {
    // Run tests with JUnit 5 (JUnit Platform).
    useJUnitPlatform()
}

dependencies {
    // Sibling modules this async app composes.
    implementation(project(":app-creation"))
    implementation(project(":app-model"))
    implementation(project(":app-persistence"))

    // Kafka consumer client plus Confluent's JSON (de)serializer.
    implementation("org.apache.kafka:kafka-clients:${kafka_clients_version}")
    implementation("io.confluent:kafka-json-serializer:${kafka_json_serializer_version}")


    testImplementation("io.mockk:mockk:${mockk_version}")
    testImplementation(kotlin("test"))
}
4 changes: 4 additions & 0 deletions app-async/gradle.properties
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Use the official Kotlin code style for this module.
kotlin.code.style=official
# Dependency versions consumed by build.gradle.kts via `by project` delegation.
mockk_version=1.12.3
kafka_clients_version=3.3.1
# NOTE(review): 5.0.1 is an old Confluent serializer release relative to the
# 7.2.x Confluent platform used in docker-compose — confirm this is intentional.
kafka_json_serializer_version=5.0.1
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
package com.usvision.kafka

import org.apache.kafka.clients.consumer.KafkaConsumer
import java.time.Duration


/**
 * Long-running worker that consumes the `sorting-hat-database.serviceSchema`
 * topic from a local Kafka broker and prints each record's raw value bytes.
 *
 * Runs until the process is killed; poll errors are reported and polling
 * continues (best-effort consumer).
 */
fun main() {
    // Plaintext local broker; read from the beginning of the topic on first
    // run; keys as strings, values as raw bytes (JSON payload from Connect).
    val consumerConfigs =
        mapOf(
            "bootstrap.servers" to "localhost:9092",
            "auto.offset.reset" to "earliest",
            "key.deserializer" to "org.apache.kafka.common.serialization.StringDeserializer",
            "value.deserializer" to "org.apache.kafka.common.serialization.ByteArrayDeserializer",
            "group.id" to "usvision.serviceschema",
            "security.protocol" to "PLAINTEXT"
        )

    val topic = "sorting-hat-database.serviceSchema"

    // use {} guarantees the consumer is closed (leaving the consumer group
    // cleanly) if anything ever escapes the poll loop — the original leaked
    // the consumer on any non-Exception throwable.
    KafkaConsumer<String, ByteArray>(consumerConfigs).use { consumer ->
        consumer.subscribe(listOf(topic))

        while (true) {
            try {
                consumer.poll(Duration.ofMillis(400)).forEach { record ->
                    println(record.value())
                }
            } catch (ex: Exception) {
                // Best-effort: report and keep polling rather than crash the worker.
                // NOTE(review): a persistent failure (e.g. broker down) will spin
                // this loop hot every 400 ms — consider logging with backoff.
                println(ex)
            }
        }
    }
}
6 changes: 6 additions & 0 deletions config-replica.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
// Initialize a single-member replica set named "rs0" whose only (primary)
// member is the sorting-hat MongoDB container, then print the replica-set
// status so the setup container's logs show the outcome.
rs.initiate({
  _id: "rs0",
  members: [{ _id: 0, host: "sorting_hat_mongodb:27017", priority: 1.0 }],
});
rs.status();
138 changes: 132 additions & 6 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
@@ -1,24 +1,150 @@
version: '3.8'

services:
web:
us_vision:
build: .
env_file:
- ./.env.docker-compose
ports:
- "8080:8080"
- "8081:8080"
depends_on:
- mongodb
- us_vision_mongodb

mongodb:
us_vision_mongodb:
image: mongo
env_file:
- ./.env.docker-compose
volumes:
- mongodb_data:/data/db
- us_vision_mongodb_data:/data/db
- ./mongo-init.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
ports:
- "27018:27017"

sorting_hat_mongodb:
image: mongo:4.2-bionic
ports:
- "27017:27017"
volumes:
- sorting_hat_mongodb_data:/data/db
command: --replSet rs0 --oplogSize 128


sorting_hat_mongodb-setup:
image: mongo
depends_on:
- sorting_hat_mongodb
volumes:
- ./config-replica.js:/config-replica.js
entrypoint:
[
"bash",
"-c",
"sleep 10 && mongosh --host sorting_hat_mongodb:27017 config-replica.js && sleep 10",
]
restart: "no"

sorting_hat:
build: ./sorting-hat
ports:
- "8080:8080"
env_file:
- ./sorting-hat/.env
depends_on:
- sorting_hat_mongodb
- sorting_hat_mongodb-setup

zookeeper:
image: confluentinc/cp-zookeeper:7.2.2
hostname: zookeeper
container_name: zookeeper
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000

broker:
image: confluentinc/cp-kafka:7.2.2
hostname: broker
container_name: broker
depends_on:
- zookeeper
ports:
- "9092:9092"
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_LISTENERS: PLAINTEXT://broker:29092,LISTENER_2://localhost:9092
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,LISTENER_2://localhost:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,LISTENER_2:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
CONFLUENT_SUPPORT_CUSTOMER_ID: "anonymous"
KAFKA_DELETE_TOPIC_ENABLE: "true"

schema-registry:
image: confluentinc/cp-schema-registry:7.2.2
hostname: schema-registry
container_name: schema-registry
depends_on:
- broker
ports:
- "3081:8081"
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: "broker:29092"
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181"
SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081

rest-proxy:
image: confluentinc/cp-kafka-rest:7.2.2
depends_on:
- zookeeper
- broker
- schema-registry
ports:
- "3082:8082"
hostname: rest-proxy
container_name: rest-proxy
environment:
KAFKA_REST_HOST_NAME: rest-proxy
KAFKA_REST_BOOTSTRAP_SERVERS: "broker:29092"
KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
KAFKA_REST_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"

connect:
build:
context: .
dockerfile: mongodb-kafka-connect.Dockerfile
ports:
- "35000:35000"
- "8083:8083"
hostname: connect
container_name: connect
depends_on:
- zookeeper
- broker
environment:
KAFKA_JMX_PORT: 35000
KAFKA_JMX_HOSTNAME: localhost
CONNECT_BOOTSTRAP_SERVERS: "broker:29092"
CONNECT_REST_ADVERTISED_HOST_NAME: connect
CONNECT_REST_PORT: 8083
CONNECT_GROUP_ID: connect-cluster-group
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
CONNECT_ZOOKEEPER_CONNECT: "zookeeper:2181"
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
CONNECT_CONNECTIONS_MAX_IDLE_MS: 180000
CONNECT_METADATA_MAX_AGE_MS: 180000
CONNECT_AUTO_CREATE_TOPICS_ENABLE: "true"
CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"

volumes:
mongodb_data:
us_vision_mongodb_data:
sorting_hat_mongodb_data:
5 changes: 5 additions & 0 deletions mongodb-kafka-connect.Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Kafka Connect image extended with the MongoDB source/sink connector.
FROM confluentinc/cp-kafka-connect:7.2.5

# Install the MongoDB connector plugin from Confluent Hub at build time.
# NOTE(review): ":latest" makes builds non-reproducible — consider pinning a version.
RUN confluent-hub install --no-prompt --verbose mongodb/kafka-connect-mongodb:latest

# Make Connect scan both the stock plugin dir and the Confluent Hub install dir.
ENV CONNECT_PLUGIN_PATH="/usr/share/java,/usr/share/confluent-hub-components"
1 change: 1 addition & 0 deletions settings.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@ include("app-reports")
include("app-persistence")
include("app-web")
include("app-creation")
include("app-async")
1 change: 1 addition & 0 deletions sorting-hat
Submodule sorting-hat added at 95695a