diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..26a09f6 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "sorting-hat"] + path = sorting-hat + url = https://github.com/the-sortinghat/backend.git diff --git a/app-async/build.gradle.kts b/app-async/build.gradle.kts new file mode 100644 index 0000000..e892588 --- /dev/null +++ b/app-async/build.gradle.kts @@ -0,0 +1,35 @@ +val mockk_version: String by project +val kafka_clients_version: String by project +val kafka_json_serializer_version: String by project + + +plugins { + application + kotlin("jvm") + id("com.github.johnrengelman.shadow") version "7.0.0" +} + +group = "com.usvision.async" +version = "0.0.1" + +repositories { + mavenCentral() + maven(url = "https://packages.confluent.io/maven/") +} + +tasks.test { + useJUnitPlatform() +} + +dependencies { + implementation(project(":app-creation")) + implementation(project(":app-model")) + implementation(project(":app-persistence")) + + implementation("org.apache.kafka:kafka-clients:${kafka_clients_version}") + implementation("io.confluent:kafka-json-serializer:${kafka_json_serializer_version}") + + + testImplementation("io.mockk:mockk:${mockk_version}") + testImplementation(kotlin("test")) +} \ No newline at end of file diff --git a/app-async/gradle.properties b/app-async/gradle.properties new file mode 100644 index 0000000..bca845c --- /dev/null +++ b/app-async/gradle.properties @@ -0,0 +1,4 @@ +kotlin.code.style=official +mockk_version=1.12.3 +kafka_clients_version=3.3.1 +kafka_json_serializer_version=7.2.2 \ No newline at end of file diff --git a/app-async/src/main/kotlin/com/usvision/kafka/CompanySystemConfig.kt b/app-async/src/main/kotlin/com/usvision/kafka/CompanySystemConfig.kt new file mode 100644 index 0000000..776400e --- /dev/null +++ b/app-async/src/main/kotlin/com/usvision/kafka/CompanySystemConfig.kt @@ -0,0 +1,35 @@ +package com.usvision.kafka + +import org.apache.kafka.clients.consumer.KafkaConsumer +import 
java.time.Duration + + +fun main() { + val consumerConfigs = + mapOf( + "bootstrap.servers" to "localhost:9092", + "auto.offset.reset" to "earliest", + "key.deserializer" to "org.apache.kafka.common.serialization.StringDeserializer", + "value.deserializer" to "org.apache.kafka.common.serialization.ByteArrayDeserializer", + "group.id" to "usvision.serviceschema", + "security.protocol" to "PLAINTEXT" + ) + + val topic = "sorting-hat-database.serviceSchema" + val consumer = KafkaConsumer<String, ByteArray>(consumerConfigs) + + consumer.subscribe(listOf(topic)) + + while (true) { + try { + consumer.poll(Duration.ofMillis(400)).forEach { + item -> println(item.value()) + } + } catch (ex: Exception) { + println(ex) + } + } + + + +} \ No newline at end of file diff --git a/config-replica.js b/config-replica.js new file mode 100644 index 0000000..00e7f2f --- /dev/null +++ b/config-replica.js @@ -0,0 +1,6 @@ +rsconf = { + _id: "rs0", + members: [{ _id: 0, host: "sorting_hat_mongodb:27017", priority: 1.0 }], +}; +rs.initiate(rsconf); +rs.status(); diff --git a/docker-compose.yaml b/docker-compose.yaml index 3031e56..a1427be 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,24 +1,150 @@ version: '3.8' services: - web: + us_vision: build: . 
env_file: - ./.env.docker-compose ports: - - "8080:8080" + - "8081:8080" depends_on: - - mongodb + - us_vision_mongodb - mongodb: + us_vision_mongodb: image: mongo env_file: - ./.env.docker-compose volumes: - - mongodb_data:/data/db + - us_vision_mongodb_data:/data/db + - ./mongo-init.js:/docker-entrypoint-initdb.d/mongo-init.js:ro + ports: + - "27018:27017" + + sorting_hat_mongodb: + image: mongo:4.2-bionic ports: - "27017:27017" + volumes: + - sorting_hat_mongodb_data:/data/db + command: --replSet rs0 --oplogSize 128 + + + sorting_hat_mongodb-setup: + image: mongo + depends_on: + - sorting_hat_mongodb + volumes: + - ./config-replica.js:/config-replica.js + entrypoint: + [ + "bash", + "-c", + "sleep 10 && mongosh --host sorting_hat_mongodb:27017 config-replica.js && sleep 10", + ] + restart: "no" + + sorting_hat: + build: ./sorting-hat + ports: + - "8080:8080" + env_file: + - ./sorting-hat/.env + depends_on: + - sorting_hat_mongodb + - sorting_hat_mongodb-setup + + zookeeper: + image: confluentinc/cp-zookeeper:7.2.2 + hostname: zookeeper + container_name: zookeeper + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + + broker: + image: confluentinc/cp-kafka:7.2.2 + hostname: broker + container_name: broker + depends_on: + - zookeeper + ports: + - "9092:9092" + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" + KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:29092,LISTENER_2://0.0.0.0:9092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,LISTENER_2://localhost:9092 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,LISTENER_2:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + CONFLUENT_SUPPORT_CUSTOMER_ID: "anonymous" + KAFKA_DELETE_TOPIC_ENABLE: "true" + + schema-registry: + image: confluentinc/cp-schema-registry:7.2.2 + hostname: schema-registry + container_name: schema-registry + depends_on: + - broker + ports: + - 
"3081:8081" + environment: + SCHEMA_REGISTRY_HOST_NAME: schema-registry + SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: "broker:29092" + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181" + SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 + + rest-proxy: + image: confluentinc/cp-kafka-rest:7.2.2 + depends_on: + - zookeeper + - broker + - schema-registry + ports: + - "3082:8082" + hostname: rest-proxy + container_name: rest-proxy + environment: + KAFKA_REST_HOST_NAME: rest-proxy + KAFKA_REST_BOOTSTRAP_SERVERS: "broker:29092" + KAFKA_REST_LISTENERS: "http://0.0.0.0:8082" + KAFKA_REST_SCHEMA_REGISTRY_URL: "http://schema-registry:8081" + + connect: + build: + context: . + dockerfile: mongodb-kafka-connect.Dockerfile + ports: + - "35000:35000" + - "8083:8083" + hostname: connect + container_name: connect + depends_on: + - zookeeper + - broker + environment: + KAFKA_JMX_PORT: 35000 + KAFKA_JMX_HOSTNAME: localhost + CONNECT_BOOTSTRAP_SERVERS: "broker:29092" + CONNECT_REST_ADVERTISED_HOST_NAME: connect + CONNECT_REST_PORT: 8083 + CONNECT_GROUP_ID: connect-cluster-group + CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs + CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1 + CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000 + CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets + CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1 + CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status + CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1 + CONNECT_ZOOKEEPER_CONNECT: "zookeeper:2181" + CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components" + CONNECT_CONNECTIONS_MAX_IDLE_MS: 180000 + CONNECT_METADATA_MAX_AGE_MS: 180000 + CONNECT_AUTO_CREATE_TOPICS_ENABLE: "true" + CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" volumes: - mongodb_data: + us_vision_mongodb_data: + sorting_hat_mongodb_data: diff --git a/mongodb-kafka-connect.Dockerfile b/mongodb-kafka-connect.Dockerfile new file mode 
100644 index 0000000..25af83f --- /dev/null +++ b/mongodb-kafka-connect.Dockerfile @@ -0,0 +1,5 @@ +FROM confluentinc/cp-kafka-connect:7.2.5 + +RUN confluent-hub install --no-prompt --verbose mongodb/kafka-connect-mongodb:latest + +ENV CONNECT_PLUGIN_PATH="/usr/share/java,/usr/share/confluent-hub-components" diff --git a/settings.gradle.kts b/settings.gradle.kts index d64241d..6a251d3 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -6,3 +6,4 @@ include("app-reports") include("app-persistence") include("app-web") include("app-creation") +include("app-async") diff --git a/sorting-hat b/sorting-hat new file mode 160000 index 0000000..95695a1 --- /dev/null +++ b/sorting-hat @@ -0,0 +1 @@ +Subproject commit 95695a1808e477ce67d8257891f4fa9be21f8cce