diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml
index e7e5efa..fd9e28d 100644
--- a/.github/workflows/pipeline.yml
+++ b/.github/workflows/pipeline.yml
@@ -5,15 +5,55 @@ on:
jobs:
ci:
- uses: PedroHenriques/ci_cd_workflow_templates/.github/workflows/ci_dotnet_package.yml@v1
+ uses: PedroHenriques/ci_cd_workflow_templates/.github/workflows/ci_docker.yml@v1
with:
environment: "dev"
deployable_branch_name: 'main'
source_dir_name: 'src'
- deployment_file-or-dir_path: 'build.txt'
+ manifest_dir_name: 'Infrastructure'
custom_service_file_pattern: '*.csproj'
- build_file_pattern: 'build.txt'
- major_version_label_name: 'major'
- minor_version_label_name: 'minor'
- patch_version_label_name: 'patch'
- secrets: inherit
\ No newline at end of file
+ build_file_pattern: 'Dockerfile'
+ deploy_all_services_label_name: 'deploy all services'
+ secrets: inherit
+
+ # cd-dev:
+ # needs: ci
+ # if: ${{ github.event_name == 'pull_request' && github.event.action == 'closed' && github.event.pull_request.merged == true && github.base_ref == 'main' }}
+ # uses: PedroHenriques/ci_cd_workflow_templates/.github/workflows/cd_docker.yml@v1
+ # with:
+ # environment: "dev"
+ # source_dir_name: 'src'
+ # manifest_dir_name: 'Infrastructure'
+ # custom_service_file_pattern: '*.csproj'
+ # build_file_pattern: 'Dockerfile'
+ # img_tag: ${{ needs.ci.outputs.img_tag }}
+ # deploy_all_services_label_name: 'deploy all services'
+ # secrets: inherit
+
+ # cd-qa:
+ # needs: [ci, cd-dev]
+ # if: ${{ github.event_name == 'pull_request' && github.event.action == 'closed' && github.event.pull_request.merged == true && github.base_ref == 'main' }}
+ # uses: PedroHenriques/ci_cd_workflow_templates/.github/workflows/cd_docker.yml@v1
+ # with:
+ # environment: "qua"
+ # source_dir_name: 'src'
+ # manifest_dir_name: 'Infrastructure'
+ # custom_service_file_pattern: '*.csproj'
+ # build_file_pattern: 'Dockerfile'
+ # img_tag: ${{ needs.ci.outputs.img_tag }}
+ # deploy_all_services_label_name: 'deploy all services'
+ # secrets: inherit
+
+ # cd-prd:
+ # needs: [ci, cd-qa]
+ # if: ${{ github.event_name == 'pull_request' && github.event.action == 'closed' && github.event.pull_request.merged == true && github.base_ref == 'main' }}
+ # uses: PedroHenriques/ci_cd_workflow_templates/.github/workflows/cd_docker.yml@v1
+ # with:
+ # environment: "prd"
+ # source_dir_name: 'src'
+ # manifest_dir_name: 'Infrastructure'
+ # custom_service_file_pattern: '*.csproj'
+ # build_file_pattern: 'Dockerfile'
+ # img_tag: ${{ needs.ci.outputs.img_tag }}
+ # deploy_all_services_label_name: 'deploy all services'
+ # secrets: inherit
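
A note on the pipeline change above: the commented-out cd-* jobs consume `needs.ci.outputs.img_tag`, so they can only be enabled once the referenced `ci_docker.yml` template exposes that output. The `deploy_all_services_label_name` input suggests the templates react to a PR label with that name; assuming that is how the label is read (not confirmed by this diff), forcing a full redeploy from a pull request would be a one-liner with the GitHub CLI, sketched below with a hypothetical PR number.

# Hypothetical: attach the "deploy all services" label to PR 123 so the CI/CD
# templates (if they key off PR labels by this name) rebuild and redeploy
# every service instead of only the changed ones.
gh pr edit 123 --add-label "deploy all services"
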
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..6bf7787
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,11 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
+
+## YYYY-MM-DD
+
+### Added
+
+- Initial version of the application
diff --git a/README.md b/README.md
index 43afc44..1d5eda4 100644
--- a/README.md
+++ b/README.md
@@ -1,26 +1,34 @@
-# .Net Toolkit
-The .Net Toolkit is split into multiple packages:
-- **Base .Net Toolkit**: Intended to be used in non Asp.Net application
-- **Asp.Net Toolkit**: Intended to be used in Asp.Net application
+# Your application name
+A brief description of your application.
-## Main functionalities
-- Handles setting up the connections with MongoDb, Redis, Kafka and LaunchDarkly
-- Exposes functionality to perform most operations on this tech stack while abstracting the implementation details of each technology
-- Standardizes the interactions with this tech stack across all the applications that use this package
-- Reduces the cost of evolving the interaction with this tech stack across all the applications
+## Applications wiki
+
+[Link to applications wiki](https://wiki.com/something)
-**Note:** This package does not intend to completely abstract, from the application, the technology being used.
-The application will still need to interact with some data types from the underlying technologies.
+## Main functionalities
+- Store data in the schema you want
+- API to create, update and delete entities and their data
+- Register entities (e.g. countries, holidays, stores)
+- Manage the data of each registered entity
+- Register notifications for an entity
+ - Every change made to a data point of an entity can trigger notifications to 1 or many destinations
+ - Use this to notify other applications that need to know when data changes
+ - Supported destinations:
+ - Kafka topic
+ - HTTP(S) webhook
# Application Architecture
[more information here](/documentation/architecture.md)
# Technical information
-For detailed information about each package look at:
-| Package | Documentation |
-| ----------- | ----------- |
-| Base .Net Toolkit | [doc](/src/Toolkit/README.md) |
-| Asp.Net Toolkit | [doc](/src/Toolkit.Asp/README.md) |
+## Stack
+This application uses the following technologies:
+- C# .Net
+- MongoDb
+- Redis
+
+The application also interacts with the following technologies:
+- Kafka
# Developer information
## Requisites
@@ -57,8 +65,7 @@ The available services are declared in the local environment Docker compose proj
This will run a Docker compose project and start several networked Docker containers will all the services and necessary tools to use the application.
The following services will be running in the containers:
-- 1 MongoDb instance
-- 1 Redis single node instances
+- List your services here
- Confluent community edition Kafka Broker
- Confluent Schema Registry
- A GUI for MongoDb
@@ -102,20 +109,19 @@ Accept the T&C and submit to enter.

Add the following databases:
-`redis://default@redis:6379`
+`redis://default@api_redis:6379`
`Kafka GUI`: [http://localhost:9002](http://localhost:9002)
**NOTES:**
-Add a topic with the name `myTestTopicJson` with, at least, 1 partition.
-Register the `myTestTopicJson-key` and `myTestTopicJson-value` schemas, using the contents of the files `setup/local/tester_kafka_json_schema_key.json` and `setup/local/tester_kafka_json_schema_value.json`, respectively.
-Add a topic with the name `myTestTopicAvro` with, at least, 1 partition.
-Register the `myTestTopicAvro-key` and `myTestTopicAvro-value` schemas, using the contents of the files `setup/local/tester_kafka_avro_schema_key.json` and `setup/local/tester_kafka_avro_schema_value.json`, respectively.
+Add a topic with the name `myTestTopic` with at least 1 partition.
+Add a schema with the subject `myTestTopic-value`, using the content of the file `setup/local/kafka_schema_json.json` and the type `JSON`.
`Kibana`: [http://localhost:9003](http://localhost:9003)
-`Test API`: [http://localhost:10000](http://localhost:10000)
+`API`: [http://localhost:10000](http://localhost:10000)
+Use the Postman collection at `setup/local/XPTO.postman_collection` to interact with the application.
-`Test API Swagger UI`: [http://localhost:10000/swagger](http://localhost:10000/swagger)
+`API Swagger UI`: [http://localhost:10000/swagger](http://localhost:10000/swagger)
### Stop the local environment
From the root of the project run the command
@@ -187,4 +193,4 @@ If the update flag is not provided, the script will print the report with all th
[more information here](/documentation/security.md)
## CI/CD
-[more information here](/documentation/cicd.md)
\ No newline at end of file
+[more information here](/documentation/cicd.md)
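
As a quick sanity check of the local endpoints the README now lists (API on 10000, MongoDb GUI on 9000, Redis GUI on 9001, Kafka GUI on 9002), the sketch below can be run once the compose project is up. It is not part of the repo; the ports are the compose defaults and the mongo-express basic-auth credentials are the ones hardcoded in `setup/local/docker-compose.yml`, so adjust both if you override them.

# Minimal smoke test for the local environment endpoints described above.
curl -fsSL http://localhost:10000/swagger > /dev/null && echo "API Swagger UI reachable"
curl -fsS -u appUser:appPw http://localhost:9000 > /dev/null && echo "MongoDb GUI reachable"
curl -fsS http://localhost:9001 > /dev/null && echo "Redis GUI reachable"
curl -fsS http://localhost:9002 > /dev/null && echo "Kafka GUI reachable"
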
diff --git a/cli/external_static_analysis.sh b/cli/external_static_analysis.sh
index e10e2ca..ad36b64 100644
--- a/cli/external_static_analysis.sh
+++ b/cli/external_static_analysis.sh
@@ -36,7 +36,7 @@ else
fi
TEST_COVERAGE_PATH="./test/**/${TEST_COVERAGE_FILE_NAME}";
-CMD="dotnet tool restore && dotnet sonarscanner begin /k:"${EXTERNAL_STATIC_ANALYSIS_PROJ_KEY}" /o:"${EXTERNAL_STATIC_ANALYSIS_ORG}" /d:sonar.token="${EXTERNAL_STATIC_ANALYSIS_TOKEN}" /d:sonar.host.url="${EXTERNAL_STATIC_ANALYSIS_HOST}" /d:sonar.cs.opencover.reportsPaths="${TEST_COVERAGE_PATH}" /d:sonar.projectBaseDir=/app /d:sonar.exclusions=**/bin/**,**/obj/**,setup/**,app/setup/** /d:sonar.coverage.exclusions=setup/**,app/setup/** ${EXTRA_OPTS} /d:sonar.qualitygate.wait=${SONAR_QG_WAIT} /d:sonar.qualitygate.timeout=${SONAR_QG_TIMEOUT_SEC} && dotnet build -p:UseLocalToolkit=true && chmod +x ./cli/test.sh && ./cli/test.sh --coverage && dotnet sonarscanner end /d:sonar.token="${EXTERNAL_STATIC_ANALYSIS_TOKEN}"";
+CMD="dotnet tool restore && dotnet sonarscanner begin /k:"${EXTERNAL_STATIC_ANALYSIS_PROJ_KEY}" /o:"${EXTERNAL_STATIC_ANALYSIS_ORG}" /d:sonar.token="${EXTERNAL_STATIC_ANALYSIS_TOKEN}" /d:sonar.host.url="${EXTERNAL_STATIC_ANALYSIS_HOST}" /d:sonar.cs.opencover.reportsPaths="${TEST_COVERAGE_PATH}" /d:sonar.projectBaseDir=/app /d:sonar.exclusions=**/bin/**,**/obj/**,setup/**,app/setup/** /d:sonar.coverage.exclusions=setup/**,app/setup/** ${EXTRA_OPTS} /d:sonar.qualitygate.wait=${SONAR_QG_WAIT} /d:sonar.qualitygate.timeout=${SONAR_QG_TIMEOUT_SEC} && dotnet build && chmod +x ./cli/test.sh && ./cli/test.sh --coverage && dotnet sonarscanner end /d:sonar.token="${EXTERNAL_STATIC_ANALYSIS_TOKEN}"";
if [ $USE_DOCKER -eq 1 ]; then
INTERACTIVE_FLAGS="-it";
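
For context on the changed `CMD` above (the `-p:UseLocalToolkit=true` flag is dropped from `dotnet build`), a hypothetical invocation of the script is sketched below. The variable names are taken from the command itself; the values are placeholders, and passing them as environment variables is an assumption, since the argument parsing happens earlier in the script and is outside this hunk.

# Hypothetical invocation; the real script may read these values from CLI
# arguments instead of the environment.
EXTERNAL_STATIC_ANALYSIS_PROJ_KEY="my-project" \
EXTERNAL_STATIC_ANALYSIS_ORG="my-org" \
EXTERNAL_STATIC_ANALYSIS_TOKEN="<sonar-token>" \
EXTERNAL_STATIC_ANALYSIS_HOST="https://sonarcloud.io" \
bash cli/external_static_analysis.sh
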
diff --git a/cli/start_elk.sh b/cli/start_elk.sh
index 80dd16b..9f4ee84 100644
--- a/cli/start_elk.sh
+++ b/cli/start_elk.sh
@@ -30,4 +30,4 @@ export COMPOSE_PROFILES;
docker network create myapp_shared || true;
-docker compose -f setup/local/docker-compose.elk.yml -p myapp_elk up --no-build "$@";
\ No newline at end of file
+docker compose -f setup/local/docker-compose.elk.yml -p myapp_elk up --no-build "$@";
diff --git a/cli/stop.sh b/cli/stop.sh
index 9ab262d..110102c 100644
--- a/cli/stop.sh
+++ b/cli/stop.sh
@@ -23,4 +23,4 @@ export COMPOSE_PROFILES;
docker compose -f setup/local/docker-compose.yml -p myapp down;
docker compose -f setup/local/docker-compose.elk.yml -p myapp_elk down;
-docker system prune -f --volumes;
\ No newline at end of file
+docker system prune -f --volumes;
diff --git a/setup/local/docker-compose.yml b/setup/local/docker-compose.yml
index 047b594..7bbbae3 100644
--- a/setup/local/docker-compose.yml
+++ b/setup/local/docker-compose.yml
@@ -1,45 +1,36 @@
services:
- tester:
+ api:
+ image: ${PROJECT_NAME:-myapp}_api:${IMAGE_TAG:-latest}
build:
context: ../../
- dockerfile: ./setup/local/Tester/Dockerfile
- container_name: "tester"
+ dockerfile: ./src/Api/Dockerfile
+ container_name: "api"
restart: on-failure
depends_on:
- mongodb:
+ api_db:
condition: service_healthy
- redis:
+ api_redis:
condition: service_healthy
environment:
- - DOTNET_ENVIRONMENT=Development
- DEPLOYMENT_ENV=local
+ - DOTNET_ENVIRONMENT=Development
- ASPNETCORE_HTTP_PORTS=10000
- - MONGO_CON_STR=mongodb://${MONGODB_ADMIN_USER:-admin}:${MONGODB_ADMIN_PW:-pw}@mongodb:27017/admin?authMechanism=SCRAM-SHA-256
- - REDIS_CON_STR=redis:6379
- - REDIS_PW=password
- - KAFKA_CON_STR=broker:29092
+ - MONGO_CON_STR=mongodb://admin:pw@api_db:27017/admin?authMechanism=SCRAM-SHA-256&replicaSet=rs0
+ - REDIS_CON_STR=api_redis:6379
- KAFKA_SCHEMA_REGISTRY_URL=http://schema-registry:8081
- - LD_ENV_SDK_KEY=${LD_ENV_SDK_KEY}
- - LD_CONTEXT_API_KEY=${LD_CONTEXT_API_KEY}
- - LD_CONTEXT_NAME=CTT .Net Toolkit - DEV
- - SERVICE_NAME=tester
- - SERVICE_VERSION=asf786
- - PROJECT_NAME=toolkit_tester
- - LOG_LEVEL=information
- - LOG_DESTINATION_URI=http://otel_collector:4317
- - EXPORTER_MODE=sync
- - OTEL_RESOURCE_ATTRIBUTES=team=tester,infra=aks
+ - KAFKA_CON_STR=broker:29092
ports:
- ${API_PORT:-10000}:10000
networks:
- - redis
- - mongodb
+ - api
+ - apiDb
+ - apiRedis
- kafka
- myapp_shared
- redis:
+ api_redis:
image: redis:7-alpine
- container_name: "redis"
+ container_name: "api_redis"
restart: on-failure
command:
- /bin/sh
@@ -51,7 +42,7 @@ services:
timeout: 3s
retries: 5
networks:
- - redis
+ - apiRedis
- myapp_shared
redis_gui:
@@ -60,14 +51,14 @@ services:
profiles: ["only_if_not_cicd"]
restart: on-failure
depends_on:
- redis:
+ api_redis:
condition: service_healthy
ports:
- ${REDIS_GUI_PORT:-9001}:5540
networks:
- - redis
+ - apiRedis
- mongodb:
+ api_db:
image: mongo:8-noble
entrypoint:
- bash
@@ -86,14 +77,14 @@ services:
timeout: 5s
retries: 3
start_period: 5s
- container_name: "mongodb"
+ container_name: "api_db"
restart: on-failure
environment:
- - MONGO_INITDB_ROOT_USERNAME=${MONGODB_ADMIN_USER:-admin}
- - MONGO_INITDB_ROOT_PASSWORD=${MONGODB_ADMIN_PW:-pw}
- - MONGO_INITDB_DATABASE=${MONGO_INITDB_DATABASE:-RefData}
+ - MONGO_INITDB_ROOT_USERNAME=admin
+ - MONGO_INITDB_ROOT_PASSWORD=pw
+ - MONGO_INITDB_DATABASE=MyDb
networks:
- - mongodb
+ - apiDb
- myapp_shared
# Used to start the replica set in the MongoDb instance (will run once and exit)
@@ -102,39 +93,39 @@ services:
container_name: "db_init"
restart: "no"
depends_on:
- mongodb:
+ api_db:
condition: service_healthy
command: >
- mongosh --username ${MONGODB_ADMIN_USER:-admin} --password ${MONGODB_ADMIN_PW:-pw} --host mongodb:27017 --eval ' rs.initiate( {
+ mongosh --username ${API_DB_ADMIN_USER:-admin} --password ${API_DB_ADMIN_PW:-pw} --host api_db:27017 --eval ' rs.initiate( {
_id : "rs0",
members: [
- { _id: 0, host: "mongodb:27017" }
+ { _id: 0, host: "api_db:27017" }
]
}) '
networks:
- - mongodb
+ - apiDb
- mongodb_gui:
+ api_db_gui:
image: mongo-express:latest
- container_name: "mongodb_gui"
+ container_name: "api_db_gui"
profiles: ["only_if_not_cicd"]
environment:
- - ME_CONFIG_MONGODB_SERVER=mongodb
+ - ME_CONFIG_MONGODB_SERVER=api_db
- ME_CONFIG_MONGODB_PORT=27017
- ME_CONFIG_MONGODB_ENABLE_ADMIN=true
- ME_CONFIG_MONGODB_AUTH_DATABASE=admin
- - ME_CONFIG_MONGODB_AUTH_USERNAME=${MONGODB_ADMIN_USER:-admin}
- - ME_CONFIG_MONGODB_AUTH_PASSWORD=${MONGODB_ADMIN_PW:-pw}
+ - ME_CONFIG_MONGODB_AUTH_USERNAME=admin
+ - ME_CONFIG_MONGODB_AUTH_PASSWORD=pw
- ME_CONFIG_BASICAUTH_USERNAME=appUser
- ME_CONFIG_BASICAUTH_PASSWORD=appPw
- - ME_CONFIG_MONGODB_URL=mongodb://${MONGODB_ADMIN_USER:-admin}:${MONGODB_ADMIN_PW:-pw}@mongodb:27017/?replicaSet=rs0
+ - ME_CONFIG_MONGODB_URL=mongodb://admin:pw@api_db:27017/?replicaSet=rs0
depends_on:
- mongodb:
+ api_db:
condition: service_healthy
ports:
- ${MONGO_GUI_PORT:-9000}:8081
networks:
- - mongodb
+ - apiDb
broker:
image: confluentinc/cp-kafka:latest
@@ -212,8 +203,9 @@ services:
- kafka
networks:
- redis:
- mongodb:
+ api:
+ apiDb:
+ apiRedis:
kafka:
myapp_shared:
- external: true
\ No newline at end of file
+ external: true
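
With the services renamed as above, a quick post-start check (a sketch using the local default credentials and container names from this compose file, not a repo script) could look like:

# Verify the single-node replica set "rs0" initiated by db_init reached PRIMARY.
docker exec api_db mongosh --username admin --password pw --quiet \
  --eval 'rs.status().members.map(m => m.stateStr)'
# Liveness check for Redis (add -a <password> if the local instance sets requirepass).
docker exec api_redis redis-cli ping
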