Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
a554996
updating the rtr charts to disable the feature flags
alog-enquizit Sep 15, 2025
bb835bc
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
169533c
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
488ab8e
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
130b0bf
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
4a0916e
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
887e493
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
9057f60
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
66c3b73
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
f791d03
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
265b672
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
9916007
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
af507e7
updating the rtr charts to disable the feature flags
alog-enquizit Sep 16, 2025
f689e52
updating the rtr charts to disable the feature flags
alog-enquizit Sep 17, 2025
13d8bde
updating the rtr charts to disable the feature flags
alog-enquizit Sep 17, 2025
db32273
updating the rtr charts
alog-enquizit Sep 17, 2025
9fa7d80
updating the rtr charts
alog-enquizit Sep 17, 2025
24a330d
updating the rtr charts
alog-enquizit Sep 17, 2025
d85d30a
Merge branch 'main' into rtr-perf-sep152025
alog-enquizit Sep 18, 2025
465c132
updating the rtr charts
alog-enquizit Sep 18, 2025
f519e6c
Merge branch 'main' into rtr-perf-sep152025
alog-enquizit Sep 18, 2025
ce5890f
updating the rtr charts
alog-enquizit Sep 18, 2025
0abb9a8
updating the rtr charts
alog-enquizit Sep 18, 2025
4a27fa3
updating the rtr charts
alog-enquizit Sep 18, 2025
d4304de
updating the rtr charts
alog-enquizit Sep 18, 2025
5bff0b9
updating the rtr charts
alog-enquizit Sep 19, 2025
5f32f19
updating the rtr charts
alog-enquizit Sep 19, 2025
5e6463e
updating the rtr charts
alog-enquizit Sep 19, 2025
75c3851
updating the rtr charts
alog-enquizit Sep 19, 2025
c3d4b9f
updating the rtr charts
alog-enquizit Sep 19, 2025
c45940d
updating the rtr charts
alog-enquizit Sep 19, 2025
d87574a
updating the rtr charts
alog-enquizit Sep 19, 2025
a452fe4
updating the rtr charts
alog-enquizit Sep 19, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions charts/debezium/templates/deployment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,9 @@ spec:
resources:
{{- toYaml .Values.connect.resources | nindent 12 }}

{{- if not .Values.connect.properties.probe }}
livenessProbe:
initialDelaySeconds: 60 # Wait 60s after container starts before first check
initialDelaySeconds: 600 # Wait 600s (10 min) after container starts before first check
periodSeconds: 20 # Check every 20s
timeoutSeconds: 5 # Timeout for each check
failureThreshold: 3 # Mark container unhealthy after 3 consecutive failures
Expand Down Expand Up @@ -86,10 +87,12 @@ spec:

# If all connectors and tasks are running, container is healthy
exit 0
{{- end }}

# Readiness probe: Checks if container is ready to accept traffic
{{- if not .Values.connect.properties.probe }}
readinessProbe:
initialDelaySeconds: 60
initialDelaySeconds: 600
periodSeconds: 20
timeoutSeconds: 5
failureThreshold: 3
Expand Down Expand Up @@ -134,6 +137,7 @@ spec:

# If all connectors and tasks are running, container is ready
exit 0
{{- end }}

volumeMounts:
- name: config
Expand Down
55 changes: 24 additions & 31 deletions charts/debezium/values-dts1.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,41 +24,40 @@ connect:

resources:
limits:
cpu: 1000m
memory: 2Gi
cpu: 2000m
memory: 8Gi
requests:
cpu: 500m
memory: 1Gi
cpu: 1500m
memory: 7Gi

properties:
group_id: "debezium-odse-srte-connector-v081525"
group_id: "debezium-odse-srte-connector-v091725-1"
topics_basename: "debezium-odse-srte-connector"
default_replication_factor: 2
default_partitions: 10
default_cleanup: "compact"
sql_server_agent_override: false
sql_server_agent_status: ""
bootstrap_server: "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092"
bootstrap_server: "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092"
probe: false

connector_enable:
nbs_odse: "enabled"
nbs_srte: "enabled"
nbs_odse_meta: "enabled"

sqlserverconnector_odse: {
"name": "debezium-odse-connector-v081525",
"name": "debezium-odse-connector-v091725-1",
"config": {
"connector.class": "io.debezium.connector.sqlserver.SqlServerConnector",
"database.hostname": "nbs-db.private-dts1.nbspreview.com",
"database.hostname": "cdc-nbs-alabama-rds-mssql.czya31goozkz.us-east-1.rds.amazonaws.com",
"database.port": "1433",
"database.user": "",
"database.password": "",
"database.dbname": "nbs_odse",
"database.names": "nbs_odse",
"database.server.name": "odse",
"database.history.kafka.bootstrap.servers": "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092",
"database.history.kafka.bootstrap.servers": "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092",
"database.history.kafka.topic": "dbhistory.database_server_name.database_name",
# Uncomment following to manually bypass the sqlserver agent status query results
#"database.sqlserver.agent.status.query": "select dbo.IsSqlAgentRunning()",
Expand All @@ -70,8 +69,7 @@ connect:
"producer.message.max.bytes": "10000000", #10MB
"snapshot.mode": "no_data",
"schema.history.internal.kafka.topic": "odse-schema-history",
"schema.history.internal.kafka.bootstrap.servers": "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092",
"schema.history.internal.kafka.bootstrap.servers": "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092",
"table.include.list": "dbo.Person, dbo.Organization, dbo.Observation, dbo.Public_health_case, dbo.Treatment,
dbo.state_defined_field_data, dbo.Notification, dbo.Interview,
dbo.Place, dbo.CT_contact, dbo.Auth_user, dbo.Intervention, dbo.Act_relationship",
Expand All @@ -82,7 +80,7 @@ connect:
"topic.creation.default.cleanup.policy": "compact",
"time.precision.mode": "connect",
"transforms": "dropPrefix, convertTimezone",
"transforms.dropPrefix.regex": "cdc\\.NBS_ODSE\\.dbo\\.(.+)",
"transforms.dropPrefix.regex": "cdc\\.nbs_odse\\.dbo\\.(.+)",
"transforms.dropPrefix.type": "org.apache.kafka.connect.transforms.RegexRouter",
"transforms.dropPrefix.replacement": "nbs_$1",
"transforms.convertTimezone.type": "io.debezium.transforms.TimezoneConverter",
Expand All @@ -93,18 +91,17 @@ connect:
}

sqlserverconnector_odse_meta: {
"name": "debezium-odse-meta-tables-connector-v081525",
"name": "debezium-odse-meta-tables-connector-v091725-1",
"config": {
"connector.class": "io.debezium.connector.sqlserver.SqlServerConnector",
"database.hostname": "nbs-db.private-dts1.nbspreview.com",
"database.hostname": "cdc-nbs-alabama-rds-mssql.czya31goozkz.us-east-1.rds.amazonaws.com",
"database.port": "1433",
"database.user": "",
"database.password": "",
"database.dbname": "nbs_odse",
"database.names": "nbs_odse",
"database.server.name": "odse-meta",
"database.history.kafka.bootstrap.servers": "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092",
"database.history.kafka.bootstrap.servers": "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092",
"database.history.kafka.topic": "dbhistory.database_server_name.database_name",
# Uncomment following to manually bypass the sqlserver agent status query results
#"database.sqlserver.agent.status.query": "select dbo.IsSqlAgentRunning()",
Expand All @@ -116,8 +113,7 @@ connect:
"producer.message.max.bytes": "10000000", #10MB
"snapshot.mode": "no_data",
"schema.history.internal.kafka.topic": "odse-schema-history",
"schema.history.internal.kafka.bootstrap.servers": "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092",
"schema.history.internal.kafka.bootstrap.servers": "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092",
"table.include.list": "dbo.Page_cond_mapping, dbo.NBS_page, dbo.NBS_ui_metadata, dbo.NBS_rdb_metadata,
dbo.state_defined_field_metadata, dbo.NBS_configuration, dbo.LOOKUP_QUESTION",
"tasks.max": "1",
Expand All @@ -128,7 +124,7 @@ connect:
"time.precision.mode": "connect",
"transforms": "dropPrefix, convertTimezone, unwrap, convertTimestampsConfig_add_time,
convertTimestampsConfig_last_chg_time, convertTimestampsConfig_record_status_time",
"transforms.dropPrefix.regex": "cdc\\.NBS_ODSE\\.dbo\\.(.+)",
"transforms.dropPrefix.regex": "cdc\\.nbs_odse\\.dbo\\.(.+)",
"transforms.dropPrefix.type": "org.apache.kafka.connect.transforms.RegexRouter",
"transforms.dropPrefix.replacement": "nrt_odse_$1",
"transforms.convertTimezone.type": "io.debezium.transforms.TimezoneConverter",
Expand All @@ -152,18 +148,17 @@ connect:
}

sqlserverconnector_srte: {
"name": "debezium-srte-connector-v081525",
"name": "debezium-srte-connector-v091725-1",
"config": {
"connector.class": "io.debezium.connector.sqlserver.SqlServerConnector",
"database.hostname": "nbs-db.private-dts1.nbspreview.com",
"database.hostname": "cdc-nbs-alabama-rds-mssql.czya31goozkz.us-east-1.rds.amazonaws.com",
"database.port": "1433",
"database.user": "",
"database.password": "",
"database.dbname": "nbs_srte",
"database.names": "nbs_srte",
"database.server.name": "srte",
"database.history.kafka.bootstrap.servers": "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092",
"database.history.kafka.bootstrap.servers": "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092",
"database.history.kafka.topic": "dbhistory.database_server_name.database_name",
# Uncomment following to manually bypass the sqlserver agent status query results
#"database.sqlserver.agent.status.query": "select dbo.IsSqlAgentRunning()",
Expand All @@ -175,8 +170,7 @@ connect:
"producer.message.max.bytes": "10000000", #10MB
"snapshot.mode": "no_data",
"schema.history.internal.kafka.topic": "srte-schema-history",
"schema.history.internal.kafka.bootstrap.servers": "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092",
"schema.history.internal.kafka.bootstrap.servers": "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092",
"table.include.list": "dbo.Condition_code,dbo.Program_area_code,dbo.Language_code,dbo.State_code,dbo.Unit_code,
dbo.Cntycity_code_value,dbo.Lab_result,dbo.Country_code,dbo.Labtest_loinc,dbo.ELR_XREF,
dbo.Loinc_condition,dbo.Loinc_snomed_condition,dbo.Lab_test,dbo.Zip_code_value,
Expand Down Expand Up @@ -208,7 +202,7 @@ connect:
"transforms.dropPrefix.replacement": "nrt_srte_$1",

### Snomed condition specific transforms
"transforms.dropPrefixConfig.regex": "cdc\\.NBS_SRTE\\.dbo\\.Snomed_condition",
"transforms.dropPrefixConfig.regex": "cdc\\.nbs_srte\\.dbo\\.Snomed_condition",
"transforms.dropPrefixConfig.type": "org.apache.kafka.connect.transforms.RegexRouter",
"transforms.dropPrefixConfig.replacement": "nrt_srte_Snomed_condition",
"transforms.unwrapConfig.type": "io.debezium.transforms.ExtractNewRecordState",
Expand Down Expand Up @@ -277,14 +271,13 @@ connect:

env:
- name: BOOTSTRAP_SERVERS
value: "b-1.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092,
b-2.nrtreportingdebezium.89ln12.c11.kafka.us-east-1.amazonaws.com:9092"
value: "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092"
- name: LOG_LEVEL
value: "INFO"
- name: KAFKA_LOG4J_OPTS
value: "-Dlog4j.configuration=file:/kafka/config/log4j.properties"
- name: NAME
value: "debezium-odse-srte-connector-v081525"
value: "debezium-odse-srte-connector-v091725-1"
- name: TZ
value: "UTC"

Expand Down
24 changes: 12 additions & 12 deletions charts/kafka-connect-sink/values-dts1.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ servicePort: 8083
## ref: https://docs.confluent.io/current/connect/userguide.html#configuring-workers
configurationOverrides:
"plugin.path": "/usr/share/confluent-hub-components,/usr/share/java"
"topics_basename": "kafka-sink-connector-070825"
"topics_basename": "kafka-sink-connector-v091725-1"
"key.converter": "org.apache.kafka.connect.json.JsonConverter"
"value.converter": "org.apache.kafka.connect.json.JsonConverter"
"key.converter.schemas.enable": "false"
Expand All @@ -37,13 +37,13 @@ configurationOverrides:

sqlServerConnectorEnabled: true
sqlServerConnector: {
"name": "Kafka-Connect-SqlServer-Sink-071025",
"name": "Kafka-Connect-SqlServer-Sink-v091725-1",
"config": {
"connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
"tasks.max": "1",
"tasks.max": "2",
"offset.flush.interval.ms": "60000",
"batch.size": "200",
"connection.url": "jdbc:sqlserver://nbs-db.EXAMPLE_FIXME.nbspreview.com:1433;databaseName=rdb_modern;encrypt=true;trustServerCertificate=true;",
"batch.size": "5000",
"connection.url": "jdbc:sqlserver://cdc-nbs-alabama-rds-mssql.czya31goozkz.us-east-1.rds.amazonaws.com:1433;databaseName=rdb_modern;encrypt=true;trustServerCertificate=true;",
"connection.user": "",
"connection.password": "",
"connection.pool.min_size": "5",
Expand Down Expand Up @@ -89,12 +89,12 @@ JDBC_CONNECTOR_CONFIG: /etc/jdbcConnector.sh
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
resources:
# limits:
# memory: "2Gi"
# cpu: "1000m"
# requests:
# memory: "1Gi"
# cpu: "500m"
limits:
memory: "8Gi"
cpu: "2000m"
requests:
memory: "7Gi"
cpu: "1500m"

## Custom pod annotations
podAnnotations: { }
Expand Down Expand Up @@ -137,7 +137,7 @@ prometheus:
## You can list load balanced service endpoint, or list of all brokers (which is hard in K8s). e.g.:
## bootstrapServers: "PLAINTEXT://dozing-prawn-kafka-headless:9092"
kafka:
bootstrapServers: "b-1.nrtreporting.rx5iwx.c5.kafka.us-east-1.amazonaws.com:9092,b-2.nrtreporting.rx5iwx.c5.kafka.us-east-1.amazonaws.com:9092"
bootstrapServers: "b-2.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092,b-1.nrtclusterv2.6bwemz.c13.kafka.us-east-1.amazonaws.com:9092"
default_replication_factor: 2
default_partitions: 10
default_cleanup: "compact"
Expand Down
4 changes: 3 additions & 1 deletion charts/rtr/templates/deployment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,9 @@ spec:
- name: FF_SERVICE_DISABLE
value: {{- if and (hasKey $service "featureFlag") (hasKey $service.featureFlag "serviceDisable") }} {{ $service.featureFlag.serviceDisable | quote }} {{- else }} "false" {{- end }}
- name: FF_THREAD_POOL_SIZE
value: {{- if and (hasKey $service "featureFlag") (hasKey $service.featureFlag "threadPoolSize") }} {{ $service.featureFlag.threadPoolSize }} {{- else }} 1 {{- end }}
value: {{- if and (hasKey $service "featureFlag") (hasKey $service.featureFlag "threadPoolSize") }} {{ $service.featureFlag.threadPoolSize }} {{- else }} "1" {{- end }}
- name: KAFKA_CONSUMER_MAX_POLL_RECS
value: {{- if and (hasKey $service "kafka") (hasKey $service.kafka "maxPollRecs") }} {{ $service.kafka.maxPollRecs | quote }} {{- else }} "200" {{- end }}

{{- if $service.nodeSelector }}
nodeSelector:
Expand Down
Loading