P1 #6

Open

pinkusrg wants to merge 3 commits into master from p1

Conversation

pinkusrg (Owner) commented Jul 9, 2020

aa

pinkusrg (Owner, Author) commented Jul 9, 2020

-{
+        {
     "submit-args": "
-        --properties-file=dcos/lvs/properties/spark-default.conf 
-        --name push_to_vertica_lvs_na1 
-        --conf spark.mesos.executor.docker.image=mesosphere/spark:2.1.0-2.2.0-1-hadoop-2.6 
-        --conf spark.mesos.driverEnv.MASTER=mesos://zk://leader.mesos:2181/mesos 
-        --conf spark.network.timeout=600000 
-        --conf spark.executor.heartbeatInterval=60000 
-        --conf spark.executor.memory=24G
-        --conf spark.driver.memory=6G 
-        --conf spark.mesos.driver.failoverTimeout=60 
-        --driver-cores=4 
-        --supervise 
+        --properties-file=dcos/lvs/properties/spark-default.conf
+        --name push_to_vertica_lvs_reprocess_reject_data_na7
+        --conf spark.mesos.executor.docker.image=mesosphere/spark:2.1.0-2.2.0-1-hadoop-2.6
+        --conf spark.mesos.driverEnv.MASTER=mesos://zk://leader.mesos:2181/mesos
+        --conf spark.network.timeout=600000
+        --conf spark.executor.heartbeatInterval=60000
+        --conf spark.executor.memory=20G
+        --conf spark.driver.memory=6G
+        --conf spark.mesos.driver.failoverTimeout=60
+        --driver-cores=4
+        --supervise
         --total-executor-cores=128
         --conf spark.executor.cores=4
         --class=infosight.nimble.spark.PushToVertica https://$USER:$API_KEY@hpeartifacts.jfrog.io/hpeartifacts/sbt-symphony/com/hpe/infosight-nimble-spark_2.11/1.1.134/infosight-nimble-spark-assembly-1.1.134.jar",
     "env": {
-        "CHECKPOINT_DIRECTORY": "hdfs://hdfs/user/nimblecheckpoint/lvs_na1_20200610_000",
-        "SPARK_VERTICA_KAFKA_TOPIC": "object-level-statsstream-payload-na1",
+        "CHECKPOINT_DIRECTORY": "hdfs://hdfs/user/nimblecheckpoint/lvs_na7_20200610_000",
+        "SPARK_VERTICA_KAFKA_TOPIC": "object-level-statsstream-payload-na7",
         "BATCH_INTERVAL": "180",
-        "DB_HOST": "na1-vertica-vip.lvs.nimblestorage.com", 
-        "DIFF_DB_TABLE": "array_ds_vol_diff", 
-        "ARRAY_MODEL_DB_TABLE": "array_lvl_post_model", 
-        "VOLUME_MODEL_DB_TABLE": "volume_lvl_post_model", 
-        "HDFS_URL": "hdfs://hdfs/user/nimble", 
-        "WEB_HDFS_URL": "webhdfs://hdfs/user/nimble", 
+        "DB_HOST": "na7-vertica-vip.lvs.nimblestorage.com",
+        "DIFF_DB_TABLE": "array_ds_vol_diff",
+        "ARRAY_MODEL_DB_TABLE": "array_lvl_post_model",
+        "VOLUME_MODEL_DB_TABLE": "volume_lvl_post_model",
+        "CPU_DB_TABLE": "ctrlr_sys_cpu_diff",
+        "CACHE_DB_TABLE": "array_ds_sys_diff",
+        "HDFS_URL": "hdfs://hdfs/user/nimble",
+        "WEB_HDFS_URL": "webhdfs://hdfs/user/nimble",
         "BOOTSTRAP_SERVER": "kafka-0-broker.kafka-0.autoip.dcos.thisdcos.directory:1030,kafka-1-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-2-broker.kafka-0.autoip.dcos.thisdcos.directory:1029,kafka-3-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-4-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-5-broker.kafka-0.autoip.dcos.thisdcos.directory:1027",
         "PARALLELISM": "128",
         "MAXRATE_PER_PARTION": "22",
-        "GROUP_ID": "spark_kafka_group_lvs_na1_20200129_000",
+        "GROUP_ID": "spark_kafka_group_lvs_na7_20200129_000",
         "LOG_LEVEL": "INFO",
         "DIFF_DB_SCHEMA": "stream0",
         "ARRAY_MODEL_DB_SCHEMA": "stream0",
         "VOLUME_MODEL_DB_SCHEMA": "stream0",
-        "POD_ID": "na1",
+        "CPU_DB_SCHEMA": "stream0",
+        "CACHE_DB_SCHEMA": "stream0",
+        "POD_ID": "na7",
         "DB_NAME": "dsci01",
-        "DB_USER": "load_user2",
+        "DB_USER": "load_user1",
         "FROM_LATEST": "false",
         "FROM_BEGINNING": "true",
-        "JOBNAME": "push_to_vertica-lvs_na1", 
-        "DB_PASSWORD": "islu2",
+        "JOBNAME": "push_to_vertica-lvs_reprocess_reject_data_na7",
+        "DB_PASSWORD": "islu1",
         "STREAMING_BACKPRESSURE_ENABLED": "false",
-        "APP_MODE": "real-time"
+        "APP_MODE": "real-time",
+        "DIFF_KAFKA_TOPIC": "array_ds_vol_diff_na7",
+        "ARRAY_LEVEL_KAFKA_TOPIC": "array_lvl_post_model_na7",
+        "VOLUME_LEVEL_KAFKA_TOPIC": "volume_lvl_post_model_na7",
+        "CPU_KAFKA_TOPIC": "ctrlr_sys_cpu_diff_na7",
+        "CACHE_KAFKA_TOPIC": "array_ds_sys_diff_na7",
+        "REJECT_DATA_SCHEMA": "symcon_na7_reject_data",
+        "REJECT_DATA_TABLE": "reject_data",
+        "REJECT_DATA_MICROBATCH_HISTORY_TABLE": "stream_microbatch_history",
+        "REJECT_DATA_CHECKPOINT": "hdfs://hdfs/user/nimblecheckpoint/reprocess/lvs_na7_20200610_000"
     }
 }
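
Per-pod values like the `na7` suffix and `POD_ID` have to stay in sync across several env entries (topics, checkpoints, DB_HOST, group IDs), which is easy to get wrong when these files are copied and edited by hand. Below is a minimal consistency-check sketch; it is illustrative only and not part of this PR. The file path is an assumption, and the env pairs are pulled out with a regex because the multi-line "submit-args" value keeps the file from parsing as strict JSON.

```python
import re

# Minimal per-pod consistency check -- an illustrative sketch, not part of
# this PR. The config file path below is an assumption; a regex is used
# instead of json.loads() because the multi-line "submit-args" value makes
# the file non-strict JSON.
ENV_PAIR = re.compile(r'"([A-Z_]+)"\s*:\s*"([^"]*)"')
POD_RE = re.compile(r'na\d+')

def check_pod_consistency(path, pod_id):
    env = dict(ENV_PAIR.findall(open(path).read()))
    problems = []
    if env.get("POD_ID") != pod_id:
        problems.append(f'POD_ID is {env.get("POD_ID")!r}, expected {pod_id!r}')
    for key, value in env.items():
        # Any pod suffix embedded in a value (topic, checkpoint, DB_HOST, ...)
        # should match the pod this file belongs to.
        stray = set(POD_RE.findall(value)) - {pod_id}
        if stray:
            problems.append(f"{key} = {value!r} mentions {sorted(stray)}")
    return problems

if __name__ == "__main__":
    # Hypothetical path to the file added in this PR.
    path = "dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-na7"
    for problem in check_pod_consistency(path, "na7"):
        print("MISMATCH:", problem)
```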



pinkusrg (Owner, Author) commented Jul 9, 2020

Details

```diff
diff --git a/dcos/lvs/services/spark/infosight-nimble-spark-na1 b/dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-na2
index d41dd9a..7777188 100644
--- a/dcos/lvs/services/spark/infosight-nimble-spark-na1
+++ b/dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-na2
@@ -1,45 +1,58 @@
 {
     "submit-args": "
         --properties-file=dcos/lvs/properties/spark-default.conf
-        --name push_to_vertica_lvs_na1
+        --name push_to_vertica_lvs_reprocess_reject_data_na2
         --conf spark.mesos.executor.docker.image=mesosphere/spark:2.1.0-2.2.0-1-hadoop-2.6
         --conf spark.mesos.driverEnv.MASTER=mesos://zk://leader.mesos:2181/mesos
         --conf spark.network.timeout=600000
         --conf spark.executor.heartbeatInterval=60000
-        --conf spark.executor.memory=24G
+        --conf spark.executor.memory=20G
         --conf spark.driver.memory=6G
-        --conf spark.mesos.driver.failoverTimeout=60
+        --conf spark.mesos.driver.failoverTimeout=60
         --driver-cores=4
         --supervise
         --total-executor-cores=128
         --conf spark.executor.cores=4
         --class=infosight.nimble.spark.PushToVertica https://$USER:$API_KEY@hpeartifacts.jfrog.io/hpeartifacts/sbt-symphony/com/hpe/infosight-nimble-spark_2.11/1.1.134/infosight-nimble-spark-assembly-1.1.134.jar",
     "env": {
-        "CHECKPOINT_DIRECTORY": "hdfs://hdfs/user/nimblecheckpoint/lvs_na1_20200610_000",
-        "SPARK_VERTICA_KAFKA_TOPIC": "object-level-statsstream-payload-na1",
+        "CHECKPOINT_DIRECTORY": "hdfs://hdfs/user/nimblecheckpoint/lvs_na2_20200610_000",
+        "SPARK_VERTICA_KAFKA_TOPIC": "object-level-statsstream-payload-na2",
         "BATCH_INTERVAL": "180",
-        "DB_HOST": "na1-vertica-vip.lvs.nimblestorage.com",
-        "DIFF_DB_TABLE": "array_ds_vol_diff",
-        "ARRAY_MODEL_DB_TABLE": "array_lvl_post_model",
-        "VOLUME_MODEL_DB_TABLE": "volume_lvl_post_model",
-        "HDFS_URL": "hdfs://hdfs/user/nimble",
-        "WEB_HDFS_URL": "webhdfs://hdfs/user/nimble",
+        "DB_HOST": "na2-vertica-vip.lvs.nimblestorage.com",
+        "DIFF_DB_TABLE": "array_ds_vol_diff",
+        "ARRAY_MODEL_DB_TABLE": "array_lvl_post_model",
+        "VOLUME_MODEL_DB_TABLE": "volume_lvl_post_model",
+        "CPU_DB_TABLE": "ctrlr_sys_cpu_diff",
+        "CACHE_DB_TABLE": "array_ds_sys_diff",
+        "HDFS_URL": "hdfs://hdfs/user/nimble",
+        "WEB_HDFS_URL": "webhdfs://hdfs/user/nimble",
         "BOOTSTRAP_SERVER": "kafka-0-broker.kafka-0.autoip.dcos.thisdcos.directory:1030,kafka-1-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-2-broker.kafka-0.autoip.dcos.thisdcos.directory:1029,kafka-3-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-4-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-5-broker.kafka-0.autoip.dcos.thisdcos.directory:1027",
         "PARALLELISM": "128",
         "MAXRATE_PER_PARTION": "22",
-        "GROUP_ID": "spark_kafka_group_lvs_na1_20200129_000",
+        "GROUP_ID": "spark_kafka_group_lvs_na2_20200129_000",
         "LOG_LEVEL": "INFO",
         "DIFF_DB_SCHEMA": "stream0",
         "ARRAY_MODEL_DB_SCHEMA": "stream0",
         "VOLUME_MODEL_DB_SCHEMA": "stream0",
-        "POD_ID": "na1",
+        "CPU_DB_SCHEMA": "stream0",
+        "CACHE_DB_SCHEMA": "stream0",
+        "POD_ID": "na2",
         "DB_NAME": "dsci01",
-        "DB_USER": "load_user2",
+        "DB_USER": "load_user1",
         "FROM_LATEST": "false",
         "FROM_BEGINNING": "true",
-        "JOBNAME": "push_to_vertica-lvs_na1",
-        "DB_PASSWORD": "islu2",
+        "JOBNAME": "push_to_vertica-lvs_reprocess_reject_data_na2",
+        "DB_PASSWORD": "islu1",
         "STREAMING_BACKPRESSURE_ENABLED": "false",
-        "APP_MODE": "real-time"
+        "APP_MODE": "real-time",
+        "DIFF_KAFKA_TOPIC": "array_ds_vol_diff_na2",
+        "ARRAY_LEVEL_KAFKA_TOPIC": "array_lvl_post_model_na2",
+        "VOLUME_LEVEL_KAFKA_TOPIC": "volume_lvl_post_model_na2",
+        "CPU_KAFKA_TOPIC": "ctrlr_sys_cpu_diff_na2",
+        "CACHE_KAFKA_TOPIC": "array_ds_sys_diff_na2",
+        "REJECT_DATA_SCHEMA": "symcon_na2_reject_data",
+        "REJECT_DATA_TABLE": "reject_data",
+        "REJECT_DATA_MICROBATCH_HISTORY_TABLE": "stream_microbatch_history",
+        "REJECT_DATA_CHECKPOINT": "hdfs://hdfs/user/nimblecheckpoint/reprocess/lvs_na2_20200610_000"
     }
 }
```

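To see at a glance which env entries a reprocess variant changes relative to its base pod config (as the diff above does for na1 versus na2), a small comparison sketch follows. It is illustrative only and not part of this PR: both file paths are assumptions, and the env pairs are again extracted with a regex rather than json.loads() because of the embedded newlines in "submit-args".

```python
import re

# Sketch (not part of the PR): print which env entries differ between a base
# pod config and its reprocess-reject-data variant. Paths are assumptions.
ENV_PAIR = re.compile(r'"([A-Z_]+)"\s*:\s*"([^"]*)"')

def env_of(path):
    with open(path) as fh:
        return dict(ENV_PAIR.findall(fh.read()))

base = env_of("dcos/lvs/services/spark/infosight-nimble-spark-na1")
variant = env_of("dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-na2")

# Keys present in only one file show up as None on the other side.
for key in sorted(set(base) | set(variant)):
    if base.get(key) != variant.get(key):
        print(f"{key}: {base.get(key)!r} -> {variant.get(key)!r}")
```
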
pinkusrg (Owner, Author) commented Jul 9, 2020


<details>
<summary>I could use some help...</summary>
<p>

```c#
public class Order
{
    public int OrderId { get; set; }
    public int CustomerId { get; set; }

    public List<int> Products { get; set; }
}
```

</p>
</details> 

pinkusrg (Owner, Author) commented Jul 9, 2020

lvs_na1 and lvs_reject_data_na1

diff --git a/dcos/lvs/services/spark/infosight-nimble-spark-na1 b/dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-na1
index d41dd9a..23fe214 100644
--- a/dcos/lvs/services/spark/infosight-nimble-spark-na1
+++ b/dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-na1
@@ -1,7 +1,7 @@
 {
     "submit-args": "
         --properties-file=dcos/lvs/properties/spark-default.conf 
-        --name push_to_vertica_lvs_na1 
+        --name push_to_vertica_lvs_reprocess_reject_data_na1
         --conf spark.mesos.executor.docker.image=mesosphere/spark:2.1.0-2.2.0-1-hadoop-2.6 
         --conf spark.mesos.driverEnv.MASTER=mesos://zk://leader.mesos:2181/mesos 
         --conf spark.network.timeout=600000 
@@ -22,6 +22,8 @@
         "DIFF_DB_TABLE": "array_ds_vol_diff", 
         "ARRAY_MODEL_DB_TABLE": "array_lvl_post_model", 
         "VOLUME_MODEL_DB_TABLE": "volume_lvl_post_model", 
+        "CPU_DB_TABLE": "ctrlr_sys_cpu_diff",
+        "CACHE_DB_TABLE": "array_ds_sys_diff",
         "HDFS_URL": "hdfs://hdfs/user/nimble", 
         "WEB_HDFS_URL": "webhdfs://hdfs/user/nimble", 
         "BOOTSTRAP_SERVER": "kafka-0-broker.kafka-0.autoip.dcos.thisdcos.directory:1030,kafka-1-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-2-broker.kafka-0.autoip.dcos.thisdcos.directory:1029,kafka-3-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-4-broker.kafka-0.autoip.dcos.thisdcos.directory:1025,kafka-5-broker.kafka-0.autoip.dcos.thisdcos.directory:1027",
@@ -32,14 +34,25 @@
         "DIFF_DB_SCHEMA": "stream0",
         "ARRAY_MODEL_DB_SCHEMA": "stream0",
         "VOLUME_MODEL_DB_SCHEMA": "stream0",
+        "CPU_DB_SCHEMA": "stream0",
+        "CACHE_DB_SCHEMA": "stream0",
         "POD_ID": "na1",
         "DB_NAME": "dsci01",
         "DB_USER": "load_user2",
         "FROM_LATEST": "false",
         "FROM_BEGINNING": "true",
-        "JOBNAME": "push_to_vertica-lvs_na1", 
+        "JOBNAME": "push_to_vertica-lvs_reprocess_reject_data_na1", 
         "DB_PASSWORD": "islu2",
         "STREAMING_BACKPRESSURE_ENABLED": "false",
-        "APP_MODE": "real-time"
+        "APP_MODE": "real-time",
+        "DIFF_KAFKA_TOPIC": "array_ds_vol_diff_na1",
+        "ARRAY_LEVEL_KAFKA_TOPIC": "array_lvl_post_model_na1",
+        "VOLUME_LEVEL_KAFKA_TOPIC": "volume_lvl_post_model_na1",
+        "CPU_KAFKA_TOPIC": "ctrlr_sys_cpu_diff_na1",
+        "CACHE_KAFKA_TOPIC": "array_ds_sys_diff_na1",
+        "REJECT_DATA_SCHEMA": "symcon_na1_reject_data",
+        "REJECT_DATA_TABLE": "reject_data",
+        "REJECT_DATA_MICROBATCH_HISTORY_TABLE": "stream_microbatch_history",
+        "REJECT_DATA_CHECKPOINT": "hdfs://hdfs/user/nimblecheckpoint/reprocess/lvs_na1_20200610_000"
     }
 }
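
The reject-data variant differs from the base pod file only by the renamed job and a fixed set of extra env keys, so it could also be generated instead of hand-edited, which is where per-pod copy-paste mistakes tend to creep in. The sketch below is a hypothetical helper, not part of this PR; the paths and substitution rules are taken from the diff above, and the exact ordering of the appended keys is an assumption.

```python
import re
from pathlib import Path

# Hypothetical helper: derive the reprocess-reject-data config for one pod
# from its base file, applying the same changes the diff above shows.
POD = "na1"
BASE = Path(f"dcos/lvs/services/spark/infosight-nimble-spark-{POD}")
OUT = Path(f"dcos/lvs/services/spark/infosight-nimble-spark-reprocess-reject-data-{POD}")

# Extra env entries from the diff; ordering here is an assumption.
EXTRA_ENV = {
    "CPU_DB_TABLE": "ctrlr_sys_cpu_diff",
    "CACHE_DB_TABLE": "array_ds_sys_diff",
    "CPU_DB_SCHEMA": "stream0",
    "CACHE_DB_SCHEMA": "stream0",
    "DIFF_KAFKA_TOPIC": f"array_ds_vol_diff_{POD}",
    "ARRAY_LEVEL_KAFKA_TOPIC": f"array_lvl_post_model_{POD}",
    "VOLUME_LEVEL_KAFKA_TOPIC": f"volume_lvl_post_model_{POD}",
    "CPU_KAFKA_TOPIC": f"ctrlr_sys_cpu_diff_{POD}",
    "CACHE_KAFKA_TOPIC": f"array_ds_sys_diff_{POD}",
    "REJECT_DATA_SCHEMA": f"symcon_{POD}_reject_data",
    "REJECT_DATA_TABLE": "reject_data",
    "REJECT_DATA_MICROBATCH_HISTORY_TABLE": "stream_microbatch_history",
    "REJECT_DATA_CHECKPOINT": f"hdfs://hdfs/user/nimblecheckpoint/reprocess/lvs_{POD}_20200610_000",
}

text = BASE.read_text()
# Rename the driver (--name) and the JOBNAME so both services can coexist.
text = text.replace(f"push_to_vertica_lvs_{POD}", f"push_to_vertica_lvs_reprocess_reject_data_{POD}")
text = text.replace(f"push_to_vertica-lvs_{POD}", f"push_to_vertica-lvs_reprocess_reject_data_{POD}")
# Append the reject-data keys right after the last env entry in the base file.
extra = ",\n" + ",\n".join(f'        "{k}": "{v}"' for k, v in EXTRA_ENV.items())
text = re.sub(r'("APP_MODE":\s*"real-time")', r"\1" + extra, text, count=1)
OUT.write_text(text)
```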
