Skip to content

Commit 627eb27

Browse files
author
anders-wartoft
committed
0.1.4-SNAPSHOT
1 parent a915ba5 commit 627eb27

45 files changed

Lines changed: 1456 additions & 1353 deletions

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

BUILD_NUMBER

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
66

Makefile

Lines changed: 20 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,13 @@
11
GIT_VERSION := $(shell git describe --tags `git rev-list --tags --max-count=1`)
22

3+
# Build number file
4+
BUILD_NUMBER_FILE := BUILD_NUMBER
5+
BUILD_NUMBER := $(shell cat $(BUILD_NUMBER_FILE))
6+
NEXT_BUILD_NUMBER := $(shell echo $$(($(BUILD_NUMBER)+1)))
7+
8+
# Add build number to Go binaries
9+
BUILD_LDFLAGS := -X 'main.BuildNumber=$(NEXT_BUILD_NUMBER)'
10+
311
.PHONY: all upstream downstream clean
412

513

@@ -16,27 +24,33 @@ src/version/version.go:
1624

1725
upstream: src/version/version.go src/upstream/upstream.go
1826
mkdir -p ./target
19-
go build -o ./target/upstream src/upstream/upstream.go
27+
go build -ldflags '$(BUILD_LDFLAGS)' -o ./target/upstream src/upstream/upstream.go
28+
@echo $(NEXT_BUILD_NUMBER) > $(BUILD_NUMBER_FILE)
2029

2130
upstream-linux-arm64: src/version/version.go src/upstream/upstream.go
2231
mkdir -p ./target
23-
GOOS=linux GOARCH=arm64 go build -o ./target/upstream-linux-arm64 src/upstream/upstream.go
32+
GOOS=linux GOARCH=arm64 go build -ldflags '$(BUILD_LDFLAGS)' -o ./target/upstream-linux-arm64 src/upstream/upstream.go
33+
@echo $(NEXT_BUILD_NUMBER) > $(BUILD_NUMBER_FILE)
2434

2535
upstream-linux-amd64: src/version/version.go src/upstream/upstream.go
2636
mkdir -p ./target
27-
GOOS=linux GOARCH=amd64 go build -o ./target/upstream-linux-amd64 src/upstream/upstream.go
37+
GOOS=linux GOARCH=amd64 go build -ldflags '$(BUILD_LDFLAGS)' -o ./target/upstream-linux-amd64 src/upstream/upstream.go
38+
@echo $(NEXT_BUILD_NUMBER) > $(BUILD_NUMBER_FILE)
2839

2940
downstream: src/version/version.go src/downstream/downstream.go
3041
mkdir -p ./target
31-
go build -o ./target/downstream src/downstream/downstream.go
42+
go build -ldflags '$(BUILD_LDFLAGS)' -o ./target/downstream src/downstream/downstream.go
43+
@echo $(NEXT_BUILD_NUMBER) > $(BUILD_NUMBER_FILE)
3244

3345
downstream-linux-arm64: src/version/version.go src/downstream/downstream.go
3446
mkdir -p ./target
35-
GOOS=linux GOARCH=arm64 go build -o ./target/downstream-linux-arm64 src/downstream/downstream.go
47+
GOOS=linux GOARCH=arm64 go build -ldflags '$(BUILD_LDFLAGS)' -o ./target/downstream-linux-arm64 src/downstream/downstream.go
48+
@echo $(NEXT_BUILD_NUMBER) > $(BUILD_NUMBER_FILE)
3649

3750
downstream-linux-amd64: src/version/version.go src/downstream/downstream.go
3851
mkdir -p ./target
39-
GOOS=linux GOARCH=amd64 go build -o ./target/downstream-linux-amd64 src/downstream/downstream.go
52+
GOOS=linux GOARCH=amd64 go build -ldflags '$(BUILD_LDFLAGS)' -o ./target/downstream-linux-amd64 src/downstream/downstream.go
53+
@echo $(NEXT_BUILD_NUMBER) > $(BUILD_NUMBER_FILE)
4054

4155
build-all: upstream downstream upstream-linux-arm64 upstream-linux-amd64 downstream-linux-arm64 downstream-linux-amd64
4256

README.md

Lines changed: 23 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -9,22 +9,23 @@ For resend of lost events that gap-detection identifies, a back channel must be
99

1010
1. **Clone and build:**
1111
```bash
12-
git clone <your-repo-url>
12+
git clone https://github.com/anders-wartoft/air-gap.git
1313
cd air-gap
14-
make
14+
make all
1515
```
16+
This will build the upstream and downstream binaries as well as the Kafka Streams Java application for deduplication.
1617

1718
2. **Run a local test (no Kafka required):**
18-
- Edit `config/upstream3.properties` and `config/downstream3.properties` as needed (set `targetIP` to your local IP).
19+
- Edit `config/upstream.properties` and `config/downstream.properties` as needed (set `targetIP` to your local IP).
1920
- In one terminal:
2021
```bash
21-
go run src/downstream/downstream.go config/downstream3.properties
22+
go run src/downstream/downstream.go config/downstream.properties
2223
```
2324
- In another terminal:
2425
```bash
25-
go run src/upstream/upstream.go config/upstream3.properties
26+
go run src/upstream/upstream.go config/upstream.properties
2627
```
27-
- You should see messages sent and received.
28+
- You should see messages received in the first terminal.
2829

2930
3. **Next steps:**
3031
- Connect to real Kafka by editing the config files.
@@ -35,7 +36,6 @@ For resend of lost events that gap-detection identifies, a back channel must be
3536
- To set up redundancy and/or load balancing, see [Redundancy and Load Balancing.md](doc/Redundancy%20and%20Load%20Balancing.md)
3637

3738

38-
3939
## Notation
4040
There are four executable files that together constitute the transfer software.
4141
- Upstream - on the sending side of the diode, also used as the name of the program that consumes Kafka events and produces UDP packets
@@ -56,19 +56,19 @@ The first topic may contain duplicates but the one from the deduplicator should
5656
## Getting started
5757
### Very simple use case
5858
To enable users to get started without Kafka and without hardware diode, use the following properties files:
59-
- upstream3.properties
60-
- downstream3.properties
59+
- upstream.properties
60+
- downstream.properties
6161

62-
These properties files are configured for getting a few random strings instead of reading from Kafka and to send with UDP without encyption. Change the targetIP in upstream3.properties to the one you would like to send to, and change the targetIP in downstream3.properties to the same value. The IP address must be one that downstrem can bind to and that upstream can send to.
62+
These properties files are configured for getting a few random strings instead of reading from Kafka and to send with UDP without encryption. Change the targetIP in upstream.properties to the one you would like to send to. The targetIP in downstream.properties is set to 0.0.0.0 so it will bind to all local addresses.
6363

6464
In one terminal, start the server with:
6565
```
66-
go run src/downstream/downstream.go config/downstream3.properties
66+
go run src/downstream/downstream.go config/downstream.properties
6767
```
6868

6969
In a new terminal, start the client (sender) with:
7070
```
71-
go run src/upstream/upstream.go config/upstream3.properties
71+
go run src/upstream/upstream.go config/upstream.properties
7272
```
7373
A few messages should now be sent from upstream and received by downstream. From here, add encryption and connections to Kafka to enable all features.
7474

@@ -149,6 +149,7 @@ id=Upstream_1
149149
nic=en0
150150
targetIP=127.0.0.1
151151
targetPort=1234
152+
source=kafka
152153
bootstrapServers=192.168.153.138:9092
153154
topic=transfer
154155
groupID=test
@@ -183,9 +184,10 @@ export AIRGAP_UPSTREAM_TARGET_IP=255.255.255.255
183184
| Config file property name | Environment variable name | Default value | Description |
184185
|--------------------------|--------------------------|---------------|-------------|
185186
| id | AIRGAP_UPSTREAM_ID | | Name of the instance. Will be used in logging and when sending status messages |
186-
| verbose | AIRGAP_UPSTREAM_VERBOSE | false | true gives extra logging |
187+
| logLevel | AIRGAP_UPSTREAM_LOG_LEVEL | info | debug, info, error, warn, fatal |
188+
| source | AIRGAP_UPSTREAM_SOURCE | | kafka or random |
187189
| nic | AIRGAP_UPSTREAM_NIC | | What nic to use for sending to downstream |
188-
| targetIP | AIRGAP_UPSTREAM_TARGET_IP | | Downstream air-gap ip address |
190+
| targetIP | AIRGAP_UPSTREAM_TARGET_IP | | Downstream air-gap ip address, if IPv6, enclose the address with brackets, like [::1] |
189191
| targetPort | AIRGAP_UPSTREAM_TARGET_PORT | | Downstream air-gap ip port |
190192
| bootstrapServers | AIRGAP_UPSTREAM_BOOTSTRAP_SERVERS | | Bootstrap url for Kafka, with port |
191193
| topic | AIRGAP_UPSTREAM_TOPIC | | Topic name in Kafka to read from |
@@ -234,7 +236,7 @@ The property privateKeyFiles should point to one or more private key files that
234236
| Config file property name | Environment variable name | Default value | Description |
235237
|--------------------------|--------------------------|---------------|-------------|
236238
| id | AIRGAP_DOWNSTREAM_ID | | Name of the instance. Will be used in logging and when sending status messages |
237-
| verbose | AIRGAP_DOWNSTREAM_VERBOSE | false | true gives extra logging |
239+
| logLevel | AIRGAP_DOWNSTREAM_LOG_LEVEL | | debug, info, error, warn, fatal |
238240
| nic | AIRGAP_DOWNSTREAM_NIC | | What nic to use for binding the UDP port |
239241
| targetIP | AIRGAP_DOWNSTREAM_TARGET_IP | | Ip address to bind to |
240242
| targetPort | AIRGAP_DOWNSTREAM_TARGET_PORT | | Port to bind to |
@@ -289,7 +291,7 @@ Now we have a compiled file called `upstream`. We can run the application with `
289291
To turn the application into a service we need to create a service file: `/etc/systemd/system/upstream.service`
290292
Change the paths to where you will install the service binary and configuration file
291293

292-
```properties
294+
```ini
293295
[Unit]
294296
Description=Upstream AirGap service
295297
ConditionPathExists=/opt/airgap/upstream/bin
@@ -408,6 +410,11 @@ See LICENSE file
408410

409411
# Release Notes
410412

413+
## 0.1.4-SNAPSHOT
414+
* Changed the logging for the go applications to include log levels. Monitoring and log updates.
415+
* Documented redundancy and load balancing (see doc folder)
416+
* Documented resend (future updates will implement the new resend algorithm)
417+
411418
## 0.1.3-SNAPSHOT
412419
* Added a Kafka Streams Java Application for deduplication and gap detection. Gap detection not finished.
413420
* Added upstreams filter to filter on the offset number for each partition (used in redundancy and load balancing setups)

config/downstream.properties

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,15 @@ targetIP=127.0.0.1
77
# UDP port to listen to
88
targetPort=1234
99
# Kafka target. If more than one, separate the servers with a comma ,
10-
bootstrapServers=192.168.153.143:9092,192.168.153.144:9092
10+
target=cmd
11+
#bootstrapServers=192.168.153.143:9092,192.168.153.144:9092
1112
# Topic to write to
12-
topic=downstream
13+
#topic=downstream
1314
# Glob that will identify the path(s) to all private keys we should try to use
1415
# when a key exchange packet is received
1516
privateKeyFiles=../certs/private*.pem
1617
# kafka or cmd (cmd untested)
17-
target=kafka
18+
#target=kafka
1819
# Some extra printouts
1920
verbose=false
2021
# Set mtu to auto or 0 will query the nic of the mtu

config/downstream3.properties

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,13 @@ id=Downstream_3
33
# Used in the MTU code
44
nic=en0
55
# UDP target (what IP to listen to)
6-
targetIP=192.168.0.27
6+
targetIP=0.0.0.0
77
# UDP port to listen to
88
targetPort=1234
99
# Kafka target. If more than one, separate the servers with a comma ,
10-
bootstrapServers=192.168.153.138:9092
10+
#bootstrapServers=your.kafka.server:9092
1111
# Topic to write to
12-
topic=log2
12+
topic=logs
1313
# Glob that will identify the path(s) to all private keys we should try to use
1414
# when a key exchange packet is received
1515
privateKeyFiles=certs/private*.pem
@@ -20,6 +20,6 @@ verbose=false
2020
# Set mtu to auto or 0 will query the nic of the mtu
2121
mtu=auto
2222
# Client id to use when sending events to Kafka
23-
clientId=downstream
23+
#clientId=downstream
2424
# After loading the config, where to send the logs? stdout is default
2525
#logFileName=./tmp/downstream.log

config/upstream.properties

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,30 +3,31 @@ id=Upstream_1
33
# Used in the MTU code
44
nic=lo0
55
# UDP target (downstream, use static arp and route to be able to send packets over a diode)
6+
#targetIP=[::1]
67
targetIP=127.0.0.1
78
# UDP target port
89
targetPort=1234
910
# Kafka source. If more than one, separate the servers with a comma ,
10-
bootstrapServers=192.168.153.143:9092,192.168.153.144:9092
11+
#bootstrapServers=192.168.153.143:9092,192.168.153.144:9092
1112
# Topic to read
12-
topic=upstream
13+
#topic=upstream
1314
# Kafka group id to use. If several threads are used, this is prepended to the thread names.
14-
groupID=test
15+
#groupID=test
1516
# Read from this time instead of starting at the end
1617
# 2024-01-28T10:24:55+01:00
1718
from=
1819
# Downstream public key file. Leave empty to disable encryption
1920
#publicKeyFile=certs/server2.pem
2021
# For testing, you can use random, else use kafka
2122
#source=random
22-
source=kafka
23+
source=random
2324
# Every n seconds, generate a new symmetric key
2425
generateNewSymmetricKeyEvery=50
2526
# Set mtu to auto or 0 will query the nic of the mtu
2627
mtu=auto
2728
# After reading the config, where should we send the logs? Default is stdout
28-
logFileName=./tmp/upstream.log
29+
#logFileName=./tmp/upstream.log
2930
# Add more sending threads (only valid with source=kafka). sendingThread needs to be an array of objects
3031
# Format: {"name": "thread_name", "offset": offset_in_seconds}
3132
#sendingThreads=[{"now": 0}, {"3minutes": -10}]
32-
sendingThreads=[{"0seconds": 0}]
33+
#sendingThreads=[{"0seconds": 0}]

config/upstream3.properties

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,13 @@ id=Upstream_1
33
# Used in the MTU code
44
nic=en0
55
# UDP target (downstream, use static arp and route to be able to send packets over a diode)
6-
targetIP=192.168.0.27
6+
targetIP=127.0.0.1
77
# UDP target port
88
targetPort=1234
99
# Kafka source. If more than one, separate the servers with a comma ,
10-
bootstrapServers=192.168.153.138:9092
10+
bootstrapServers=kafka-upstream.sitia.nu:9092
1111
# Topic to read
12-
topic=transfer2
12+
topic=transfer
1313
# Kafka group id to use. If from is set, then a new id is calculated by appending a timestamp to the groupID
1414
groupID=test
1515
# Read from this time instead of starting at the end
@@ -19,10 +19,11 @@ from=
1919
publicKeyFile=certs/server2.pem
2020
# For testing, you can use random, else use kafka
2121
#source=random
22-
source=random
22+
source=kafka
2323
# Every n seconds, generate a new symmetric key
2424
generateNewSymmetricKeyEvery=50
2525
# Set mtu to auto or 0 will query the nic of the mtu
2626
mtu=auto
2727
# After reading the config, where should we send the logs? Default is stdout
2828
# logFileName=./tmp/upstream.log
29+
verbose=true

doc/Monitoring.md

Lines changed: 27 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -97,12 +97,22 @@ sudo systemctl enable --now metricbeat
9797

9898
The PartitionDedupApp exposes rich runtime information and operations via JMX, thanks to the `JmxSupport` class. You can access these via JConsole, Jolokia, or any JMX client.
9999

100+
100101
#### What is Exposed?
101-
- **GapDetectors MBean** (`nu.sitia.airgap:type=GapDetectors`):
102-
- For each partition, exposes:
103-
- `<partition>`: Info about the GapDetector for that partition (window stats, offsets, etc.)
104-
- `<partition>_gaps`: Current gaps for that partition
105-
- Operations: `getAllGaps_<partition>()` and `purge_<partition>()` to fetch or purge gaps for a specific partition
102+
- **GapDetectors MBean** (per partition):
103+
- Each partition is registered as its own MBean:
104+
- `nu.sitia.airgap:type=GapDetectors,partition=0`
105+
- `nu.sitia.airgap:type=GapDetectors,partition=1`
106+
- ...etc.
107+
- For each partition MBean, exposes:
108+
- `<topic>_<partition>`: Info about the GapDetector for that partition (window stats, offsets, etc.)
109+
- `<topic>_<partition>_gaps`: Current gaps for that partition
110+
- `<topic>_<partition>_mem`: Estimated memory usage for that partition
111+
- `<topic>_<partition>_nrMissing`: Number of missing offsets for that partition
112+
- `<topic>_<partition>_nrWindows`: Number of windows for that partition
113+
- Operations: `getAllGaps_<topic>_<partition>()` and `purge_<topic>_<partition>()` to fetch or purge gaps for a specific partition
114+
- **Aggregate GapDetectors MBean** (optional):
115+
- `nu.sitia.airgap:type=GapDetectors,partition=-1` provides aggregate stats across all partitions (if enabled in your code).
106116
- **Props MBean** (`nu.sitia.airgap:type=Props`):
107117
- All Kafka Streams properties
108118
- Topics, assigned partitions, window size, max windows, and other runtime config
@@ -114,22 +124,24 @@ The PartitionDedupApp exposes rich runtime information and operations via JMX, t
114124
3. Browse to `nu.sitia.airgap -> GapDetectors` or `Props` to view attributes and invoke operations.
115125
- **With Jolokia (for Metricbeat):**
116126
- The Jolokia agent exposes these MBeans over HTTP. Metricbeat can be configured to scrape specific attributes or call operations.
117-
- Example: To fetch all gaps for partition 0, configure Metricbeat's `jolokia.yml` to query the `getAllGaps_0` operation on the `nu.sitia.airgap:type=GapDetectors` MBean.
127+
128+
- Example: To fetch all gaps for partition 0, configure Metricbeat's `jolokia.yml` to query the `getAllGaps_transfer_0` operation on the `nu.sitia.airgap:type=GapDetectors,partition=0` MBean.
118129

119130
#### Example Jolokia Query (HTTP API)
120131
To call an operation (e.g., get all gaps for partition 0):
121132
```sh
122133
curl -X POST http://localhost:8778/jolokia/ \
123134
-H 'Content-Type: application/json' \
124-
-d '{"type":"exec","mbean":"nu.sitia.airgap:type=GapDetectors","operation":"getAllGaps_transfer_0"}'
135+
-d '{"type":"exec","mbean":"nu.sitia.airgap:type=GapDetectors,partition=0","operation":"getAllGaps_transfer_0"}'
125136
```
126137
To read an attribute (some examples):
127138
```sh
128139
curl http://127.0.0.1:8778/jolokia/read/java.lang:type=Memory
129140
curl http://localhost:8778/jolokia/list/nu.sitia.airgap
130-
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors/transfer_0
131-
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors/transfer_0_mem
132-
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors/transfer_0_gaps
141+
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors,partition=0/transfer_0
142+
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors,partition=0/transfer_0_mem
143+
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors,partition=0/transfer_0_gaps
144+
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=GapDetectors,partition=0/transfer_0_nrMissing
133145
curl http://localhost:8778/jolokia/read/nu.sitia.airgap:type=Props
134146
```
135147

@@ -141,11 +153,13 @@ Add to your `jolokia.yml`:
141153
hosts: ["http://localhost:8778/jolokia"]
142154
namespace: "airgap"
143155
jmx.mappings:
144-
- mbean: 'nu.sitia.airgap:type=GapDetectors'
156+
- mbean: 'nu.sitia.airgap:type=GapDetectors,partition=0'
145157
attributes:
146-
- attr: 0_gaps
158+
- attr: transfer_0_gaps
147159
field: partition_0_gaps
148-
- attr: 1_gaps
160+
- mbean: 'nu.sitia.airgap:type=GapDetectors,partition=1'
161+
attributes:
162+
- attr: transfer_1_gaps
149163
field: partition_1_gaps
150164
- mbean: 'nu.sitia.airgap:type=Props'
151165
attributes:

0 commit comments

Comments
 (0)