diff --git a/MIDAS/.gitignore b/MIDAS/.gitignore
index fa66e01a..1488e904 100644
--- a/MIDAS/.gitignore
+++ b/MIDAS/.gitignore
@@ -1,6 +1,6 @@
.pio
.vscode
-/src/log_checksum.h
**/.DS_Store
*.launch
-*.pyc
\ No newline at end of file
+*.pyc
+struct_sizes.json
\ No newline at end of file
diff --git a/MIDAS/README.md b/MIDAS/README.md
index 47786730..7fd36208 100644
--- a/MIDAS/README.md
+++ b/MIDAS/README.md
@@ -20,4 +20,7 @@ Note that if you never directly installed platformio and instead are just using
the VSCode extension, these commands won't work until you install platformio
manually.
-To run SILSIM from the command line, use `pio run -e mcu_silsim`.
\ No newline at end of file
+To run SILSIM from the command line, use `pio run -e mcu_silsim`.
+
+### HILSIM
+To start a basic HILSIM run, you must first run the `struct_sizes` target; then you can upload the HILSIM data and run `hilsimstreamer.py`.
\ No newline at end of file
diff --git a/MIDAS/hilsim/hilsimstreamer.py b/MIDAS/hilsim/hilsimstreamer.py
new file mode 100644
index 00000000..ac6832a8
--- /dev/null
+++ b/MIDAS/hilsim/hilsimstreamer.py
@@ -0,0 +1,103 @@
+import serial
+import serial.tools
+import serial.tools.list_ports
+import time
+import json
+
+import csv
+import os
+
+def write_to_csv(filename, data):
+ file_exists = os.path.exists(filename)
+
+ with open(filename, 'a', newline='') as csvfile:
+ writer = csv.writer(csvfile)
+
+ if not file_exists:
+ writer.writerow(['Timestamp', 'fsmstate', 'global_armed', 'a_armed', 'a_firing', 'b_armed', 'b_firing', 'c_armed', 'c_firing', 'd_armed', 'd_firing'])
+
+ writer.writerow(data)
+
+device = None
+# Look for midas comport
+for comport in serial.tools.list_ports.comports():
+ if comport.vid == 0x303a:
+ # This is an espressif device
+ print(comport.name, "is an Espressif device")
+ device = comport
+ break
+
+print(device.device)
+
+if not device:
+ print("MIDAS is not connected!")
+ exit()
+
+# make this a command line argument
+file = open(r"data43.launch", "rb")
+
+# Read the json file
+SIZES = { int(k): v for k, v in json.load(open("../struct_sizes.json", 'r')).items() }
+print(SIZES)
+
+test_list=file.read(4)
+print("Checksum", hex(int.from_bytes(test_list, byteorder='little')))
+ser = serial.Serial(
+ port=comport.device,
+ baudrate=115200,
+ timeout=None
+)
+print(ser.write('!'.encode('ascii')))
+print("Magic", ser.read_until('\n'.encode('ascii'))) # Should be our magic
+print("Checksum", hex(int(ser.read_until('\n'.encode('ascii'))))) # Should be our checksum
+print("Garbage", ser.read_until('\n'.encode('ascii')))
+print("Garbage", ser.read_until('\n'.encode('ascii')))
+#print("Garbage", ser.read_until('\n'.encode('ascii')))
+#print("Garbage", ser.read_until('\n'.encode('ascii')))
+#print("Garbage", ser.read_until('\n'.encode('ascii')))
+
+counter = 0
+
+
+start_time = time.perf_counter()
+prev = None
+while True:
+ tag = file.read(4)
+ if not tag:
+ break
+
+ tag = int.from_bytes(tag, byteorder='little')
+ timestamp = file.read(4)
+ timestamp = int.from_bytes(timestamp, byteorder='little')
+ # print(tag, int.from_bytes(timestamp, byteorder='little'))
+
+ if tag in SIZES:
+ size = SIZES[tag]
+ # print(size)
+
+ data = file.read(size)
+ # print(data)
+
+ ser.write(tag.to_bytes(1, byteorder='little'))
+ # ser.write(size.to_bytes(4, byteorder='little'))
+ ser.write(data)
+ content = (ser.read())
+ # data = bytes.decode(content, encoding="ascii")
+ # if len(content) != 0:
+ # # print(content)
+ # if ("Error") in (data):
+ # print((content))
+ if content != prev:
+ prev = content
+ print(counter, file.tell(), int.from_bytes(content))
+
+ previous_pyro_array = [timestamp, int.from_bytes(content), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read()), int.from_bytes(ser.read())]
+
+ write_to_csv("pyro_data.csv", previous_pyro_array)
+ else:
+ raise ValueError(f"Unknown tag: {tag}")
+ counter += 1
+
+ser.close()
+end_time = time.perf_counter()
+print("Done in ", end_time - start_time)
diff --git a/MIDAS/hilsim/pyro_data_plotter.py b/MIDAS/hilsim/pyro_data_plotter.py
new file mode 100644
index 00000000..234f4ed4
--- /dev/null
+++ b/MIDAS/hilsim/pyro_data_plotter.py
@@ -0,0 +1,44 @@
+from matplotlib import pyplot as plt
+import pandas as pd
+
+df = pd.read_csv('pyro_data.csv')
+
+ # STATE_IDLE = 0
+ # STATE_FIRST_BOOST = 1
+ # STATE_BURNOUT = 2
+ # STATE_COAST = 3
+ # STATE_SUSTAINER_IGNITION = 4
+ # STATE_SECOND_BOOST = 5
+ # STATE_FIRST_SEPARATION = 6
+ # STATE_APOGEE = 7
+ # STATE_DROGUE_DEPLOY = 8
+ # STATE_DROGUE = 9
+ # STATE_MAIN_DEPLOY = 10
+ # STATE_MAIN = 11
+ # STATE_LANDED = 12
+ # FSM_STATE_COUNT = 13
+
+y_labs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
+y_labs_map = ['STATE_IDLE', 'STATE_FIRST_BOOST', 'STATE_BURNOUT', 'STATE_COAST', 'STATE_SUSTAINER_IGNITION', 'STATE_SECOND_BOOST', 'STATE_FIRST_SEPARATION', 'STATE_APOGEE', 'STATE_DROGUE_DEPLOY', 'STATE_DROGUE', 'STATE_MAIN_DEPLOY', 'STATE_MAIN', 'STATE_LANDED', 'FSM_STATE_COUNT']
+
+for index, row in df.iterrows():
+ if (index > 0 and df.loc[df.index[index]]["a_armed"] != df.loc[df.index[index-1]]["a_armed"] or
+ df.loc[df.index[index]]["b_armed"] != df.loc[df.index[index-1]]["b_armed"] or
+ df.loc[df.index[index]]["c_armed"] != df.loc[df.index[index-1]]["c_armed"] or
+ df.loc[df.index[index]]["d_armed"] != df.loc[df.index[index-1]]["d_armed"] or
+ df.loc[df.index[index]]["a_firing"] != df.loc[df.index[index-1]]["a_firing"] or
+ df.loc[df.index[index]]["b_firing"] != df.loc[df.index[index-1]]["b_firing"] or
+ df.loc[df.index[index]]["c_firing"] != df.loc[df.index[index-1]]["c_firing"] or
+ df.loc[df.index[index]]["d_firing"] != df.loc[df.index[index-1]]["d_firing"] or
+ df.loc[df.index[index]]["global_armed"] != df.loc[df.index[index-1]]["global_armed"]
+ ):
+
+ plt.axvline(x = df.loc[df.index[index]]["Timestamp"], color = 'red')
+
+
+plt.yticks(y_labs, y_labs_map)
+
+plt.ylim(0, 15)
+
+plt.plot(df['Timestamp'], df['fsmstate'])
+plt.show()
\ No newline at end of file
diff --git a/MIDAS/platformio.ini b/MIDAS/platformio.ini
index 43f37175..471957d3 100644
--- a/MIDAS/platformio.ini
+++ b/MIDAS/platformio.ini
@@ -27,6 +27,7 @@ build_flags =
-DARDUINO_USB_CDC_ON_BOOT=1
-DCONFIG_DISABLE_HAL_LOCKS=1
-std=gnu++2a
+ -DHILSIM=1
-DIS_BOOSTER
build_unflags =
-std=gnu++11
@@ -47,9 +48,12 @@ build_flags =
-DHILSIM=1
-DIS_SUSTAINER
-std=gnu++2a
-build_src_filter = +<*> - - +
+build_src_filter = +<*> - - + -
build_unflags =
-std=gnu++11
+lib_deps =
+ adafruit/Adafruit LIS3MDL@^1.2.1 ; Magnetometer driver
+ stevemarple/MicroNMEA@^2.0.6 ; NMEA Parsing library (for GPS messages)
[env:mcu_hilsim_booster]
platform = espressif32
@@ -62,9 +66,12 @@ build_flags =
-DHILSIM=1
-DIS_BOOSTER
-std=gnu++2a
-build_src_filter = +<*> - - +
+build_src_filter = +<*> - - + -
build_unflags =
-std=gnu++11
+lib_deps =
+ adafruit/Adafruit LIS3MDL@^1.2.1 ; Magnetometer driver
+ stevemarple/MicroNMEA@^2.0.6 ; NMEA Parsing library (for GPS messages)
[env:mcu_silsim_sustainer]
@@ -98,3 +105,11 @@ lib_deps =
Eigen
lib_ignore =
TCAL9539
+
+
+[env:struct_sizes]
+platform=native
+build_type = release
+build_unflags =
+ -std=gnu++11
+build_src_filter = +
diff --git a/MIDAS/src/Mutex.h b/MIDAS/src/Mutex.h
index a524ddaa..170a4d82 100644
--- a/MIDAS/src/Mutex.h
+++ b/MIDAS/src/Mutex.h
@@ -51,7 +51,7 @@ struct Mutex {
*/
T read() {
if (!mutex_handle || mutex_handle != check) {
- Serial.println("Aw shucks");
+ Serial.println("Aw shucks read");
Serial.flush();
}
xSemaphoreTake(mutex_handle, portMAX_DELAY);
@@ -77,7 +77,7 @@ struct Mutex {
*/
void read2(T* ptr) {
if (!mutex_handle || mutex_handle != check) {
- Serial.println("Aw shucks");
+ Serial.println("Aw shucks read2");
Serial.flush();
}
xSemaphoreTake(mutex_handle, portMAX_DELAY);
@@ -93,7 +93,7 @@ struct Mutex {
*/
void write(T value) {
if (!mutex_handle || mutex_handle != check) {
- Serial.println("Aw shucks");
+ Serial.println("Aw shucks write");
Serial.flush();
}
diff --git a/MIDAS/src/hardware/Pyro.cpp b/MIDAS/src/Pyro.cpp
similarity index 97%
rename from MIDAS/src/hardware/Pyro.cpp
rename to MIDAS/src/Pyro.cpp
index 1c5c3bdb..473667f5 100644
--- a/MIDAS/src/hardware/Pyro.cpp
+++ b/MIDAS/src/Pyro.cpp
@@ -1,7 +1,11 @@
#include
-#include "sensors.h"
-#include "pins.h"
+#ifdef HILSIM
+#include "hilsim/sensors.h"
+#else
+#include "hardware/sensors.h"
+#endif
+#include "hardware/pins.h"
#include "TCAL9539.h"
diff --git a/MIDAS/src/hardware/SDLog.cpp b/MIDAS/src/SDLog.cpp
similarity index 97%
rename from MIDAS/src/hardware/SDLog.cpp
rename to MIDAS/src/SDLog.cpp
index bb8d1879..12581459 100644
--- a/MIDAS/src/hardware/SDLog.cpp
+++ b/MIDAS/src/SDLog.cpp
@@ -2,6 +2,7 @@
#include
#include
+#include "hardware/pins.h"
#include "SDLog.h"
/**
diff --git a/MIDAS/src/hardware/SDLog.h b/MIDAS/src/SDLog.h
similarity index 82%
rename from MIDAS/src/hardware/SDLog.h
rename to MIDAS/src/SDLog.h
index be038d47..fdbab304 100644
--- a/MIDAS/src/hardware/SDLog.h
+++ b/MIDAS/src/SDLog.h
@@ -3,7 +3,11 @@
#include
#include
-#include "sensors.h"
+#ifdef HILSIM
+#include "hilsim/sensors.h"
+#else
+#include "hardware/sensors.h"
+#endif
#include "data_logging.h"
/**
diff --git a/MIDAS/src/hardware/main.cpp b/MIDAS/src/hardware/main.cpp
index 0ff17182..5c79233c 100644
--- a/MIDAS/src/hardware/main.cpp
+++ b/MIDAS/src/hardware/main.cpp
@@ -5,7 +5,7 @@
#include "systems.h"
#include "hardware/pins.h"
#include "hardware/Emmc.h"
-#include "hardware/SDLog.h"
+#include "SDLog.h"
#include "sensor_data.h"
/**
diff --git a/MIDAS/src/hardware/sensors.h b/MIDAS/src/hardware/sensors.h
index 0e84c9ed..7d4712b4 100644
--- a/MIDAS/src/hardware/sensors.h
+++ b/MIDAS/src/hardware/sensors.h
@@ -1,6 +1,6 @@
#pragma once
-#include "errors.h"
+#include "../errors.h"
#include "sensor_data.h"
#include "hardware/pins.h"
diff --git a/MIDAS/src/hilsim/Barometer.cpp b/MIDAS/src/hilsim/Barometer.cpp
new file mode 100644
index 00000000..9f7d4ec9
--- /dev/null
+++ b/MIDAS/src/hilsim/Barometer.cpp
@@ -0,0 +1,33 @@
+#include "sensors.h"
+#include
+
+MS5611 MS(MS5611_CS); //singleton object for the MS sensor
+
+/**
+ * @brief Initializes barometer, returns NoError
+ *
+ * @return Error code
+*/
+ErrorCode BarometerSensor::init() {
+ MS.init();
+
+ return ErrorCode::NoError;
+}
+
+/**
+ * @brief Reads the pressure and temperature from the MS5611
+ *
+ * @return Barometer data packet
+*/
+Barometer BarometerSensor::read() {
+ MS.read(12);
+
+ /*
+ * TODO: Switch to latest version of library (0.3.9) when we get hardware to verify
+ */
+ float pressure = static_cast(MS.getPressure() * 0.01 + 26.03);
+ float temperature = static_cast(MS.getTemperature() * 0.01);
+ float altitude = static_cast(-log(pressure * 0.000987) * (temperature + 273.15) * 29.254);
+
+ return barometer;
+}
diff --git a/MIDAS/src/hilsim/Continuity.cpp b/MIDAS/src/hilsim/Continuity.cpp
new file mode 100644
index 00000000..a57a5ab0
--- /dev/null
+++ b/MIDAS/src/hilsim/Continuity.cpp
@@ -0,0 +1,33 @@
+#include "sensors.h"
+#include "ads7138-q1.h"
+#include
+
+#define PYRO_VOLTAGE_DIVIDER (5.0 / (5.0 + 20.0)) //voltage divider for pyro batt voltage, check hardware schematic
+#define CONT_VOLTAGE_DIVIDER (5.0 / (5.0 + 20.0)) //voltage divider for continuity voltage, check hardware schematic
+
+/**
+ * @brief Initializes ADC, returns NoError
+ *
+ * @return Error code
+*/
+ErrorCode ContinuitySensor::init() {
+ ADS7138Init(); // Ask ADS to init the pins, we still need to get the device to actually read
+
+ return ErrorCode::NoError;
+}
+
+/**
+ * @brief Reads the value of the ADC
+ *
+ * @return Continuity data packet
+*/
+Continuity ContinuitySensor::read() {
+ Continuity continuity;
+ //ADC reference voltage is 3.3, returns 12 bit value
+ continuity.sense_pyro = adcAnalogRead(ADCAddress{SENSE_PYRO}).value * 3.3f / 4096.f / PYRO_VOLTAGE_DIVIDER;
+ continuity.pins[0] = adcAnalogRead(ADCAddress{SENSE_MOTOR}).value * 3.3f / 4096.f / CONT_VOLTAGE_DIVIDER;
+ continuity.pins[1] = adcAnalogRead(ADCAddress{SENSE_MAIN}).value * 3.3f / 4096.f / CONT_VOLTAGE_DIVIDER;
+ continuity.pins[2] = adcAnalogRead(ADCAddress{SENSE_APOGEE}).value * 3.3f / 4096.f / CONT_VOLTAGE_DIVIDER;
+ continuity.pins[3] = adcAnalogRead(ADCAddress{SENSE_AUX}).value * 3.3f / 4096.f / CONT_VOLTAGE_DIVIDER;
+ return continuity;
+}
diff --git a/MIDAS/src/hilsim/GPSSensor.cpp b/MIDAS/src/hilsim/GPSSensor.cpp
new file mode 100644
index 00000000..4ec04923
--- /dev/null
+++ b/MIDAS/src/hilsim/GPSSensor.cpp
@@ -0,0 +1,78 @@
+#include
+
+#include "MicroNMEA.h"
+#include "teseo_liv3f_class.h"
+
+#include "pins.h"
+#include "sensors.h"
+#include "sensor_data.h"
+
+TeseoLIV3F teseo(&Wire, GPS_RESET, GPS_ENABLE); // singleton for the teseo gps
+
+/**
+ * @brief Initializes GPS, returns NoError
+ *
+ * @return Error code
+ */
+ErrorCode GPSSensor::init() {
+ teseo.init(); // always returns ok for some reason
+
+ return ErrorCode::NoError;
+}
+
+
+// This is needed because GPS doesn't provide unix time and just gives dd mm yy
+// 'needed' is a strong word
+const uint16_t months[12] = {
+ 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334
+};
+
+inline bool is_leapyear(int year) {
+ return ((year % 100 != 0) || (year % 400 == 0)) && (year % 4 == 0);
+}
+
+
+/**
+ * @brief Reads the GPS data from the sensor (lat, long, altitude, sat count, etc)
+ *
+ * @return GPS data packet
+ */
+GPS GPSSensor::read() {
+ teseo.update();
+ GPGGA_Info_t gpgga_message = teseo.getGPGGAData();
+ GPRMC_Info_t gprmc_message = teseo.getGPRMCData();
+
+ float64_t lat = gpgga_message.xyz.lat;
+ float64_t lon = gpgga_message.xyz.lon;
+
+ // d ddm m.mm mmm
+ // the max value of a signed 32 bit int is 2,147,483,647
+ // Since the maximum longitude is 180, we can store 3 degree digits, and
+ // 7 minute digits, which is all we need, because NMEA gives 6 minute digits.
+ // See https://www.sparkfun.com/datasheets/GPS/NMEA%20Reference%20Manual-Rev2.1-Dec07.pdf
+ int32_t lat_int = static_cast(lat*100000);
+ int32_t lon_int = static_cast(lon*100000);
+
+ lat_int *= (gpgga_message.xyz.ns == 'N') ? 1 : -1;
+ lon_int *= (gpgga_message.xyz.ew == 'E') ? 1 : -1;
+ float alt = gpgga_message.xyz.alt;
+ float v = gprmc_message.speed;
+ uint16_t sat_count = gpgga_message.sats;
+
+ uint32_t day = gprmc_message.date / 10000 * 86400;
+ int32_t month = gprmc_message.date / 100 % 100;
+ if (month <= 0 || month > 12) {
+ month = 1;
+ }
+ int month_time = months[month - 1];
+ if (is_leapyear(gprmc_message.date % 100) && month >= 3) {
+ month_time++;
+ }
+ uint32_t time = (day - 1) + month_time * 86400 + (30 + gprmc_message.date % 100) * 31536000;
+ // Sum everything together now
+ uint32_t time_of_day = gprmc_message.utc.hh * 3600 + gprmc_message.utc.mm * 60 + gprmc_message.utc.ss;
+ time += time_of_day;
+ time += (int) ((30 + gprmc_message.date % 100) / 4) * 86400;
+
+ return gps;
+}
diff --git a/MIDAS/src/hilsim/HighG.cpp b/MIDAS/src/hilsim/HighG.cpp
new file mode 100644
index 00000000..a86127b3
--- /dev/null
+++ b/MIDAS/src/hilsim/HighG.cpp
@@ -0,0 +1,33 @@
+#include "sensors.h"
+#include "SparkFun_Qwiic_KX13X.h"
+
+QwiicKX134 KX; // global static instance of the sensor
+
+/**
+ * @brief Initializes the high G sensor
+ *
+ * @return Error Code
+*/
+ErrorCode HighGSensor::init() {
+ KX.beginSPI(KX134_CS);
+ if (!KX.initialize(DEFAULT_SETTINGS)) {
+ return ErrorCode::HighGCouldNotBeInitialized;
+ }
+
+ if(!KX.setOutputDataRate(0xb)) {
+ return ErrorCode::HighGCouldNotUpdateDataRate;
+ }
+
+ KX.setRange(3);
+ return ErrorCode::NoError;
+}
+
+/**
+ * @brief Reads and returns the data from the sensor
+ *
+ * @return a HighGData packet with current acceleration in all three axes
+*/
+HighGData HighGSensor::read() {
+ auto data = KX.getAccelData();
+ return highg;
+}
diff --git a/MIDAS/src/hilsim/LowG.cpp b/MIDAS/src/hilsim/LowG.cpp
new file mode 100644
index 00000000..30a4ed8a
--- /dev/null
+++ b/MIDAS/src/hilsim/LowG.cpp
@@ -0,0 +1,31 @@
+#include "sensors.h"
+#include "PL_ADXL355.h"
+
+PL::ADXL355 sensor(ADXL355_CS); //singleton object for the adxl
+
+/**
+ * @brief Initializes the low G sensor
+ *
+ * @return Error Code
+*/
+ErrorCode LowGSensor::init() {
+ ErrorCode error = ErrorCode::NoError;
+ sensor.begin();
+ sensor.setRange(PL::ADXL355_Range::range2g);
+ sensor.setOutputDataRate(PL::ADXL355_OutputDataRate::odr1000);
+ // todo set low pass filter frequency to 250Hz
+ sensor.enableMeasurement();
+ return error;
+}
+
+/**
+ * @brief Reads and returns the data from the sensor
+ *
+ * @return a LowGData packet with current acceleration in all three axes
+*/
+LowGData LowGSensor::read()
+{
+ auto data = sensor.getAccelerations();
+
+ return lowg;
+}
diff --git a/MIDAS/src/hilsim/LowGLSM.cpp b/MIDAS/src/hilsim/LowGLSM.cpp
new file mode 100644
index 00000000..14982428
--- /dev/null
+++ b/MIDAS/src/hilsim/LowGLSM.cpp
@@ -0,0 +1,29 @@
+#include "sensors.h"
+#include
+
+LSM6DS3Class LSM(SPI, LSM6DS3_CS, 46); // global static instance of the sensor
+
+/**
+ * @brief Initializes the low G LSM sensor
+ *
+ * @return Error Code
+*/
+ErrorCode LowGLSMSensor::init() {
+ if (!LSM.begin()) {
+ return ErrorCode::GyroCouldNotBeInitialized;
+ }
+ return ErrorCode::NoError;
+}
+
+/**
+ * @brief Reads and returns the data from the sensor
+ *
+ * @return a LowGLSM packet with current acceleration and gyro in all three axes
+*/
+LowGLSM LowGLSMSensor::read() {
+ // read from aforementioned global instance of sensor
+ LowGLSM result;
+ LSM.readAcceleration(result.ax, result.ay, result.az);
+ LSM.readGyroscope(result.gx, result.gy, result.gz);
+ return lowglsm;
+}
diff --git a/MIDAS/src/hilsim/Magnetometer.cpp b/MIDAS/src/hilsim/Magnetometer.cpp
new file mode 100644
index 00000000..7c829b07
--- /dev/null
+++ b/MIDAS/src/hilsim/Magnetometer.cpp
@@ -0,0 +1,27 @@
+#include
+
+#include "sensors.h"
+#include "hal.h"
+
+Adafruit_LIS3MDL LIS3MDL; // global static instance of the sensor
+
+ErrorCode MagnetometerSensor::init() {
+ if (!LIS3MDL.begin_SPI(LIS3MDL_CS)) { // Checks if sensor is connected
+ return ErrorCode::MagnetometerCouldNotBeInitialized;
+ }
+ LIS3MDL.setOperationMode(LIS3MDL_CONTINUOUSMODE); // Reading continuously, instead of single-shot or off
+ LIS3MDL.setDataRate(LIS3MDL_DATARATE_155_HZ);
+ LIS3MDL.setRange(LIS3MDL_RANGE_4_GAUSS); // Earth's magnetic field is 1/2 gauss, can detect high current
+ return ErrorCode::NoError;
+}
+
+Magnetometer MagnetometerSensor::read() {
+ // read from aforementioned global instance of sensor
+ LIS3MDL.read();
+
+ float mx = LIS3MDL.x_gauss;
+ float my = LIS3MDL.y_gauss;
+ float mz = LIS3MDL.z_gauss;
+ Magnetometer reading{mx, my, mz};
+ return mag;
+}
diff --git a/MIDAS/src/hilsim/Orientation.cpp b/MIDAS/src/hilsim/Orientation.cpp
new file mode 100644
index 00000000..fd1ed9a0
--- /dev/null
+++ b/MIDAS/src/hilsim/Orientation.cpp
@@ -0,0 +1,135 @@
+#include "sensors.h"
+#include "Adafruit_BNO08x.h"
+
+// global static instance of the sensor
+Adafruit_BNO08x imu(BNO086_RESET);
+#define REPORT_INTERVAL_US 5000
+
+/**
+ * @brief Initializes the bno sensor
+ *
+ * @return Error Code
+*/
+ErrorCode OrientationSensor::init() {
+ gpioPinMode(BNO086_RESET, OUTPUT);
+ delay(100);
+ // do whatever steps to initialize the sensor
+ // if it errors, return the relevant error code
+ if (!imu.begin_SPI(BNO086_CS, BNO086_INT)) {
+ return ErrorCode::CannotConnectBNO;
+ }
+ Serial.println("Setting desired reports");
+ if (!imu.enableReport(SH2_ARVR_STABILIZED_RV, REPORT_INTERVAL_US)) {
+ return ErrorCode::CannotInitBNO;
+ }
+ return ErrorCode::NoError;
+}
+
+/**
+ * @brief Turns a quaternion into its corresponding Euler 3D vector representation
+ *
+ * @param qr Quaternion real component
+ * @param qi Quaternion i component
+ * @param qj Quaternion j component
+ * @param qk Quaternion k component
+ * @param degrees Quaternion degrees, not used
+ *
+ * @return 3D representation of the quaternion
+*/
+Vec3 quaternionToEuler(float qr, float qi, float qj, float qk, bool degrees) {
+ float sqr = sq(qr);
+ float sqi = sq(qi);
+ float sqj = sq(qj);
+ float sqk = sq(qk);
+
+ Vec3 euler;
+ euler.x = atan2(2.0 * (qi * qj + qk * qr), (sqi - sqj - sqk + sqr)); // roll
+ euler.y = asin(-2.0 * (qi * qk - qj * qr) / (sqi + sqj + sqk + sqr)); // yaw
+ euler.z = -1 * atan2(2.0 * (qj * qk + qi * qr), (-sqi - sqj + sqk + sqr)); // pitch
+ return euler;
+}
+
+/**
+ * @brief Takes a rotation quaternion and turns it into its Euler angle counterpart
+ *
+ * @param rotational_vector Rotation quaternion
+ * @param degrees Quaternion degrees, not used
+ *
+ * @return Euler angle vector representation of the quaternion
+*/
+Vec3 quaternionToEulerRV(sh2_RotationVectorWAcc_t* rotational_vector, bool degrees) {
+ return quaternionToEuler(rotational_vector->real, rotational_vector->i, rotational_vector->j, rotational_vector->k,
+ degrees);
+
+}
+
+/**
+ * @brief Takes a gyroscope quaternion and turns it into its Euler 3D counterpart
+ *
+ * @param rotational_vector Gyroscope quaternion
+ * @param degrees Quaternion degrees, not used
+ *
+ * @return Euler angle vector representation of the quaternion
+*/
+Vec3 quaternionToEulerGI(sh2_GyroIntegratedRV_t* rotational_vector, bool degrees) {
+ return quaternionToEuler(rotational_vector->real, rotational_vector->i, rotational_vector->j, rotational_vector->k,
+ degrees);
+}
+
+/**
+ * @brief Reads and returns the data from the sensor
+ *
+ * @return An orientation packet with orientation, acceleration, gyroscope, and magnetometer for all axes, along with temperature and pressure
+*/
+Orientation OrientationSensor::read() {
+ // read from aforementioned global instance of sensor
+ sh2_SensorValue_t event;
+ Vec3 euler;
+ if (imu.getSensorEvent(&event)) {
+ switch (event.sensorId) {
+ case SH2_ARVR_STABILIZED_RV:
+ euler = quaternionToEulerRV(&event.un.arvrStabilizedRV, true);
+ case SH2_GYRO_INTEGRATED_RV:
+ // faster (more noise?)
+ euler = quaternionToEulerGI(&event.un.gyroIntegratedRV, true);
+ break;
+ }
+
+ Orientation sensor_reading;
+ sensor_reading.has_data = true;
+
+ sensor_reading.yaw = -euler.y;
+ sensor_reading.pitch = euler.x;
+ sensor_reading.roll = euler.z;
+
+ sensor_reading.linear_acceleration.ax = -event.un.accelerometer.y;
+ sensor_reading.linear_acceleration.ay = event.un.accelerometer.x;
+ sensor_reading.linear_acceleration.az = event.un.accelerometer.z;
+
+ sensor_reading.gx = -event.un.gyroscope.y;
+ sensor_reading.gy = event.un.gyroscope.x;
+ sensor_reading.gz = event.un.gyroscope.z;
+
+ sensor_reading.magnetometer.mx = -event.un.magneticField.y;
+ sensor_reading.magnetometer.my = event.un.magneticField.x;
+ sensor_reading.magnetometer.mz = event.un.magneticField.z;
+
+ sensor_reading.temperature = event.un.temperature.value;
+ sensor_reading.pressure = event.un.pressure.value;
+
+ if (initial_flag == 0) {
+ initial_orientation = sensor_reading;
+ initial_flag = 1;
+ }
+
+ // calculate tilt from initial orientation
+ Orientation deviation;
+ deviation.yaw = min(abs(sensor_reading.yaw - initial_orientation.yaw), 2 * 3.14F - abs(sensor_reading.yaw - initial_orientation.yaw));
+ deviation.pitch = min(abs(sensor_reading.pitch - initial_orientation.pitch), 2 * 3.14F - abs(sensor_reading.pitch - initial_orientation.pitch));
+
+ sensor_reading.tilt = sqrt(pow(deviation.yaw, 2) + pow(deviation.pitch, 2));
+
+ return orient;
+ }
+ return orient;
+}
diff --git a/MIDAS/src/hilsim/StructSizes.cpp b/MIDAS/src/hilsim/StructSizes.cpp
new file mode 100644
index 00000000..567b71ad
--- /dev/null
+++ b/MIDAS/src/hilsim/StructSizes.cpp
@@ -0,0 +1,54 @@
+#include
+#include
+#include
+#include "../sensor_data.h"
+
+void writeStructSizesToJson() {
+ // Create an ostringstream to build the JSON string
+ std::ostringstream jsonStream;
+
+ // Start the JSON object
+ jsonStream << "{\n";
+
+ // Add struct sizes as key-value pairs
+ /*jsonStream << " \"Vec3\": " << sizeof(Vec3) << ",\n";
+ jsonStream << " \"Position\": " << sizeof(Position) << ",\n";
+ jsonStream << " \"Velocity\": " << sizeof(Velocity) << ",\n";
+ jsonStream << " \"Acceleration\": " << sizeof(Acceleration) << ",\n";
+ jsonStream << " \"euler_t\": " << sizeof(euler_t) << ",\n";*/
+ jsonStream << " \"1\": " << sizeof(LowGData) << ",\n";
+ jsonStream << " \"2\": " << sizeof(HighGData) << ",\n";
+ jsonStream << " \"3\": " << sizeof(Barometer) << ",\n";
+ jsonStream << " \"4\": " << sizeof(Continuity) << ",\n";
+ jsonStream << " \"5\": " << sizeof(Voltage) << ",\n";
+ jsonStream << " \"6\": " << sizeof(GPS) << ",\n";
+ jsonStream << " \"7\": " << sizeof(Magnetometer) << ",\n";
+ jsonStream << " \"8\": " << sizeof(Orientation) << ",\n";
+ jsonStream << " \"9\": " << sizeof(LowGLSM) << ",\n";
+ jsonStream << " \"10\": " << sizeof(FSMState) << ",\n";
+ jsonStream << " \"11\": " << sizeof(KalmanData) << ",\n";
+ jsonStream << " \"12\": " << sizeof(PyroState) << "\n";
+ //jsonStream << " \"PyroChannel\": " << sizeof(PyroChannel) << ",\n";
+
+
+ // End the JSON object
+ jsonStream << "}";
+
+ // Convert the stream to a string
+ std::string jsonString = jsonStream.str();
+
+ // Write the JSON string to a file
+ std::ofstream file("struct_sizes.json");
+ if (file.is_open()) {
+ file << jsonString; // Write the JSON string to the file
+ file.close();
+ std::cout << "Struct sizes written to struct_sizes.json\n";
+ } else {
+ std::cerr << "Error: Could not open file for writing.\n";
+ }
+}
+
+int main() {
+ writeStructSizesToJson();
+ return 0;
+}
diff --git a/MIDAS/src/hilsim/Voltage.cpp b/MIDAS/src/hilsim/Voltage.cpp
new file mode 100644
index 00000000..7147fa86
--- /dev/null
+++ b/MIDAS/src/hilsim/Voltage.cpp
@@ -0,0 +1,29 @@
+#include "sensors.h"
+#include
+#include
+
+#define VOLTAGE_DIVIDER (5.0 / (5.0 + 20.0))
+
+/**
+ * @brief "Initializes" the voltage sensor. Since it reads directly from a pin without a library, there is no specific initialization.
+ *
+ * @return Error Code, will always be NoError
+*/
+ErrorCode VoltageSensor::init() {
+ return ErrorCode::NoError;
+}
+
+/**
+ * @brief Reads the value of the given analog pin and converts it to a battery voltage with the assumption that the voltage sensor is plugged into that pin
+ *
+ * @return The scaled voltage given by the voltage sensor
+*/
+Voltage VoltageSensor::read() {
+ Voltage v_battery;
+ v_battery.voltage = adcAnalogRead(ADCAddress{VOLTAGE_PIN}).value * 3.3f / 4095.0f / VOLTAGE_DIVIDER;
+// Serial.print("Raw voltage reading: ");
+// Serial.print(v_battery.voltage);
+// Serial.println("");
+ //* 3.3f / 4095.f / VOLTAGE_DIVIDER;
+ return voltage;
+}
diff --git a/MIDAS/src/hilsim/generate_protobuf.bat b/MIDAS/src/hilsim/generate_protobuf.bat
deleted file mode 100644
index 415d458f..00000000
--- a/MIDAS/src/hilsim/generate_protobuf.bat
+++ /dev/null
@@ -1,11 +0,0 @@
-@echo off
-REM Generate hilsim packet
-protoc -I=. --python_out=. hilsimpacket.proto
-python nanopb_generator/nanopb_generator.py hilsimpacket.proto
-
-python nanopb_generator/nanopb_generator.py rocketstate.proto
-protoc -I=. --python_out=. rocketstate.proto
-
-cp hilsimpacket_pb2.py ../../hilsim/hilsimpacket_pb2.py
-cp rocketstate_pb2.py ../../hilsim/rocketstate_pb2.py
-REM Get the size of the packets and automatically dump it to a header file
diff --git a/MIDAS/src/hilsim/generate_protobuf.sh b/MIDAS/src/hilsim/generate_protobuf.sh
deleted file mode 100644
index e69de29b..00000000
diff --git a/MIDAS/src/hilsim/generate_size.py b/MIDAS/src/hilsim/generate_size.py
deleted file mode 100644
index a7b115c3..00000000
--- a/MIDAS/src/hilsim/generate_size.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import hilsimpacket_pb2
-
-def main():
- hilsim_packet = hilsimpacket_pb2.HILSIMPacket()
- hilsim_packet.imu_high_ax = 0
- hilsim_packet.imu_high_ay = 0
- hilsim_packet.imu_high_az = 0
- hilsim_packet.barometer_altitude = 0
- hilsim_packet.barometer_temperature = 0
- hilsim_packet.barometer_pressure = 0
- hilsim_packet.imu_low_ax = 0
- hilsim_packet.imu_low_ay = 0
- hilsim_packet.imu_low_az = 0
- hilsim_packet.imu_low_gx = 0
- hilsim_packet.imu_low_gy = 0
- hilsim_packet.imu_low_gz = 0
- hilsim_packet.mag_x = 0
- hilsim_packet.mag_y = 0
- hilsim_packet.mag_z = 0
- print(vars(hilsimpacket_pb2.HILSIMPacket))
- print(len(hilsim_packet.SerializeToString()))
-
-if __name__ == '__main__':
- main()
diff --git a/MIDAS/src/hilsim/global_packet.h b/MIDAS/src/hilsim/global_packet.h
deleted file mode 100644
index a2b936b9..00000000
--- a/MIDAS/src/hilsim/global_packet.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#pragma once
-#include "hilsim/hilsimpacket.pb.h"
-#include "hilsim/rocketstate.pb.h"
-
-extern HILSIMPacket global_packet;
diff --git a/MIDAS/src/hilsim/hilsimpacket.pb.c b/MIDAS/src/hilsim/hilsimpacket.pb.c
deleted file mode 100644
index 1fb5b4fe..00000000
--- a/MIDAS/src/hilsim/hilsimpacket.pb.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Automatically generated nanopb constant definitions */
-/* Generated by nanopb-0.4.7 */
-
-#include "hilsimpacket.pb.h"
-#if PB_PROTO_HEADER_VERSION != 40
-#error Regenerate this file with the current version of nanopb generator.
-#endif
-
-PB_BIND(HILSIMPacket, HILSIMPacket, AUTO)
-
-
-
diff --git a/MIDAS/src/hilsim/hilsimpacket.pb.h b/MIDAS/src/hilsim/hilsimpacket.pb.h
deleted file mode 100644
index e0d00ff2..00000000
--- a/MIDAS/src/hilsim/hilsimpacket.pb.h
+++ /dev/null
@@ -1,161 +0,0 @@
-/* Automatically generated nanopb header */
-/* Generated by nanopb-0.4.7 */
-
-#ifndef PB_HILSIMPACKET_PB_H_INCLUDED
-#define PB_HILSIMPACKET_PB_H_INCLUDED
-#include
-
-#if PB_PROTO_HEADER_VERSION != 40
-#error Regenerate this file with the current version of nanopb generator.
-#endif
-
-/* Struct definitions */
-typedef struct _HILSIMPacket {
- /* High-G IMU data */
- float imu_high_ax;
- float imu_high_ay;
- float imu_high_az;
- /* Barometer data */
- float barometer_altitude;
- float barometer_temperature;
- float barometer_pressure;
- /* Low-G IMU data */
- float imu_low_ax;
- float imu_low_ay;
- float imu_low_az;
- /* Low-G lsm IMU data */
- float imu_low_lsm_ax;
- float imu_low_lsm_ay;
- float imu_low_lsm_az;
- float imu_low_lsm_gx;
- float imu_low_lsm_gy;
- float imu_low_lsm_gz;
- /* Mag data */
- float mag_x;
- float mag_y;
- float mag_z;
- /* Orientation data */
- float ornt_roll;
- float ornt_pitch;
- float ornt_yaw;
- float ornt_rollv;
- float ornt_pitchv;
- float ornt_yawv;
- float ornt_rolla;
- float ornt_pitcha;
- float ornt_yawa;
- float ornt_ax;
- float ornt_ay;
- float ornt_az;
- float ornt_gx;
- float ornt_gy;
- float ornt_gz;
- float ornt_mx;
- float ornt_my;
- float ornt_mz;
- float ornt_temp;
-} HILSIMPacket;
-
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/* Initializer values for message structs */
-#define HILSIMPacket_init_default {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
-#define HILSIMPacket_init_zero {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
-
-/* Field tags (for use in manual encoding/decoding) */
-#define HILSIMPacket_imu_high_ax_tag 1
-#define HILSIMPacket_imu_high_ay_tag 2
-#define HILSIMPacket_imu_high_az_tag 3
-#define HILSIMPacket_barometer_altitude_tag 4
-#define HILSIMPacket_barometer_temperature_tag 5
-#define HILSIMPacket_barometer_pressure_tag 6
-#define HILSIMPacket_imu_low_ax_tag 7
-#define HILSIMPacket_imu_low_ay_tag 8
-#define HILSIMPacket_imu_low_az_tag 9
-#define HILSIMPacket_imu_low_lsm_ax_tag 10
-#define HILSIMPacket_imu_low_lsm_ay_tag 11
-#define HILSIMPacket_imu_low_lsm_az_tag 12
-#define HILSIMPacket_imu_low_lsm_gx_tag 13
-#define HILSIMPacket_imu_low_lsm_gy_tag 14
-#define HILSIMPacket_imu_low_lsm_gz_tag 15
-#define HILSIMPacket_mag_x_tag 16
-#define HILSIMPacket_mag_y_tag 17
-#define HILSIMPacket_mag_z_tag 18
-#define HILSIMPacket_ornt_roll_tag 19
-#define HILSIMPacket_ornt_pitch_tag 20
-#define HILSIMPacket_ornt_yaw_tag 21
-#define HILSIMPacket_ornt_rollv_tag 22
-#define HILSIMPacket_ornt_pitchv_tag 23
-#define HILSIMPacket_ornt_yawv_tag 24
-#define HILSIMPacket_ornt_rolla_tag 25
-#define HILSIMPacket_ornt_pitcha_tag 26
-#define HILSIMPacket_ornt_yawa_tag 27
-#define HILSIMPacket_ornt_ax_tag 28
-#define HILSIMPacket_ornt_ay_tag 29
-#define HILSIMPacket_ornt_az_tag 30
-#define HILSIMPacket_ornt_gx_tag 31
-#define HILSIMPacket_ornt_gy_tag 32
-#define HILSIMPacket_ornt_gz_tag 33
-#define HILSIMPacket_ornt_mx_tag 34
-#define HILSIMPacket_ornt_my_tag 35
-#define HILSIMPacket_ornt_mz_tag 36
-#define HILSIMPacket_ornt_temp_tag 37
-
-/* Struct field encoding specification for nanopb */
-#define HILSIMPacket_FIELDLIST(X, a) \
-X(a, STATIC, REQUIRED, FLOAT, imu_high_ax, 1) \
-X(a, STATIC, REQUIRED, FLOAT, imu_high_ay, 2) \
-X(a, STATIC, REQUIRED, FLOAT, imu_high_az, 3) \
-X(a, STATIC, REQUIRED, FLOAT, barometer_altitude, 4) \
-X(a, STATIC, REQUIRED, FLOAT, barometer_temperature, 5) \
-X(a, STATIC, REQUIRED, FLOAT, barometer_pressure, 6) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_ax, 7) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_ay, 8) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_az, 9) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_lsm_ax, 10) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_lsm_ay, 11) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_lsm_az, 12) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_lsm_gx, 13) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_lsm_gy, 14) \
-X(a, STATIC, REQUIRED, FLOAT, imu_low_lsm_gz, 15) \
-X(a, STATIC, REQUIRED, FLOAT, mag_x, 16) \
-X(a, STATIC, REQUIRED, FLOAT, mag_y, 17) \
-X(a, STATIC, REQUIRED, FLOAT, mag_z, 18) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_roll, 19) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_pitch, 20) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_yaw, 21) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_rollv, 22) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_pitchv, 23) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_yawv, 24) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_rolla, 25) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_pitcha, 26) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_yawa, 27) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_ax, 28) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_ay, 29) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_az, 30) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_gx, 31) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_gy, 32) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_gz, 33) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_mx, 34) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_my, 35) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_mz, 36) \
-X(a, STATIC, REQUIRED, FLOAT, ornt_temp, 37)
-#define HILSIMPacket_CALLBACK NULL
-#define HILSIMPacket_DEFAULT NULL
-
-extern const pb_msgdesc_t HILSIMPacket_msg;
-
-/* Defines for backwards compatibility with code written before nanopb-0.4.0 */
-#define HILSIMPacket_fields &HILSIMPacket_msg
-
-/* Maximum encoded size of messages (where known) */
-#define HILSIMPacket_size 207
-
-#ifdef __cplusplus
-} /* extern "C" */
-#endif
-
-#endif
diff --git a/MIDAS/src/hilsim/hilsimpacket.proto b/MIDAS/src/hilsim/hilsimpacket.proto
deleted file mode 100644
index 44df7991..00000000
--- a/MIDAS/src/hilsim/hilsimpacket.proto
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * @struct HILSIMPacket
- * @brief Structure to hold data received serially from a desktop computer
- *
- * The simulated/past-launch data is streamed through serial row-by-row to TARS. TARS receives it in the HILSIM thread and populates
- * data that would otherwise be read from sensors via the received HILSIM packet. Used for rapid testing and iteration of onboard
- * hardware, GNC, and telemetry systems.
- * To generate hilsimpacket.pb.h, use the following command:
- * `python nanopb_generator/nanopb_generator.py hilsimpacket.proto`
- * To generate hilsimpacket_pb2.py, use the following command:
- * `protoc -I=. --python_out=. hilsimpacket.proto`
-*/
-syntax = "proto2";
-
-message HILSIMPacket {
- // High-G IMU data
- required float imu_high_ax = 1;
- required float imu_high_ay = 2;
- required float imu_high_az = 3;
-
- // Barometer data
- required float barometer_altitude = 4;
- required float barometer_temperature = 5;
- required float barometer_pressure = 6;
-
- // Low-G IMU data
- required float imu_low_ax = 7;
- required float imu_low_ay = 8;
- required float imu_low_az = 9;
-
- // Low-G lsm IMU data
- required float imu_low_lsm_ax = 10;
- required float imu_low_lsm_ay = 11;
- required float imu_low_lsm_az = 12;
- required float imu_low_lsm_gx = 13;
- required float imu_low_lsm_gy = 14;
- required float imu_low_lsm_gz = 15;
-
- // Mag data
- required float mag_x = 16;
- required float mag_y = 17;
- required float mag_z = 18;
-
- // Orientation data
- required float ornt_roll = 19;
- required float ornt_pitch = 20;
- required float ornt_yaw = 21;
- required float ornt_rollv = 22;
- required float ornt_pitchv = 23;
- required float ornt_yawv = 24;
- required float ornt_rolla = 25;
- required float ornt_pitcha = 26;
- required float ornt_yawa = 27;
- required float ornt_ax = 28;
- required float ornt_ay = 29;
- required float ornt_az = 30;
- required float ornt_gx = 31;
- required float ornt_gy = 32;
- required float ornt_gz = 33;
- required float ornt_mx = 34;
- required float ornt_my = 35;
- required float ornt_mz = 36;
- required float ornt_temp = 37;
-}
diff --git a/MIDAS/src/hilsim/hilsimpacket_pb2.py b/MIDAS/src/hilsim/hilsimpacket_pb2.py
deleted file mode 100644
index ff60dffb..00000000
--- a/MIDAS/src/hilsim/hilsimpacket_pb2.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: hilsimpacket.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf.internal import builder as _builder
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x12hilsimpacket.proto\"\xfa\x05\n\x0cHILSIMPacket\x12\x13\n\x0bimu_high_ax\x18\x01 \x02(\x02\x12\x13\n\x0bimu_high_ay\x18\x02 \x02(\x02\x12\x13\n\x0bimu_high_az\x18\x03 \x02(\x02\x12\x1a\n\x12\x62\x61rometer_altitude\x18\x04 \x02(\x02\x12\x1d\n\x15\x62\x61rometer_temperature\x18\x05 \x02(\x02\x12\x1a\n\x12\x62\x61rometer_pressure\x18\x06 \x02(\x02\x12\x12\n\nimu_low_ax\x18\x07 \x02(\x02\x12\x12\n\nimu_low_ay\x18\x08 \x02(\x02\x12\x12\n\nimu_low_az\x18\t \x02(\x02\x12\x16\n\x0eimu_low_lsm_ax\x18\n \x02(\x02\x12\x16\n\x0eimu_low_lsm_ay\x18\x0b \x02(\x02\x12\x16\n\x0eimu_low_lsm_az\x18\x0c \x02(\x02\x12\x16\n\x0eimu_low_lsm_gx\x18\r \x02(\x02\x12\x16\n\x0eimu_low_lsm_gy\x18\x0e \x02(\x02\x12\x16\n\x0eimu_low_lsm_gz\x18\x0f \x02(\x02\x12\r\n\x05mag_x\x18\x10 \x02(\x02\x12\r\n\x05mag_y\x18\x11 \x02(\x02\x12\r\n\x05mag_z\x18\x12 \x02(\x02\x12\x11\n\tornt_roll\x18\x13 \x02(\x02\x12\x12\n\nornt_pitch\x18\x14 \x02(\x02\x12\x10\n\x08ornt_yaw\x18\x15 \x02(\x02\x12\x12\n\nornt_rollv\x18\x16 \x02(\x02\x12\x13\n\x0bornt_pitchv\x18\x17 \x02(\x02\x12\x11\n\tornt_yawv\x18\x18 \x02(\x02\x12\x12\n\nornt_rolla\x18\x19 \x02(\x02\x12\x13\n\x0bornt_pitcha\x18\x1a \x02(\x02\x12\x11\n\tornt_yawa\x18\x1b \x02(\x02\x12\x0f\n\x07ornt_ax\x18\x1c \x02(\x02\x12\x0f\n\x07ornt_ay\x18\x1d \x02(\x02\x12\x0f\n\x07ornt_az\x18\x1e \x02(\x02\x12\x0f\n\x07ornt_gx\x18\x1f \x02(\x02\x12\x0f\n\x07ornt_gy\x18 \x02(\x02\x12\x0f\n\x07ornt_gz\x18! \x02(\x02\x12\x0f\n\x07ornt_mx\x18\" \x02(\x02\x12\x0f\n\x07ornt_my\x18# \x02(\x02\x12\x0f\n\x07ornt_mz\x18$ \x02(\x02\x12\x11\n\tornt_temp\x18% \x02(\x02')
-
-_globals = globals()
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'hilsimpacket_pb2', _globals)
-if _descriptor._USE_C_DESCRIPTORS == False:
- DESCRIPTOR._options = None
- _globals['_HILSIMPACKET']._serialized_start=23
- _globals['_HILSIMPACKET']._serialized_end=785
-# @@protoc_insertion_point(module_scope)
diff --git a/MIDAS/src/hilsim/main.cpp b/MIDAS/src/hilsim/main.cpp
index c56e07ce..ea157234 100644
--- a/MIDAS/src/hilsim/main.cpp
+++ b/MIDAS/src/hilsim/main.cpp
@@ -1,63 +1,68 @@
#include
-#include
-#include
+
+#include
+#include
#include
-#include "global_packet.h"
+#include
+
+#include "sensor_data.h"
+#include "log_checksum.h"
-HILSIMPacket global_packet = HILSIMPacket_init_zero;
+#include "SDLog.h"
-MultipleLogSink<> sink;
+MultipleLogSink sink;
RocketSystems systems{.log_sink = sink};
-DECLARE_THREAD(hilsim, void*arg) {
- uint8_t buffer[HILSIMPacket_size];
- int n = 0;
- // Debug kamaji output to verify if we're reading the correct packets
- while (Serial.read() != 33);
+void setup(){
+ Serial.begin(115200);
+ while (!Serial);
+ while (!Serial.available()) {}
+ while (Serial.read() != 33) ;
char magic[] = {69, 110, 117, 109, 99, 108, 97, 119, 0};
- Serial.println(magic);
- Serial.println(__TIME__);
- Serial.println(__DATE__);
+ Serial.print(magic);
+ Serial.print('\n');
+ Serial.print(LOG_CHECKSUM);
+ Serial.print('\n');
Serial.flush();
- while (true) {
- while (!Serial.available());
- uint8_t a = Serial.read();
- uint8_t b = Serial.read();
- uint16_t length = (uint16_t) b + (((uint16_t) a) << 8);
- // Parse the two bytes as integers
-
- size_t hilsim_packet_size = Serial.readBytes(buffer, length);
- // Serial.print(length);
- // Serial.print(" ");
- // Serial.printf("%d %d ", a, b);
- HILSIMPacket packet = HILSIMPacket_init_zero;
- pb_istream_t stream = pb_istream_from_buffer(buffer, hilsim_packet_size);
- bool status = pb_decode(&stream, HILSIMPacket_fields, &packet);
- if (!status) {
- THREAD_SLEEP(10);
- continue;
- }
- global_packet = packet;
- RocketState rocket_state = RocketState_init_zero;
- rocket_state.rocket_state = (int) (100 * sin((double)n / 360));
- uint8_t buffer2[RocketState_size];
- pb_ostream_t output_stream = pb_ostream_from_buffer(buffer, sizeof(buffer));
- status = pb_encode(&output_stream, RocketState_fields, &rocket_state);
- Serial.write(output_stream.bytes_written);
- Serial.write(buffer, output_stream.bytes_written);
- Serial.flush();
- n++;
-
- THREAD_SLEEP(10);
- }
-}
+ //begin sensor SPI bus
+ // Serial.println("Starting SPI...");
+ SPI.begin(SPI_SCK, SPI_MISO, SPI_MOSI);
+
+ //begin I2C bus
+ // Serial.println("Starting I2C...");
+ Wire.begin(I2C_SDA, I2C_SCL);
+
+ //set all chip selects high (deselected)
+ pinMode(MS5611_CS, OUTPUT);
+ pinMode(LSM6DS3_CS, OUTPUT);
+ pinMode(KX134_CS, OUTPUT);
+ pinMode(ADXL355_CS, OUTPUT);
+ pinMode(LIS3MDL_CS, OUTPUT);
+ pinMode(BNO086_CS, OUTPUT);
+ pinMode(CAN_CS, OUTPUT);
+ pinMode(RFM96_CS, OUTPUT);
+ digitalWrite(MS5611_CS, HIGH);
+ digitalWrite(LSM6DS3_CS, HIGH);
+ digitalWrite(KX134_CS, HIGH);
+ digitalWrite(ADXL355_CS, HIGH);
+ digitalWrite(LIS3MDL_CS, HIGH);
+ digitalWrite(BNO086_CS, HIGH);
+ digitalWrite(CAN_CS, HIGH);
+ digitalWrite(RFM96_CS, HIGH);
+
+ //configure output leds
+ gpioPinMode(LED_BLUE, OUTPUT);
+ gpioPinMode(LED_GREEN, OUTPUT);
+ gpioPinMode(LED_ORANGE, OUTPUT);
+ gpioPinMode(LED_RED, OUTPUT);
+
+ delay(200);
+
+ //init and start threads
+ begin_systems(&systems);
-void setup() {
- Serial.begin(9600);
- while (!Serial);
- hilsim_thread(nullptr);
}
void loop(){}
diff --git a/MIDAS/src/hilsim/nanopb_generator/nanopb_generator.py b/MIDAS/src/hilsim/nanopb_generator/nanopb_generator.py
deleted file mode 100644
index 84372fc6..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/nanopb_generator.py
+++ /dev/null
@@ -1,2586 +0,0 @@
-#!/usr/bin/env python3
-# kate: replace-tabs on; indent-width 4;
-
-from __future__ import unicode_literals
-
-'''Generate header file for nanopb from a ProtoBuf FileDescriptorSet.'''
-nanopb_version = "nanopb-0.4.7"
-
-import sys
-import re
-import codecs
-import copy
-import itertools
-import tempfile
-import shutil
-import shlex
-import os
-from functools import reduce
-
-# Python-protobuf breaks easily with protoc version differences if
-# using the cpp or upb implementation. Force it to use pure Python
-# implementation. Performance is not very important in the generator.
-if not os.getenv("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"):
- os.putenv("PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION", "python")
- os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
-
-try:
- # Make sure grpc_tools gets included in binary package if it is available
- import grpc_tools.protoc
-except:
- pass
-
-try:
- import google.protobuf.text_format as text_format
- import google.protobuf.descriptor_pb2 as descriptor
- import google.protobuf.compiler.plugin_pb2 as plugin_pb2
- import google.protobuf.reflection as reflection
- import google.protobuf.descriptor
-except:
- sys.stderr.write('''
- **********************************************************************
- *** Could not import the Google protobuf Python libraries ***
- *** ***
- *** Easiest solution is often to install the dependencies via pip: ***
- *** pip install protobuf grpcio-tools ***
- **********************************************************************
- ''' + '\n')
- raise
-
-# Depending on how this script is run, we may or may not have PEP366 package name
-# available for relative imports.
-if not __package__:
- import proto
- from proto._utils import invoke_protoc
- from proto import TemporaryDirectory
-else:
- from . import proto
- from .proto._utils import invoke_protoc
- from .proto import TemporaryDirectory
-
-if getattr(sys, 'frozen', False):
- # Binary package, just import the file
- from proto import nanopb_pb2
-else:
- # Try to rebuild nanopb_pb2.py if necessary
- nanopb_pb2 = proto.load_nanopb_pb2()
-
-try:
- # Add some dummy imports to keep packaging tools happy.
- import google # bbfreeze seems to need these
- import pkg_resources # pyinstaller / protobuf 2.5 seem to need these
- from proto import nanopb_pb2 # pyinstaller seems to need this
- import pkg_resources.py2_warn
-except:
- # Don't care, we will error out later if it is actually important.
- pass
-
-# ---------------------------------------------------------------------------
-# Generation of single fields
-# ---------------------------------------------------------------------------
-
-import time
-import os.path
-
-# Values are tuple (c type, pb type, encoded size, data_size)
-FieldD = descriptor.FieldDescriptorProto
-datatypes = {
- FieldD.TYPE_BOOL: ('bool', 'BOOL', 1, 4),
- FieldD.TYPE_DOUBLE: ('double', 'DOUBLE', 8, 8),
- FieldD.TYPE_FIXED32: ('uint32_t', 'FIXED32', 4, 4),
- FieldD.TYPE_FIXED64: ('uint64_t', 'FIXED64', 8, 8),
- FieldD.TYPE_FLOAT: ('float', 'FLOAT', 4, 4),
- FieldD.TYPE_INT32: ('int32_t', 'INT32', 10, 4),
- FieldD.TYPE_INT64: ('int64_t', 'INT64', 10, 8),
- FieldD.TYPE_SFIXED32: ('int32_t', 'SFIXED32', 4, 4),
- FieldD.TYPE_SFIXED64: ('int64_t', 'SFIXED64', 8, 8),
- FieldD.TYPE_SINT32: ('int32_t', 'SINT32', 5, 4),
- FieldD.TYPE_SINT64: ('int64_t', 'SINT64', 10, 8),
- FieldD.TYPE_UINT32: ('uint32_t', 'UINT32', 5, 4),
- FieldD.TYPE_UINT64: ('uint64_t', 'UINT64', 10, 8),
-
- # Integer size override options
- (FieldD.TYPE_INT32, nanopb_pb2.IS_8): ('int8_t', 'INT32', 10, 1),
- (FieldD.TYPE_INT32, nanopb_pb2.IS_16): ('int16_t', 'INT32', 10, 2),
- (FieldD.TYPE_INT32, nanopb_pb2.IS_32): ('int32_t', 'INT32', 10, 4),
- (FieldD.TYPE_INT32, nanopb_pb2.IS_64): ('int64_t', 'INT32', 10, 8),
- (FieldD.TYPE_SINT32, nanopb_pb2.IS_8): ('int8_t', 'SINT32', 2, 1),
- (FieldD.TYPE_SINT32, nanopb_pb2.IS_16): ('int16_t', 'SINT32', 3, 2),
- (FieldD.TYPE_SINT32, nanopb_pb2.IS_32): ('int32_t', 'SINT32', 5, 4),
- (FieldD.TYPE_SINT32, nanopb_pb2.IS_64): ('int64_t', 'SINT32', 10, 8),
- (FieldD.TYPE_UINT32, nanopb_pb2.IS_8): ('uint8_t', 'UINT32', 2, 1),
- (FieldD.TYPE_UINT32, nanopb_pb2.IS_16): ('uint16_t','UINT32', 3, 2),
- (FieldD.TYPE_UINT32, nanopb_pb2.IS_32): ('uint32_t','UINT32', 5, 4),
- (FieldD.TYPE_UINT32, nanopb_pb2.IS_64): ('uint64_t','UINT32', 10, 8),
- (FieldD.TYPE_INT64, nanopb_pb2.IS_8): ('int8_t', 'INT64', 10, 1),
- (FieldD.TYPE_INT64, nanopb_pb2.IS_16): ('int16_t', 'INT64', 10, 2),
- (FieldD.TYPE_INT64, nanopb_pb2.IS_32): ('int32_t', 'INT64', 10, 4),
- (FieldD.TYPE_INT64, nanopb_pb2.IS_64): ('int64_t', 'INT64', 10, 8),
- (FieldD.TYPE_SINT64, nanopb_pb2.IS_8): ('int8_t', 'SINT64', 2, 1),
- (FieldD.TYPE_SINT64, nanopb_pb2.IS_16): ('int16_t', 'SINT64', 3, 2),
- (FieldD.TYPE_SINT64, nanopb_pb2.IS_32): ('int32_t', 'SINT64', 5, 4),
- (FieldD.TYPE_SINT64, nanopb_pb2.IS_64): ('int64_t', 'SINT64', 10, 8),
- (FieldD.TYPE_UINT64, nanopb_pb2.IS_8): ('uint8_t', 'UINT64', 2, 1),
- (FieldD.TYPE_UINT64, nanopb_pb2.IS_16): ('uint16_t','UINT64', 3, 2),
- (FieldD.TYPE_UINT64, nanopb_pb2.IS_32): ('uint32_t','UINT64', 5, 4),
- (FieldD.TYPE_UINT64, nanopb_pb2.IS_64): ('uint64_t','UINT64', 10, 8),
-}
-
-class NamingStyle:
- def enum_name(self, name):
- return "_%s" % (name)
-
- def struct_name(self, name):
- return "_%s" % (name)
-
- def type_name(self, name):
- return "%s" % (name)
-
- def define_name(self, name):
- return "%s" % (name)
-
- def var_name(self, name):
- return "%s" % (name)
-
- def enum_entry(self, name):
- return "%s" % (name)
-
- def func_name(self, name):
- return "%s" % (name)
-
- def bytes_type(self, struct_name, name):
- return "%s_%s_t" % (struct_name, name)
-
-class NamingStyleC(NamingStyle):
- def enum_name(self, name):
- return self.underscore(name)
-
- def struct_name(self, name):
- return self.underscore(name)
-
- def type_name(self, name):
- return "%s_t" % self.underscore(name)
-
- def define_name(self, name):
- return self.underscore(name).upper()
-
- def var_name(self, name):
- return self.underscore(name)
-
- def enum_entry(self, name):
- return self.underscore(name).upper()
-
- def func_name(self, name):
- return self.underscore(name)
-
- def bytes_type(self, struct_name, name):
- return "%s_%s_t" % (self.underscore(struct_name), self.underscore(name))
-
- def underscore(self, word):
- word = str(word)
- word = re.sub(r"([A-Z]+)([A-Z][a-z])", r'\1_\2', word)
- word = re.sub(r"([a-z\d])([A-Z])", r'\1_\2', word)
- word = word.replace("-", "_")
- return word.lower()
-
-class Globals:
- '''Ugly global variables, should find a good way to pass these.'''
- verbose_options = False
- separate_options = []
- matched_namemasks = set()
- protoc_insertion_points = False
- naming_style = NamingStyle()
-
-# String types and file encoding for Python2 UTF-8 support
-if sys.version_info.major == 2:
- import codecs
- open = codecs.open
- strtypes = (unicode, str)
-
- def str(x):
- try:
- return strtypes[1](x)
- except UnicodeEncodeError:
- return strtypes[0](x)
-else:
- strtypes = (str, )
-
-
-class Names:
- '''Keeps a set of nested names and formats them to C identifier.'''
- def __init__(self, parts = ()):
- if isinstance(parts, Names):
- parts = parts.parts
- elif isinstance(parts, strtypes):
- parts = (parts,)
- self.parts = tuple(parts)
-
- if self.parts == ('',):
- self.parts = ()
-
- def __str__(self):
- return '_'.join(self.parts)
-
- def __repr__(self):
- return 'Names(%s)' % ','.join("'%s'" % x for x in self.parts)
-
- def __add__(self, other):
- if isinstance(other, strtypes):
- return Names(self.parts + (other,))
- elif isinstance(other, Names):
- return Names(self.parts + other.parts)
- elif isinstance(other, tuple):
- return Names(self.parts + other)
- else:
- raise ValueError("Name parts should be of type str")
-
- def __eq__(self, other):
- return isinstance(other, Names) and self.parts == other.parts
-
- def __lt__(self, other):
- if not isinstance(other, Names):
- return NotImplemented
- return str(self) < str(other)
-
-def names_from_type_name(type_name):
- '''Parse Names() from FieldDescriptorProto type_name'''
- if type_name[0] != '.':
- raise NotImplementedError("Lookup of non-absolute type names is not supported")
- return Names(type_name[1:].split('.'))
-
-def varint_max_size(max_value):
- '''Returns the maximum number of bytes a varint can take when encoded.'''
- if max_value < 0:
- max_value = 2**64 - max_value
- for i in range(1, 11):
- if (max_value >> (i * 7)) == 0:
- return i
- raise ValueError("Value too large for varint: " + str(max_value))
-
-assert varint_max_size(-1) == 10
-assert varint_max_size(0) == 1
-assert varint_max_size(127) == 1
-assert varint_max_size(128) == 2
-
-class EncodedSize:
- '''Class used to represent the encoded size of a field or a message.
- Consists of a combination of symbolic sizes and integer sizes.'''
- def __init__(self, value = 0, symbols = [], declarations = [], required_defines = []):
- if isinstance(value, EncodedSize):
- self.value = value.value
- self.symbols = value.symbols
- self.declarations = value.declarations
- self.required_defines = value.required_defines
- elif isinstance(value, strtypes + (Names,)):
- self.symbols = [str(value)]
- self.value = 0
- self.declarations = []
- self.required_defines = [str(value)]
- else:
- self.value = value
- self.symbols = symbols
- self.declarations = declarations
- self.required_defines = required_defines
-
- def __add__(self, other):
- if isinstance(other, int):
- return EncodedSize(self.value + other, self.symbols, self.declarations, self.required_defines)
- elif isinstance(other, strtypes + (Names,)):
- return EncodedSize(self.value, self.symbols + [str(other)], self.declarations, self.required_defines + [str(other)])
- elif isinstance(other, EncodedSize):
- return EncodedSize(self.value + other.value, self.symbols + other.symbols,
- self.declarations + other.declarations, self.required_defines + other.required_defines)
- else:
- raise ValueError("Cannot add size: " + repr(other))
-
- def __mul__(self, other):
- if isinstance(other, int):
- return EncodedSize(self.value * other, [str(other) + '*' + s for s in self.symbols],
- self.declarations, self.required_defines)
- else:
- raise ValueError("Cannot multiply size: " + repr(other))
-
- def __str__(self):
- if not self.symbols:
- return str(self.value)
- else:
- return '(' + str(self.value) + ' + ' + ' + '.join(self.symbols) + ')'
-
- def __repr__(self):
- return 'EncodedSize(%s, %s, %s, %s)' % (self.value, self.symbols, self.declarations, self.required_defines)
-
- def get_declarations(self):
- '''Get any declarations that must appear alongside this encoded size definition,
- such as helper union {} types.'''
- return '\n'.join(self.declarations)
-
- def get_cpp_guard(self, local_defines):
- '''Get an #if preprocessor statement listing all defines that are required for this definition.'''
- needed = [x for x in self.required_defines if x not in local_defines]
- if needed:
- return '#if ' + ' && '.join(['defined(%s)' % x for x in needed]) + "\n"
- else:
- return ''
-
- def upperlimit(self):
- if not self.symbols:
- return self.value
- else:
- return 2**32 - 1
-
-class ProtoElement(object):
- # Constants regarding path of proto elements in file descriptor.
- # They are used to connect proto elements with source code information (comments)
- # These values come from:
- # https://github.com/google/protobuf/blob/master/src/google/protobuf/descriptor.proto
- FIELD = 2
- MESSAGE = 4
- ENUM = 5
- NESTED_TYPE = 3
- NESTED_ENUM = 4
-
- def __init__(self, path, comments = None):
- '''
- path is a tuple containing integers (type, index, ...)
- comments is a dictionary mapping between element path & SourceCodeInfo.Location
- (contains information about source comments).
- '''
- assert(isinstance(path, tuple))
- self.element_path = path
- self.comments = comments or {}
-
- def get_member_comments(self, index):
- '''Get comments for a member of enum or message.'''
- return self.get_comments((ProtoElement.FIELD, index), leading_indent = True)
-
- def format_comment(self, comment):
- '''Put comment inside /* */ and sanitize comment contents'''
- comment = comment.strip()
- comment = comment.replace('/*', '/ *')
- comment = comment.replace('*/', '* /')
- return "/* %s */" % comment
-
- def get_comments(self, member_path = (), leading_indent = False):
- '''Get leading & trailing comments for a protobuf element.
-
- member_path is the proto path of an element or member (ex. [5 0] or [4 1 2 0])
- leading_indent is a flag that indicates if leading comments should be indented
- '''
-
- # Obtain SourceCodeInfo.Location object containing comment
- # information (based on the member path)
- path = self.element_path + member_path
- comment = self.comments.get(path)
-
- leading_comment = ""
- trailing_comment = ""
-
- if not comment:
- return leading_comment, trailing_comment
-
- if comment.leading_comments:
- leading_comment = " " if leading_indent else ""
- leading_comment += self.format_comment(comment.leading_comments)
-
- if comment.trailing_comments:
- trailing_comment = self.format_comment(comment.trailing_comments)
-
- return leading_comment, trailing_comment
-
-
-class Enum(ProtoElement):
- def __init__(self, names, desc, enum_options, element_path, comments):
- '''
- desc is EnumDescriptorProto
- index is the index of this enum element inside the file
- comments is a dictionary mapping between element path & SourceCodeInfo.Location
- (contains information about source comments)
- '''
- super(Enum, self).__init__(element_path, comments)
-
- self.options = enum_options
- self.names = names
-
- # by definition, `names` include this enum's name
- base_name = Names(names.parts[:-1])
-
- if enum_options.long_names:
- self.values = [(names + x.name, x.number) for x in desc.value]
- else:
- self.values = [(base_name + x.name, x.number) for x in desc.value]
-
- self.value_longnames = [self.names + x.name for x in desc.value]
- self.packed = enum_options.packed_enum
-
- def has_negative(self):
- for n, v in self.values:
- if v < 0:
- return True
- return False
-
- def encoded_size(self):
- return max([varint_max_size(v) for n,v in self.values])
-
- def __repr__(self):
- return 'Enum(%s)' % self.names
-
- def __str__(self):
- leading_comment, trailing_comment = self.get_comments()
-
- result = ''
- if leading_comment:
- result = '%s\n' % leading_comment
-
- result += 'typedef enum %s {' % Globals.naming_style.enum_name(self.names)
- if trailing_comment:
- result += " " + trailing_comment
-
- result += "\n"
-
- enum_length = len(self.values)
- enum_values = []
- for index, (name, value) in enumerate(self.values):
- leading_comment, trailing_comment = self.get_member_comments(index)
-
- if leading_comment:
- enum_values.append(leading_comment)
-
- comma = ","
- if index == enum_length - 1:
- # last enum member should not end with a comma
- comma = ""
-
- enum_value = " %s = %d%s" % (Globals.naming_style.enum_entry(name), value, comma)
- if trailing_comment:
- enum_value += " " + trailing_comment
-
- enum_values.append(enum_value)
-
- result += '\n'.join(enum_values)
- result += '\n}'
-
- if self.packed:
- result += ' pb_packed'
-
- result += ' %s;' % Globals.naming_style.type_name(self.names)
- return result
-
- def auxiliary_defines(self):
- # sort the enum by value
- sorted_values = sorted(self.values, key = lambda x: (x[1], x[0]))
- result = '#define %s %s\n' % (
- Globals.naming_style.define_name('_%s_MIN' % self.names),
- Globals.naming_style.enum_entry(sorted_values[0][0]))
- result += '#define %s %s\n' % (
- Globals.naming_style.define_name('_%s_MAX' % self.names),
- Globals.naming_style.enum_entry(sorted_values[-1][0]))
- result += '#define %s ((%s)(%s+1))\n' % (
- Globals.naming_style.define_name('_%s_ARRAYSIZE' % self.names),
- Globals.naming_style.type_name(self.names),
- Globals.naming_style.enum_entry(sorted_values[-1][0]))
-
- if not self.options.long_names:
- # Define the long names always so that enum value references
- # from other files work properly.
- for i, x in enumerate(self.values):
- result += '#define %s %s\n' % (self.value_longnames[i], x[0])
-
- if self.options.enum_to_string:
- result += 'const char *%s(%s v);\n' % (
- Globals.naming_style.func_name('%s_name' % self.names),
- Globals.naming_style.type_name(self.names))
-
- return result
-
- def enum_to_string_definition(self):
- if not self.options.enum_to_string:
- return ""
-
- result = 'const char *%s(%s v) {\n' % (
- Globals.naming_style.func_name('%s_name' % self.names),
- Globals.naming_style.type_name(self.names))
-
- result += ' switch (v) {\n'
-
- for ((enumname, _), strname) in zip(self.values, self.value_longnames):
- # Strip off the leading type name from the string value.
- strval = str(strname)[len(str(self.names)) + 1:]
- result += ' case %s: return "%s";\n' % (
- Globals.naming_style.enum_entry(enumname),
- Globals.naming_style.enum_entry(strval))
-
- result += ' }\n'
- result += ' return "unknown";\n'
- result += '}\n'
-
- return result
-
-class FieldMaxSize:
- def __init__(self, worst = 0, checks = [], field_name = 'undefined'):
- if isinstance(worst, list):
- self.worst = max(i for i in worst if i is not None)
- else:
- self.worst = worst
-
- self.worst_field = field_name
- self.checks = list(checks)
-
- def extend(self, extend, field_name = None):
- self.worst = max(self.worst, extend.worst)
-
- if self.worst == extend.worst:
- self.worst_field = extend.worst_field
-
- self.checks.extend(extend.checks)
-
-class Field(ProtoElement):
- macro_x_param = 'X'
- macro_a_param = 'a'
-
- def __init__(self, struct_name, desc, field_options, element_path = (), comments = None):
- '''desc is FieldDescriptorProto'''
- ProtoElement.__init__(self, element_path, comments)
- self.tag = desc.number
- self.struct_name = struct_name
- self.union_name = None
- self.name = desc.name
- self.default = None
- self.max_size = None
- self.max_count = None
- self.array_decl = ""
- self.enc_size = None
- self.data_item_size = None
- self.ctype = None
- self.fixed_count = False
- self.callback_datatype = field_options.callback_datatype
- self.math_include_required = False
- self.sort_by_tag = field_options.sort_by_tag
-
- if field_options.type == nanopb_pb2.FT_INLINE:
- # Before nanopb-0.3.8, fixed length bytes arrays were specified
- # by setting type to FT_INLINE. But to handle pointer typed fields,
- # it makes sense to have it as a separate option.
- field_options.type = nanopb_pb2.FT_STATIC
- field_options.fixed_length = True
-
- # Parse field options
- if field_options.HasField("max_size"):
- self.max_size = field_options.max_size
-
- self.default_has = field_options.default_has
-
- if desc.type == FieldD.TYPE_STRING and field_options.HasField("max_length"):
- # max_length overrides max_size for strings
- self.max_size = field_options.max_length + 1
-
- if field_options.HasField("max_count"):
- self.max_count = field_options.max_count
-
- if desc.HasField('default_value'):
- self.default = desc.default_value
-
- # Check field rules, i.e. required/optional/repeated.
- can_be_static = True
- if desc.label == FieldD.LABEL_REPEATED:
- self.rules = 'REPEATED'
- if self.max_count is None:
- can_be_static = False
- else:
- self.array_decl = '[%d]' % self.max_count
- if field_options.fixed_count:
- self.rules = 'FIXARRAY'
-
- elif field_options.proto3:
- if desc.type == FieldD.TYPE_MESSAGE and not field_options.proto3_singular_msgs:
- # In most other protobuf libraries proto3 submessages have
- # "null" status. For nanopb, that is implemented as has_ field.
- self.rules = 'OPTIONAL'
- elif hasattr(desc, "proto3_optional") and desc.proto3_optional:
- # Protobuf 3.12 introduced optional fields for proto3 syntax
- self.rules = 'OPTIONAL'
- else:
- # Proto3 singular fields (without has_field)
- self.rules = 'SINGULAR'
- elif desc.label == FieldD.LABEL_REQUIRED:
- self.rules = 'REQUIRED'
- elif desc.label == FieldD.LABEL_OPTIONAL:
- self.rules = 'OPTIONAL'
- else:
- raise NotImplementedError(desc.label)
-
- # Check if the field can be implemented with static allocation
- # i.e. whether the data size is known.
- if desc.type == FieldD.TYPE_STRING and self.max_size is None:
- can_be_static = False
-
- if desc.type == FieldD.TYPE_BYTES and self.max_size is None:
- can_be_static = False
-
- # Decide how the field data will be allocated
- if field_options.type == nanopb_pb2.FT_DEFAULT:
- if can_be_static:
- field_options.type = nanopb_pb2.FT_STATIC
- else:
- field_options.type = field_options.fallback_type
-
- if field_options.type == nanopb_pb2.FT_STATIC and not can_be_static:
- raise Exception("Field '%s' is defined as static, but max_size or "
- "max_count is not given." % self.name)
-
- if field_options.fixed_count and self.max_count is None:
- raise Exception("Field '%s' is defined as fixed count, "
- "but max_count is not given." % self.name)
-
- if field_options.type == nanopb_pb2.FT_STATIC:
- self.allocation = 'STATIC'
- elif field_options.type == nanopb_pb2.FT_POINTER:
- self.allocation = 'POINTER'
- elif field_options.type == nanopb_pb2.FT_CALLBACK:
- self.allocation = 'CALLBACK'
- else:
- raise NotImplementedError(field_options.type)
-
- if field_options.HasField("type_override"):
- desc.type = field_options.type_override
-
- # Decide the C data type to use in the struct.
- if desc.type in datatypes:
- self.ctype, self.pbtype, self.enc_size, self.data_item_size = datatypes[desc.type]
-
- # Override the field size if user wants to use smaller integers
- if (desc.type, field_options.int_size) in datatypes:
- self.ctype, self.pbtype, self.enc_size, self.data_item_size = datatypes[(desc.type, field_options.int_size)]
- elif desc.type == FieldD.TYPE_ENUM:
- self.pbtype = 'ENUM'
- self.data_item_size = 4
- self.ctype = names_from_type_name(desc.type_name)
- if self.default is not None:
- self.default = self.ctype + self.default
- self.enc_size = None # Needs to be filled in when enum values are known
- elif desc.type == FieldD.TYPE_STRING:
- self.pbtype = 'STRING'
- self.ctype = 'char'
- if self.allocation == 'STATIC':
- self.ctype = 'char'
- self.array_decl += '[%d]' % self.max_size
- # -1 because of null terminator. Both pb_encode and pb_decode
- # check the presence of it.
- self.enc_size = varint_max_size(self.max_size) + self.max_size - 1
- elif desc.type == FieldD.TYPE_BYTES:
- if field_options.fixed_length:
- self.pbtype = 'FIXED_LENGTH_BYTES'
-
- if self.max_size is None:
- raise Exception("Field '%s' is defined as fixed length, "
- "but max_size is not given." % self.name)
-
- self.enc_size = varint_max_size(self.max_size) + self.max_size
- self.ctype = 'pb_byte_t'
- self.array_decl += '[%d]' % self.max_size
- else:
- self.pbtype = 'BYTES'
- self.ctype = 'pb_bytes_array_t'
- if self.allocation == 'STATIC':
- self.ctype = Globals.naming_style.bytes_type(self.struct_name, self.name)
- self.enc_size = varint_max_size(self.max_size) + self.max_size
- elif desc.type == FieldD.TYPE_MESSAGE:
- self.pbtype = 'MESSAGE'
- self.ctype = self.submsgname = names_from_type_name(desc.type_name)
- self.enc_size = None # Needs to be filled in after the message type is available
- if field_options.submsg_callback and self.allocation == 'STATIC':
- self.pbtype = 'MSG_W_CB'
- else:
- raise NotImplementedError(desc.type)
-
- if self.default and self.pbtype in ['FLOAT', 'DOUBLE']:
- if 'inf' in self.default or 'nan' in self.default:
- self.math_include_required = True
-
- def __lt__(self, other):
- return self.tag < other.tag
-
- def __repr__(self):
- return 'Field(%s)' % self.name
-
- def __str__(self):
- result = ''
-
- var_name = Globals.naming_style.var_name(self.name)
- type_name = Globals.naming_style.type_name(self.ctype) if isinstance(self.ctype, Names) else self.ctype
-
- if self.allocation == 'POINTER':
- if self.rules == 'REPEATED':
- if self.pbtype == 'MSG_W_CB':
- result += ' pb_callback_t cb_' + var_name + ';\n'
- result += ' pb_size_t ' + var_name + '_count;\n'
-
- if self.rules == 'FIXARRAY' and self.pbtype in ['STRING', 'BYTES']:
- # Pointer to fixed size array of pointers
- result += ' %s* (*%s)%s;' % (type_name, var_name, self.array_decl)
- elif self.pbtype == 'FIXED_LENGTH_BYTES' or self.rules == 'FIXARRAY':
- # Pointer to fixed size array of items
- result += ' %s (*%s)%s;' % (type_name, var_name, self.array_decl)
- elif self.rules == 'REPEATED' and self.pbtype in ['STRING', 'BYTES']:
- # String/bytes arrays need to be defined as pointers to pointers
- result += ' %s **%s;' % (type_name, var_name)
- elif self.pbtype in ['MESSAGE', 'MSG_W_CB']:
- # Use struct definition, so recursive submessages are possible
- result += ' struct %s *%s;' % (Globals.naming_style.struct_name(self.ctype), var_name)
- else:
- # Normal case, just a pointer to single item
- result += ' %s *%s;' % (type_name, var_name)
- elif self.allocation == 'CALLBACK':
- result += ' %s %s;' % (self.callback_datatype, var_name)
- else:
- if self.pbtype == 'MSG_W_CB' and self.rules in ['OPTIONAL', 'REPEATED']:
- result += ' pb_callback_t cb_' + var_name + ';\n'
-
- if self.rules == 'OPTIONAL':
- result += ' bool has_' + var_name + ';\n'
- elif self.rules == 'REPEATED':
- result += ' pb_size_t ' + var_name + '_count;\n'
-
- result += ' %s %s%s;' % (type_name, var_name, self.array_decl)
-
- leading_comment, trailing_comment = self.get_comments(leading_indent = True)
- if leading_comment: result = leading_comment + "\n" + result
- if trailing_comment: result = result + " " + trailing_comment
-
- return result
-
- def types(self):
- '''Return definitions for any special types this field might need.'''
- if self.pbtype == 'BYTES' and self.allocation == 'STATIC':
- result = 'typedef PB_BYTES_ARRAY_T(%d) %s;\n' % (self.max_size, Globals.naming_style.var_name(self.ctype))
- else:
- result = ''
- return result
-
- def get_dependencies(self):
- '''Get list of type names used by this field.'''
- if self.allocation == 'STATIC':
- return [str(self.ctype)]
- elif self.allocation == 'POINTER' and self.rules == 'FIXARRAY':
- return [str(self.ctype)]
- else:
- return []
-
- def get_initializer(self, null_init, inner_init_only = False):
- '''Return literal expression for this field's default value.
- null_init: If True, initialize to a 0 value instead of default from .proto
- inner_init_only: If True, exclude initialization for any count/has fields
- '''
-
- inner_init = None
- if self.pbtype in ['MESSAGE', 'MSG_W_CB']:
- if null_init:
- inner_init = Globals.naming_style.define_name('%s_init_zero' % self.ctype)
- else:
- inner_init = Globals.naming_style.define_name('%s_init_default' % self.ctype)
- elif self.default is None or null_init:
- if self.pbtype == 'STRING':
- inner_init = '""'
- elif self.pbtype == 'BYTES':
- inner_init = '{0, {0}}'
- elif self.pbtype == 'FIXED_LENGTH_BYTES':
- inner_init = '{0}'
- elif self.pbtype in ('ENUM', 'UENUM'):
- inner_init = '_%s_MIN' % Globals.naming_style.define_name(self.ctype)
- else:
- inner_init = '0'
- else:
- if self.pbtype == 'STRING':
- data = codecs.escape_encode(self.default.encode('utf-8'))[0]
- inner_init = '"' + data.decode('ascii') + '"'
- elif self.pbtype == 'BYTES':
- data = codecs.escape_decode(self.default)[0]
- data = ["0x%02x" % c for c in bytearray(data)]
- if len(data) == 0:
- inner_init = '{0, {0}}'
- else:
- inner_init = '{%d, {%s}}' % (len(data), ','.join(data))
- elif self.pbtype == 'FIXED_LENGTH_BYTES':
- data = codecs.escape_decode(self.default)[0]
- data = ["0x%02x" % c for c in bytearray(data)]
- if len(data) == 0:
- inner_init = '{0}'
- else:
- inner_init = '{%s}' % ','.join(data)
- elif self.pbtype in ['FIXED32', 'UINT32']:
- inner_init = str(self.default) + 'u'
- elif self.pbtype in ['FIXED64', 'UINT64']:
- inner_init = str(self.default) + 'ull'
- elif self.pbtype in ['SFIXED64', 'INT64']:
- inner_init = str(self.default) + 'll'
- elif self.pbtype in ['FLOAT', 'DOUBLE']:
- inner_init = str(self.default)
- if 'inf' in inner_init:
- inner_init = inner_init.replace('inf', 'INFINITY')
- elif 'nan' in inner_init:
- inner_init = inner_init.replace('nan', 'NAN')
- elif (not '.' in inner_init) and self.pbtype == 'FLOAT':
- inner_init += '.0f'
- elif self.pbtype == 'FLOAT':
- inner_init += 'f'
- else:
- inner_init = str(self.default)
-
- if inner_init_only:
- return inner_init
-
- outer_init = None
- if self.allocation == 'STATIC':
- if self.rules == 'REPEATED':
- outer_init = '0, {' + ', '.join([inner_init] * self.max_count) + '}'
- elif self.rules == 'FIXARRAY':
- outer_init = '{' + ', '.join([inner_init] * self.max_count) + '}'
- elif self.rules == 'OPTIONAL':
- if null_init or not self.default_has:
- outer_init = 'false, ' + inner_init
- else:
- outer_init = 'true, ' + inner_init
- else:
- outer_init = inner_init
- elif self.allocation == 'POINTER':
- if self.rules == 'REPEATED':
- outer_init = '0, NULL'
- else:
- outer_init = 'NULL'
- elif self.allocation == 'CALLBACK':
- if self.pbtype == 'EXTENSION':
- outer_init = 'NULL'
- else:
- outer_init = '{{NULL}, NULL}'
-
- if self.pbtype == 'MSG_W_CB' and self.rules in ['REPEATED', 'OPTIONAL']:
- outer_init = '{{NULL}, NULL}, ' + outer_init
-
- return outer_init
-
- def tags(self):
- '''Return the #define for the tag number of this field.'''
- identifier = Globals.naming_style.define_name('%s_%s_tag' % (self.struct_name, self.name))
- return '#define %-40s %d\n' % (identifier, self.tag)
-
- def fieldlist(self):
- '''Return the FIELDLIST macro entry for this field.
- Format is: X(a, ATYPE, HTYPE, LTYPE, field_name, tag)
- '''
- name = Globals.naming_style.var_name(self.name)
-
- if self.rules == "ONEOF":
- # For oneofs, make a tuple of the union name, union member name,
- # and the name inside the parent struct.
- if not self.anonymous:
- name = '(%s,%s,%s)' % (
- Globals.naming_style.var_name(self.union_name),
- Globals.naming_style.var_name(self.name),
- Globals.naming_style.var_name(self.union_name) + '.' +
- Globals.naming_style.var_name(self.name))
- else:
- name = '(%s,%s,%s)' % (
- Globals.naming_style.var_name(self.union_name),
- Globals.naming_style.var_name(self.name),
- Globals.naming_style.var_name(self.name))
-
- return '%s(%s, %-9s %-9s %-9s %-16s %3d)' % (self.macro_x_param,
- self.macro_a_param,
- self.allocation + ',',
- self.rules + ',',
- self.pbtype + ',',
- name + ',',
- self.tag)
-
- def data_size(self, dependencies):
- '''Return estimated size of this field in the C struct.
- This is used to try to automatically pick right descriptor size.
- If the estimate is wrong, it will result in compile time error and
- user having to specify descriptor_width option.
- '''
- if self.allocation == 'POINTER' or self.pbtype == 'EXTENSION':
- size = 8
- alignment = 8
- elif self.allocation == 'CALLBACK':
- size = 16
- alignment = 8
- elif self.pbtype in ['MESSAGE', 'MSG_W_CB']:
- alignment = 8
- if str(self.submsgname) in dependencies:
- other_dependencies = dict(x for x in dependencies.items() if x[0] != str(self.struct_name))
- size = dependencies[str(self.submsgname)].data_size(other_dependencies)
- else:
- size = 256 # Message is in other file, this is reasonable guess for most cases
- sys.stderr.write('Could not determine size for submessage %s, using default %d\n' % (self.submsgname, size))
-
- if self.pbtype == 'MSG_W_CB':
- size += 16
- elif self.pbtype in ['STRING', 'FIXED_LENGTH_BYTES']:
- size = self.max_size
- alignment = 4
- elif self.pbtype == 'BYTES':
- size = self.max_size + 4
- alignment = 4
- elif self.data_item_size is not None:
- size = self.data_item_size
- alignment = 4
- if self.data_item_size >= 8:
- alignment = 8
- else:
- raise Exception("Unhandled field type: %s" % self.pbtype)
-
- if self.rules in ['REPEATED', 'FIXARRAY'] and self.allocation == 'STATIC':
- size *= self.max_count
-
- if self.rules not in ('REQUIRED', 'SINGULAR'):
- size += 4
-
- if size % alignment != 0:
- # Estimate how much alignment requirements will increase the size.
- size += alignment - (size % alignment)
-
- return size
-
- def encoded_size(self, dependencies):
- '''Return the maximum size that this field can take when encoded,
- including the field tag. If the size cannot be determined, returns
- None.'''
-
- if self.allocation != 'STATIC':
- return None
-
- if self.pbtype in ['MESSAGE', 'MSG_W_CB']:
- encsize = None
- if str(self.submsgname) in dependencies:
- submsg = dependencies[str(self.submsgname)]
- other_dependencies = dict(x for x in dependencies.items() if x[0] != str(self.struct_name))
- encsize = submsg.encoded_size(other_dependencies)
-
- my_msg = dependencies.get(str(self.struct_name))
- external = (not my_msg or submsg.protofile != my_msg.protofile)
-
- if encsize and encsize.symbols and external:
- # Couldn't fully resolve the size of a dependency from
- # another file. Instead of including the symbols directly,
- # just use the #define SubMessage_size from the header.
- encsize = None
-
- if encsize is not None:
- # Include submessage length prefix
- encsize += varint_max_size(encsize.upperlimit())
- elif not external:
- # The dependency is from the same file and size cannot be
- # determined for it, thus we know it will not be possible
- # in runtime either.
- return None
-
- if encsize is None:
- # Submessage or its size cannot be found.
- # This can occur if submessage is defined in different
- # file, and it or its .options could not be found.
- # Instead of direct numeric value, reference the size that
- # has been #defined in the other file.
- encsize = EncodedSize(self.submsgname + 'size')
-
- # We will have to make a conservative assumption on the length
- # prefix size, though.
- encsize += 5
-
- elif self.pbtype in ['ENUM', 'UENUM']:
- if str(self.ctype) in dependencies:
- enumtype = dependencies[str(self.ctype)]
- encsize = enumtype.encoded_size()
- else:
- # Conservative assumption
- encsize = 10
-
- elif self.enc_size is None:
- raise RuntimeError("Could not determine encoded size for %s.%s"
- % (self.struct_name, self.name))
- else:
- encsize = EncodedSize(self.enc_size)
-
- encsize += varint_max_size(self.tag << 3) # Tag + wire type
-
- if self.rules in ['REPEATED', 'FIXARRAY']:
- # Decoders must be always able to handle unpacked arrays.
- # Therefore we have to reserve space for it, even though
- # we emit packed arrays ourselves. For length of 1, packed
- # arrays are larger however so we need to add allowance
- # for the length byte.
- encsize *= self.max_count
-
- if self.max_count == 1:
- encsize += 1
-
- return encsize
-
- def has_callbacks(self):
- return self.allocation == 'CALLBACK'
-
- def requires_custom_field_callback(self):
- return self.allocation == 'CALLBACK' and self.callback_datatype != 'pb_callback_t'
-
-class ExtensionRange(Field):
- def __init__(self, struct_name, range_start, field_options):
- '''Implements a special pb_extension_t* field in an extensible message
- structure. The range_start signifies the index at which the extensions
- start. Not necessarily all tags above this are extensions, it is merely
- a speed optimization.
- '''
- self.tag = range_start
- self.struct_name = struct_name
- self.name = 'extensions'
- self.pbtype = 'EXTENSION'
- self.rules = 'OPTIONAL'
- self.allocation = 'CALLBACK'
- self.ctype = 'pb_extension_t'
- self.array_decl = ''
- self.default = None
- self.max_size = 0
- self.max_count = 0
- self.data_item_size = 0
- self.fixed_count = False
- self.callback_datatype = 'pb_extension_t*'
-
- def requires_custom_field_callback(self):
- return False
-
- def __str__(self):
- return ' pb_extension_t *extensions;'
-
- def types(self):
- return ''
-
- def tags(self):
- return ''
-
- def encoded_size(self, dependencies):
- # We exclude extensions from the count, because they cannot be known
- # until runtime. Other option would be to return None here, but this
- # way the value remains useful if extensions are not used.
- return EncodedSize(0)
-
-class ExtensionField(Field):
- def __init__(self, fullname, desc, field_options):
- self.fullname = fullname
- self.extendee_name = names_from_type_name(desc.extendee)
- Field.__init__(self, self.fullname + "extmsg", desc, field_options)
-
- if self.rules != 'OPTIONAL':
- self.skip = True
- else:
- self.skip = False
- self.rules = 'REQUIRED' # We don't really want the has_field for extensions
- # currently no support for comments for extension fields => provide (), {}
- self.msg = Message(self.fullname + "extmsg", None, field_options, (), {})
- self.msg.fields.append(self)
-
- def tags(self):
- '''Return the #define for the tag number of this field.'''
- identifier = Globals.naming_style.define_name('%s_tag' % (self.fullname))
- return '#define %-40s %d\n' % (identifier, self.tag)
-
- def extension_decl(self):
- '''Declaration of the extension type in the .pb.h file'''
- if self.skip:
- msg = '/* Extension field %s was skipped because only "optional"\n' % self.fullname
- msg +=' type of extension fields is currently supported. */\n'
- return msg
-
- return ('extern const pb_extension_type_t %s; /* field type: %s */\n' %
- (Globals.naming_style.var_name(self.fullname), str(self).strip()))
-
- def extension_def(self, dependencies):
- '''Definition of the extension type in the .pb.c file'''
-
- if self.skip:
- return ''
-
- result = "/* Definition for extension field %s */\n" % self.fullname
- result += str(self.msg)
- result += self.msg.fields_declaration(dependencies)
- result += 'pb_byte_t %s_default[] = {0x00};\n' % self.msg.name
- result += self.msg.fields_definition(dependencies)
- result += 'const pb_extension_type_t %s = {\n' % Globals.naming_style.var_name(self.fullname)
- result += ' NULL,\n'
- result += ' NULL,\n'
- result += ' &%s_msg\n' % Globals.naming_style.type_name(self.msg.name)
- result += '};\n'
- return result
-
-
-# ---------------------------------------------------------------------------
-# Generation of oneofs (unions)
-# ---------------------------------------------------------------------------
-
-class OneOf(Field):
- def __init__(self, struct_name, oneof_desc, oneof_options):
- self.struct_name = struct_name
- self.name = oneof_desc.name
- self.ctype = 'union'
- self.pbtype = 'oneof'
- self.fields = []
- self.allocation = 'ONEOF'
- self.default = None
- self.rules = 'ONEOF'
- self.anonymous = oneof_options.anonymous_oneof
- self.sort_by_tag = oneof_options.sort_by_tag
- self.has_msg_cb = False
-
- def add_field(self, field):
- field.union_name = self.name
- field.rules = 'ONEOF'
- field.anonymous = self.anonymous
- self.fields.append(field)
-
- if self.sort_by_tag:
- self.fields.sort()
-
- if field.pbtype == 'MSG_W_CB':
- self.has_msg_cb = True
-
- # Sort by the lowest tag number inside union
- self.tag = min([f.tag for f in self.fields])
-
- def __str__(self):
- result = ''
- if self.fields:
- if self.has_msg_cb:
- result += ' pb_callback_t cb_' + Globals.naming_style.var_name(self.name) + ';\n'
-
- result += ' pb_size_t which_' + Globals.naming_style.var_name(self.name) + ";\n"
- result += ' union {\n'
- for f in self.fields:
- result += ' ' + str(f).replace('\n', '\n ') + '\n'
- if self.anonymous:
- result += ' };'
- else:
- result += ' } ' + Globals.naming_style.var_name(self.name) + ';'
- return result
-
- def types(self):
- return ''.join([f.types() for f in self.fields])
-
- def get_dependencies(self):
- deps = []
- for f in self.fields:
- deps += f.get_dependencies()
- return deps
-
- def get_initializer(self, null_init):
- if self.has_msg_cb:
- return '{{NULL}, NULL}, 0, {' + self.fields[0].get_initializer(null_init) + '}'
- else:
- return '0, {' + self.fields[0].get_initializer(null_init) + '}'
-
- def tags(self):
- return ''.join([f.tags() for f in self.fields])
-
- def data_size(self, dependencies):
- return max(f.data_size(dependencies) for f in self.fields)
-
- def encoded_size(self, dependencies):
- '''Returns the size of the largest oneof field.'''
- largest = 0
- dynamic_sizes = {}
- for f in self.fields:
- size = EncodedSize(f.encoded_size(dependencies))
- if size is None or size.value is None:
- return None
- elif size.symbols:
- dynamic_sizes[f.tag] = size
- elif size.value > largest:
- largest = size.value
-
- if not dynamic_sizes:
- # Simple case, all sizes were known at generator time
- return EncodedSize(largest)
-
- if largest > 0:
- # Some sizes were known, some were not
- dynamic_sizes[0] = EncodedSize(largest)
-
- # Couldn't find size for submessage at generation time,
- # have to rely on macro resolution at compile time.
- if len(dynamic_sizes) == 1:
- # Only one symbol was needed
- return list(dynamic_sizes.values())[0]
- else:
- # Use sizeof(union{}) construct to find the maximum size of
- # submessages.
- union_name = "%s_%s_size_union" % (self.struct_name, self.name)
- union_def = 'union %s {%s};\n' % (union_name, ' '.join('char f%d[%s];' % (k, s) for k,s in dynamic_sizes.items()))
- required_defs = list(itertools.chain.from_iterable(s.required_defines for k,s in dynamic_sizes.items()))
- return EncodedSize(0, ['sizeof(union %s)' % union_name], [union_def], required_defs)
-
- def has_callbacks(self):
- return bool([f for f in self.fields if f.has_callbacks()])
-
- def requires_custom_field_callback(self):
- return bool([f for f in self.fields if f.requires_custom_field_callback()])
-
-# ---------------------------------------------------------------------------
-# Generation of messages (structures)
-# ---------------------------------------------------------------------------
-
-
-class Message(ProtoElement):
- def __init__(self, names, desc, message_options, element_path, comments):
- super(Message, self).__init__(element_path, comments)
- self.name = names
- self.fields = []
- self.oneofs = {}
- self.desc = desc
- self.math_include_required = False
- self.packed = message_options.packed_struct
- self.descriptorsize = message_options.descriptorsize
-
- if message_options.msgid:
- self.msgid = message_options.msgid
-
- if desc is not None:
- self.load_fields(desc, message_options)
-
- self.callback_function = message_options.callback_function
- if not message_options.HasField('callback_function'):
- # Automatically assign a per-message callback if any field has
- # a special callback_datatype.
- for field in self.fields:
- if field.requires_custom_field_callback():
- self.callback_function = "%s_callback" % self.name
- break
-
- def load_fields(self, desc, message_options):
- '''Load field list from DescriptorProto'''
-
- no_unions = []
-
- if hasattr(desc, 'oneof_decl'):
- for i, f in enumerate(desc.oneof_decl):
- oneof_options = get_nanopb_suboptions(desc, message_options, self.name + f.name)
- if oneof_options.no_unions:
- no_unions.append(i) # No union, but add fields normally
- elif oneof_options.type == nanopb_pb2.FT_IGNORE:
- pass # No union and skip fields also
- else:
- oneof = OneOf(self.name, f, oneof_options)
- self.oneofs[i] = oneof
- else:
- sys.stderr.write('Note: This Python protobuf library has no OneOf support\n')
-
- for index, f in enumerate(desc.field):
- field_options = get_nanopb_suboptions(f, message_options, self.name + f.name)
- if field_options.type == nanopb_pb2.FT_IGNORE:
- continue
-
- if field_options.descriptorsize > self.descriptorsize:
- self.descriptorsize = field_options.descriptorsize
-
- field = Field(self.name, f, field_options, self.element_path + (ProtoElement.FIELD, index), self.comments)
- if hasattr(f, 'oneof_index') and f.HasField('oneof_index'):
- if hasattr(f, 'proto3_optional') and f.proto3_optional:
- no_unions.append(f.oneof_index)
-
- if f.oneof_index in no_unions:
- self.fields.append(field)
- elif f.oneof_index in self.oneofs:
- self.oneofs[f.oneof_index].add_field(field)
-
- if self.oneofs[f.oneof_index] not in self.fields:
- self.fields.append(self.oneofs[f.oneof_index])
- else:
- self.fields.append(field)
-
- if field.math_include_required:
- self.math_include_required = True
-
- if len(desc.extension_range) > 0:
- field_options = get_nanopb_suboptions(desc, message_options, self.name + 'extensions')
- range_start = min([r.start for r in desc.extension_range])
- if field_options.type != nanopb_pb2.FT_IGNORE:
- self.fields.append(ExtensionRange(self.name, range_start, field_options))
-
- if message_options.sort_by_tag:
- self.fields.sort()
-
- def get_dependencies(self):
- '''Get list of type names that this structure refers to.'''
- deps = []
- for f in self.fields:
- deps += f.get_dependencies()
- return deps
-
- def __repr__(self):
- return 'Message(%s)' % self.name
-
- def __str__(self):
- leading_comment, trailing_comment = self.get_comments()
-
- result = ''
- if leading_comment:
- result = '%s\n' % leading_comment
-
- result += 'typedef struct %s {' % Globals.naming_style.struct_name(self.name)
- if trailing_comment:
- result += " " + trailing_comment
-
- result += '\n'
-
- if not self.fields:
- # Empty structs are not allowed in C standard.
- # Therefore add a dummy field if an empty message occurs.
- result += ' char dummy_field;'
-
- result += '\n'.join([str(f) for f in self.fields])
-
- if Globals.protoc_insertion_points:
- result += '\n/* @@protoc_insertion_point(struct:%s) */' % self.name
-
- result += '\n}'
-
- if self.packed:
- result += ' pb_packed'
-
- result += ' %s;' % Globals.naming_style.type_name(self.name)
-
- if self.packed:
- result = 'PB_PACKED_STRUCT_START\n' + result
- result += '\nPB_PACKED_STRUCT_END'
-
- return result + '\n'
-
- def types(self):
- return ''.join([f.types() for f in self.fields])
-
- def get_initializer(self, null_init):
- if not self.fields:
- return '{0}'
-
- parts = []
- for field in self.fields:
- parts.append(field.get_initializer(null_init))
- return '{' + ', '.join(parts) + '}'
-
- def count_required_fields(self):
- '''Returns number of required fields inside this message'''
- count = 0
- for f in self.fields:
- if not isinstance(f, OneOf):
- if f.rules == 'REQUIRED':
- count += 1
- return count
-
- def all_fields(self):
- '''Iterate over all fields in this message, including nested OneOfs.'''
- for f in self.fields:
- if isinstance(f, OneOf):
- for f2 in f.fields:
- yield f2
- else:
- yield f
-
-
- def field_for_tag(self, tag):
- '''Given a tag number, return the Field instance.'''
- for field in self.all_fields():
- if field.tag == tag:
- return field
- return None
-
- def count_all_fields(self):
- '''Count the total number of fields in this message.'''
- count = 0
- for f in self.fields:
- if isinstance(f, OneOf):
- count += len(f.fields)
- else:
- count += 1
- return count
-
- def fields_declaration(self, dependencies):
- '''Return X-macro declaration of all fields in this message.'''
- Field.macro_x_param = 'X'
- Field.macro_a_param = 'a'
- while any(field.name == Field.macro_x_param for field in self.all_fields()):
- Field.macro_x_param += '_'
- while any(field.name == Field.macro_a_param for field in self.all_fields()):
- Field.macro_a_param += '_'
-
- # Field descriptor array must be sorted by tag number, pb_common.c relies on it.
- sorted_fields = list(self.all_fields())
- sorted_fields.sort(key = lambda x: x.tag)
-
- result = '#define %s_FIELDLIST(%s, %s) \\\n' % (
- Globals.naming_style.define_name(self.name),
- Field.macro_x_param,
- Field.macro_a_param)
- result += ' \\\n'.join(x.fieldlist() for x in sorted_fields)
- result += '\n'
-
- has_callbacks = bool([f for f in self.fields if f.has_callbacks()])
- if has_callbacks:
- if self.callback_function != 'pb_default_field_callback':
- result += "extern bool %s(pb_istream_t *istream, pb_ostream_t *ostream, const pb_field_t *field);\n" % self.callback_function
- result += "#define %s_CALLBACK %s\n" % (
- Globals.naming_style.define_name(self.name),
- self.callback_function)
- else:
- result += "#define %s_CALLBACK NULL\n" % Globals.naming_style.define_name(self.name)
-
- defval = self.default_value(dependencies)
- if defval:
- hexcoded = ''.join("\\x%02x" % ord(defval[i:i+1]) for i in range(len(defval)))
- result += '#define %s_DEFAULT (const pb_byte_t*)"%s\\x00"\n' % (
- Globals.naming_style.define_name(self.name),
- hexcoded)
- else:
- result += '#define %s_DEFAULT NULL\n' % Globals.naming_style.define_name(self.name)
-
- for field in sorted_fields:
- if field.pbtype in ['MESSAGE', 'MSG_W_CB']:
- if field.rules == 'ONEOF':
- result += "#define %s_%s_%s_MSGTYPE %s\n" % (
- Globals.naming_style.type_name(self.name),
- Globals.naming_style.var_name(field.union_name),
- Globals.naming_style.var_name(field.name),
- Globals.naming_style.type_name(field.ctype)
- )
- else:
- result += "#define %s_%s_MSGTYPE %s\n" % (
- Globals.naming_style.type_name(self.name),
- Globals.naming_style.var_name(field.name),
- Globals.naming_style.type_name(field.ctype)
- )
-
- return result
-
- def enumtype_defines(self):
- '''Defines to allow user code to refer to enum type of a specific field'''
- result = ''
- for field in self.all_fields():
- if field.pbtype in ['ENUM', "UENUM"]:
- if field.rules == 'ONEOF':
- result += "#define %s_%s_%s_ENUMTYPE %s\n" % (
- Globals.naming_style.type_name(self.name),
- Globals.naming_style.var_name(field.union_name),
- Globals.naming_style.var_name(field.name),
- Globals.naming_style.type_name(field.ctype)
- )
- else:
- result += "#define %s_%s_ENUMTYPE %s\n" % (
- Globals.naming_style.type_name(self.name),
- Globals.naming_style.var_name(field.name),
- Globals.naming_style.type_name(field.ctype)
- )
-
- return result
-
- def fields_declaration_cpp_lookup(self):
- result = 'template <>\n'
- result += 'struct MessageDescriptor<%s> {\n' % (self.name)
- result += ' static PB_INLINE_CONSTEXPR const pb_size_t fields_array_length = %d;\n' % (self.count_all_fields())
- result += ' static inline const pb_msgdesc_t* fields() {\n'
- result += ' return &%s_msg;\n' % (self.name)
- result += ' }\n'
- result += '};'
- return result
-
- def fields_definition(self, dependencies):
- '''Return the field descriptor definition that goes in .pb.c file.'''
- width = self.required_descriptor_width(dependencies)
- if width == 1:
- width = 'AUTO'
-
- result = 'PB_BIND(%s, %s, %s)\n' % (
- Globals.naming_style.define_name(self.name),
- Globals.naming_style.type_name(self.name),
- width)
- return result
-
- def required_descriptor_width(self, dependencies):
- '''Estimate how many words are necessary for each field descriptor.'''
- if self.descriptorsize != nanopb_pb2.DS_AUTO:
- return int(self.descriptorsize)
-
- if not self.fields:
- return 1
-
- max_tag = max(field.tag for field in self.all_fields())
- max_offset = self.data_size(dependencies)
- max_arraysize = max((field.max_count or 0) for field in self.all_fields())
- max_datasize = max(field.data_size(dependencies) for field in self.all_fields())
-
- if max_arraysize > 0xFFFF:
- return 8
- elif (max_tag > 0x3FF or max_offset > 0xFFFF or
- max_arraysize > 0x0FFF or max_datasize > 0x0FFF):
- return 4
- elif max_tag > 0x3F or max_offset > 0xFF:
- return 2
- else:
- # NOTE: Macro logic in pb.h ensures that width 1 will
- # be raised to 2 automatically for string/submsg fields
- # and repeated fields. Thus only tag and offset need to
- # be checked.
- return 1
-
- def data_size(self, dependencies):
- '''Return approximate sizeof(struct) in the compiled code.'''
- return sum(f.data_size(dependencies) for f in self.fields)
-
- def encoded_size(self, dependencies):
- '''Return the maximum size that this message can take when encoded.
- If the size cannot be determined, returns None.
- '''
- size = EncodedSize(0)
- for field in self.fields:
- fsize = field.encoded_size(dependencies)
- if fsize is None:
- return None
- size += fsize
-
- return size
-
- def default_value(self, dependencies):
- '''Generate serialized protobuf message that contains the
- default values for optional fields.'''
-
- if not self.desc:
- return b''
-
- if self.desc.options.map_entry:
- return b''
-
- optional_only = copy.deepcopy(self.desc)
-
- # Remove fields without default values
- # The iteration is done in reverse order to avoid remove() messing up iteration.
- for field in reversed(list(optional_only.field)):
- field.ClearField(str('extendee'))
- parsed_field = self.field_for_tag(field.number)
- if parsed_field is None or parsed_field.allocation != 'STATIC':
- optional_only.field.remove(field)
- elif (field.label == FieldD.LABEL_REPEATED or
- field.type == FieldD.TYPE_MESSAGE):
- optional_only.field.remove(field)
- elif hasattr(field, 'oneof_index') and field.HasField('oneof_index'):
- optional_only.field.remove(field)
- elif field.type == FieldD.TYPE_ENUM:
- # The partial descriptor doesn't include the enum type
- # so we fake it with int64.
- enumname = names_from_type_name(field.type_name)
- try:
- enumtype = dependencies[str(enumname)]
- except KeyError:
- raise Exception("Could not find enum type %s while generating default values for %s.\n" % (enumname, self.name)
- + "Try passing all source files to generator at once, or use -I option.")
-
- if not isinstance(enumtype, Enum):
- raise Exception("Expected enum type as %s, got %s" % (enumname, repr(enumtype)))
-
- if field.HasField('default_value'):
- defvals = [v for n,v in enumtype.values if n.parts[-1] == field.default_value]
- else:
- # If no default is specified, the default is the first value.
- defvals = [v for n,v in enumtype.values]
- if defvals and defvals[0] != 0:
- field.type = FieldD.TYPE_INT64
- field.default_value = str(defvals[0])
- field.ClearField(str('type_name'))
- else:
- optional_only.field.remove(field)
- elif not field.HasField('default_value'):
- optional_only.field.remove(field)
-
- if len(optional_only.field) == 0:
- return b''
-
- optional_only.ClearField(str('oneof_decl'))
- optional_only.ClearField(str('nested_type'))
- optional_only.ClearField(str('extension'))
- optional_only.ClearField(str('enum_type'))
- optional_only.name += str(id(self))
-
- desc = google.protobuf.descriptor.MakeDescriptor(optional_only)
- msg = reflection.MakeClass(desc)()
-
- for field in optional_only.field:
- if field.type == FieldD.TYPE_STRING:
- setattr(msg, field.name, field.default_value)
- elif field.type == FieldD.TYPE_BYTES:
- setattr(msg, field.name, codecs.escape_decode(field.default_value)[0])
- elif field.type in [FieldD.TYPE_FLOAT, FieldD.TYPE_DOUBLE]:
- setattr(msg, field.name, float(field.default_value))
- elif field.type == FieldD.TYPE_BOOL:
- setattr(msg, field.name, field.default_value == 'true')
- else:
- setattr(msg, field.name, int(field.default_value))
-
- return msg.SerializeToString()
-
-
-# ---------------------------------------------------------------------------
-# Processing of entire .proto files
-# ---------------------------------------------------------------------------
-
-def iterate_messages(desc, flatten = False, names = Names(), comment_path = ()):
- '''Recursively find all messages. For each, yield name, DescriptorProto, comment_path.'''
- if hasattr(desc, 'message_type'):
- submsgs = desc.message_type
- comment_path += (ProtoElement.MESSAGE,)
- else:
- submsgs = desc.nested_type
- comment_path += (ProtoElement.NESTED_TYPE,)
-
- for idx, submsg in enumerate(submsgs):
- sub_names = names + submsg.name
- sub_path = comment_path + (idx,)
- if flatten:
- yield Names(submsg.name), submsg, sub_path
- else:
- yield sub_names, submsg, sub_path
-
- for x in iterate_messages(submsg, flatten, sub_names, sub_path):
- yield x
-
-def iterate_extensions(desc, flatten = False, names = Names()):
- '''Recursively find all extensions.
- For each, yield name, FieldDescriptorProto.
- '''
- for extension in desc.extension:
- yield names, extension
-
- for subname, subdesc, comment_path in iterate_messages(desc, flatten, names):
- for extension in subdesc.extension:
- yield subname, extension
-
-def sort_dependencies(messages):
- '''Sort a list of Messages based on dependencies.'''
-
- # Construct first level list of dependencies
- dependencies = {}
- for message in messages:
- dependencies[str(message.name)] = set(message.get_dependencies())
-
- # Emit messages after all their dependencies have been processed
- remaining = list(messages)
- remainset = set(str(m.name) for m in remaining)
- while remaining:
- for candidate in remaining:
- if not remainset.intersection(dependencies[str(candidate.name)]):
- remaining.remove(candidate)
- remainset.remove(str(candidate.name))
- yield candidate
- break
- else:
- sys.stderr.write("Circular dependency in messages: " + ', '.join(remainset) + " (consider changing to FT_POINTER or FT_CALLBACK)\n")
- candidate = remaining.pop(0)
- remainset.remove(str(candidate.name))
- yield candidate
-
-def make_identifier(headername):
- '''Make #ifndef identifier that contains uppercase A-Z and digits 0-9'''
- result = ""
- for c in headername.upper():
- if c.isalnum():
- result += c
- else:
- result += '_'
- return result
-
-class MangleNames:
- '''Handles conversion of type names according to mangle_names option:
- M_NONE = 0; // Default, no typename mangling
- M_STRIP_PACKAGE = 1; // Strip current package name
- M_FLATTEN = 2; // Only use last path component
- M_PACKAGE_INITIALS = 3; // Replace the package name by the initials
- '''
- def __init__(self, fdesc, file_options):
- self.file_options = file_options
- self.mangle_names = file_options.mangle_names
- self.flatten = (self.mangle_names == nanopb_pb2.M_FLATTEN)
- self.strip_prefix = None
- self.replacement_prefix = None
- self.name_mapping = {}
- self.reverse_name_mapping = {}
- self.canonical_base = Names(fdesc.package.split('.'))
-
- if self.mangle_names == nanopb_pb2.M_STRIP_PACKAGE:
- self.strip_prefix = "." + fdesc.package
- elif self.mangle_names == nanopb_pb2.M_PACKAGE_INITIALS:
- self.strip_prefix = "." + fdesc.package
- self.replacement_prefix = ""
- for part in fdesc.package.split("."):
- self.replacement_prefix += part[0]
- elif file_options.package:
- self.strip_prefix = "." + fdesc.package
- self.replacement_prefix = file_options.package
-
- if self.strip_prefix == '.':
- self.strip_prefix = ''
-
- if self.replacement_prefix is not None:
- self.base_name = Names(self.replacement_prefix.split('.'))
- elif fdesc.package:
- self.base_name = Names(fdesc.package.split('.'))
- else:
- self.base_name = Names()
-
- def create_name(self, names):
- '''Create name for a new message / enum.
- Argument can be either string or Names instance.
- '''
- if str(names) not in self.name_mapping:
- if self.mangle_names in (nanopb_pb2.M_NONE, nanopb_pb2.M_PACKAGE_INITIALS):
- new_name = self.base_name + names
- elif self.mangle_names == nanopb_pb2.M_STRIP_PACKAGE:
- new_name = Names(names)
- elif isinstance(names, Names):
- new_name = Names(names.parts[-1])
- else:
- new_name = Names(names)
-
- if str(new_name) in self.reverse_name_mapping:
- sys.stderr.write("Warning: Duplicate name with mangle_names=%s: %s and %s map to %s\n" %
- (self.mangle_names, self.reverse_name_mapping[str(new_name)], names, new_name))
-
- self.name_mapping[str(names)] = new_name
- self.reverse_name_mapping[str(new_name)] = self.canonical_base + names
-
- return self.name_mapping[str(names)]
-
- def mangle_field_typename(self, typename):
- '''Mangle type name for a submessage / enum crossreference.
- Argument is a string.
- '''
- if self.mangle_names == nanopb_pb2.M_FLATTEN:
- return "." + typename.split(".")[-1]
-
- if self.strip_prefix is not None and typename.startswith(self.strip_prefix):
- if self.replacement_prefix is not None:
- return "." + self.replacement_prefix + typename[len(self.strip_prefix):]
- else:
- return typename[len(self.strip_prefix):]
-
- if self.file_options.package:
- return "." + self.replacement_prefix + typename
-
- return typename
-
- def unmangle(self, names):
- return self.reverse_name_mapping.get(str(names), names)
-
-class ProtoFile:
- def __init__(self, fdesc, file_options):
- '''Takes a FileDescriptorProto and parses it.'''
- self.fdesc = fdesc
- self.file_options = file_options
- self.dependencies = {}
- self.math_include_required = False
- self.parse()
- for message in self.messages:
- if message.math_include_required:
- self.math_include_required = True
- break
-
- # Some of types used in this file probably come from the file itself.
- # Thus it has implicit dependency on itself.
- self.add_dependency(self)
-
- def parse(self):
- self.enums = []
- self.messages = []
- self.extensions = []
- self.manglenames = MangleNames(self.fdesc, self.file_options)
-
- # process source code comment locations
- # ignores any locations that do not contain any comment information
- self.comment_locations = {
- tuple(location.path): location
- for location in self.fdesc.source_code_info.location
- if location.leading_comments or location.leading_detached_comments or location.trailing_comments
- }
-
- for index, enum in enumerate(self.fdesc.enum_type):
- name = self.manglenames.create_name(enum.name)
- enum_options = get_nanopb_suboptions(enum, self.file_options, name)
- enum_path = (ProtoElement.ENUM, index)
- self.enums.append(Enum(name, enum, enum_options, enum_path, self.comment_locations))
-
- for names, message, comment_path in iterate_messages(self.fdesc, self.manglenames.flatten):
- name = self.manglenames.create_name(names)
- message_options = get_nanopb_suboptions(message, self.file_options, name)
-
- if message_options.skip_message:
- continue
-
- message = copy.deepcopy(message)
- for field in message.field:
- if field.type in (FieldD.TYPE_MESSAGE, FieldD.TYPE_ENUM):
- field.type_name = self.manglenames.mangle_field_typename(field.type_name)
-
- self.messages.append(Message(name, message, message_options, comment_path, self.comment_locations))
- for index, enum in enumerate(message.enum_type):
- name = self.manglenames.create_name(names + enum.name)
- enum_options = get_nanopb_suboptions(enum, message_options, name)
- enum_path = comment_path + (ProtoElement.NESTED_ENUM, index)
- self.enums.append(Enum(name, enum, enum_options, enum_path, self.comment_locations))
-
- for names, extension in iterate_extensions(self.fdesc, self.manglenames.flatten):
- name = self.manglenames.create_name(names + extension.name)
- field_options = get_nanopb_suboptions(extension, self.file_options, name)
-
- extension = copy.deepcopy(extension)
- if extension.type in (FieldD.TYPE_MESSAGE, FieldD.TYPE_ENUM):
- extension.type_name = self.manglenames.mangle_field_typename(extension.type_name)
-
- if field_options.type != nanopb_pb2.FT_IGNORE:
- self.extensions.append(ExtensionField(name, extension, field_options))
-
- def add_dependency(self, other):
- for enum in other.enums:
- self.dependencies[str(enum.names)] = enum
- self.dependencies[str(other.manglenames.unmangle(enum.names))] = enum
- enum.protofile = other
-
- for msg in other.messages:
- self.dependencies[str(msg.name)] = msg
- self.dependencies[str(other.manglenames.unmangle(msg.name))] = msg
- msg.protofile = other
-
- # Fix field default values where enum short names are used.
- for enum in other.enums:
- if not enum.options.long_names:
- for message in self.messages:
- for field in message.all_fields():
- if field.default in enum.value_longnames:
- idx = enum.value_longnames.index(field.default)
- field.default = enum.values[idx][0]
-
- # Fix field data types where enums have negative values.
- for enum in other.enums:
- if not enum.has_negative():
- for message in self.messages:
- for field in message.all_fields():
- if field.pbtype == 'ENUM' and field.ctype == enum.names:
- field.pbtype = 'UENUM'
-
- def generate_header(self, includes, headername, options):
- '''Generate content for a header file.
- Generates strings, which should be concatenated and stored to file.
- '''
-
- yield '/* Automatically generated nanopb header */\n'
- if options.notimestamp:
- yield '/* Generated by %s */\n\n' % (nanopb_version)
- else:
- yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime())
-
- if self.fdesc.package:
- symbol = make_identifier(self.fdesc.package + '_' + headername)
- else:
- symbol = make_identifier(headername)
- yield '#ifndef PB_%s_INCLUDED\n' % symbol
- yield '#define PB_%s_INCLUDED\n' % symbol
- if self.math_include_required:
- yield '#include \n'
- try:
- yield options.libformat % ('pb.h')
- except TypeError:
- # no %s specified - use whatever was passed in as options.libformat
- yield options.libformat
- yield '\n'
-
- for incfile in self.file_options.include:
- # allow including system headers
- if (incfile.startswith('<')):
- yield '#include %s\n' % incfile
- else:
- yield options.genformat % incfile
- yield '\n'
-
- for incfile in includes:
- noext = os.path.splitext(incfile)[0]
- yield options.genformat % (noext + options.extension + options.header_extension)
- yield '\n'
-
- if Globals.protoc_insertion_points:
- yield '/* @@protoc_insertion_point(includes) */\n'
-
- yield '\n'
-
- yield '#if PB_PROTO_HEADER_VERSION != 40\n'
- yield '#error Regenerate this file with the current version of nanopb generator.\n'
- yield '#endif\n'
- yield '\n'
-
- if self.enums:
- yield '/* Enum definitions */\n'
- for enum in self.enums:
- yield str(enum) + '\n\n'
-
- if self.messages:
- yield '/* Struct definitions */\n'
- for msg in sort_dependencies(self.messages):
- yield msg.types()
- yield str(msg) + '\n'
- yield '\n'
-
- if self.extensions:
- yield '/* Extensions */\n'
- for extension in self.extensions:
- yield extension.extension_decl()
- yield '\n'
-
- yield '#ifdef __cplusplus\n'
- yield 'extern "C" {\n'
- yield '#endif\n\n'
-
- if self.enums:
- yield '/* Helper constants for enums */\n'
- for enum in self.enums:
- yield enum.auxiliary_defines() + '\n'
-
- for msg in self.messages:
- yield msg.enumtype_defines() + '\n'
- yield '\n'
-
- if self.messages:
- yield '/* Initializer values for message structs */\n'
- for msg in self.messages:
- identifier = Globals.naming_style.define_name('%s_init_default' % msg.name)
- yield '#define %-40s %s\n' % (identifier, msg.get_initializer(False))
- for msg in self.messages:
- identifier = Globals.naming_style.define_name('%s_init_zero' % msg.name)
- yield '#define %-40s %s\n' % (identifier, msg.get_initializer(True))
- yield '\n'
-
- yield '/* Field tags (for use in manual encoding/decoding) */\n'
- for msg in sort_dependencies(self.messages):
- for field in msg.fields:
- yield field.tags()
- for extension in self.extensions:
- yield extension.tags()
- yield '\n'
-
- yield '/* Struct field encoding specification for nanopb */\n'
- for msg in self.messages:
- yield msg.fields_declaration(self.dependencies) + '\n'
- for msg in self.messages:
- yield 'extern const pb_msgdesc_t %s_msg;\n' % Globals.naming_style.type_name(msg.name)
- yield '\n'
-
- yield '/* Defines for backwards compatibility with code written before nanopb-0.4.0 */\n'
- for msg in self.messages:
- yield '#define %s &%s_msg\n' % (
- Globals.naming_style.define_name('%s_fields' % msg.name),
- Globals.naming_style.type_name(msg.name))
- yield '\n'
-
- yield '/* Maximum encoded size of messages (where known) */\n'
- messagesizes = []
- for msg in self.messages:
- identifier = '%s_size' % msg.name
- messagesizes.append((identifier, msg.encoded_size(self.dependencies)))
-
- # If we require a symbol from another file, put a preprocessor if statement
- # around it to prevent compilation errors if the symbol is not actually available.
- local_defines = [identifier for identifier, msize in messagesizes if msize is not None]
-
- # emit size_unions, if any
- oneof_sizes = []
- for msg in self.messages:
- for f in msg.fields:
- if isinstance(f, OneOf):
- msize = f.encoded_size(self.dependencies)
- if msize is not None:
- oneof_sizes.append(msize)
- for msize in oneof_sizes:
- guard = msize.get_cpp_guard(local_defines)
- if guard:
- yield guard
- yield msize.get_declarations()
- if guard:
- yield '#endif\n'
-
- guards = {}
- for identifier, msize in messagesizes:
- if msize is not None:
- cpp_guard = msize.get_cpp_guard(local_defines)
- if cpp_guard not in guards:
- guards[cpp_guard] = set()
- guards[cpp_guard].add('#define %-40s %s' % (
- Globals.naming_style.define_name(identifier), msize))
- else:
- yield '/* %s depends on runtime parameters */\n' % identifier
- for guard, values in guards.items():
- if guard:
- yield guard
- for v in sorted(values):
- yield v
- yield '\n'
- if guard:
- yield '#endif\n'
- yield '\n'
-
- if [msg for msg in self.messages if hasattr(msg,'msgid')]:
- yield '/* Message IDs (where set with "msgid" option) */\n'
- for msg in self.messages:
- if hasattr(msg,'msgid'):
- yield '#define PB_MSG_%d %s\n' % (msg.msgid, msg.name)
- yield '\n'
-
- symbol = make_identifier(headername.split('.')[0])
- yield '#define %s_MESSAGES \\\n' % symbol
-
- for msg in self.messages:
- m = "-1"
- msize = msg.encoded_size(self.dependencies)
- if msize is not None:
- m = msize
- if hasattr(msg,'msgid'):
- yield '\tPB_MSG(%d,%s,%s) \\\n' % (msg.msgid, m, msg.name)
- yield '\n'
-
- for msg in self.messages:
- if hasattr(msg,'msgid'):
- yield '#define %s_msgid %d\n' % (msg.name, msg.msgid)
- yield '\n'
-
- # Check if there is any name mangling active
- pairs = [x for x in self.manglenames.reverse_name_mapping.items() if str(x[0]) != str(x[1])]
- if pairs:
- yield '/* Mapping from canonical names (mangle_names or overridden package name) */\n'
- for shortname, longname in pairs:
- yield '#define %s %s\n' % (longname, shortname)
- yield '\n'
-
- yield '#ifdef __cplusplus\n'
- yield '} /* extern "C" */\n'
- yield '#endif\n'
-
- if options.cpp_descriptors:
- yield '\n'
- yield '#ifdef __cplusplus\n'
- yield '/* Message descriptors for nanopb */\n'
- yield 'namespace nanopb {\n'
- for msg in self.messages:
- yield msg.fields_declaration_cpp_lookup() + '\n'
- yield '} // namespace nanopb\n'
- yield '\n'
- yield '#endif /* __cplusplus */\n'
- yield '\n'
-
- if Globals.protoc_insertion_points:
- yield '/* @@protoc_insertion_point(eof) */\n'
-
- # End of header
- yield '\n#endif\n'
-
- def generate_source(self, headername, options):
- '''Generate content for a source file.'''
-
- yield '/* Automatically generated nanopb constant definitions */\n'
- if options.notimestamp:
- yield '/* Generated by %s */\n\n' % (nanopb_version)
- else:
- yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime())
- yield options.genformat % (headername)
- yield '\n'
-
- if Globals.protoc_insertion_points:
- yield '/* @@protoc_insertion_point(includes) */\n'
-
- yield '#if PB_PROTO_HEADER_VERSION != 40\n'
- yield '#error Regenerate this file with the current version of nanopb generator.\n'
- yield '#endif\n'
- yield '\n'
-
- # Check if any messages exceed the 64 kB limit of 16-bit pb_size_t
- exceeds_64kB = []
- for msg in self.messages:
- size = msg.data_size(self.dependencies)
- if size >= 65536:
- exceeds_64kB.append(str(msg.name))
-
- if exceeds_64kB:
- yield '\n/* The following messages exceed 64kB in size: ' + ', '.join(exceeds_64kB) + ' */\n'
- yield '\n/* The PB_FIELD_32BIT compilation option must be defined to support messages that exceed 64 kB in size. */\n'
- yield '#ifndef PB_FIELD_32BIT\n'
- yield '#error Enable PB_FIELD_32BIT to support messages exceeding 64kB in size: ' + ', '.join(exceeds_64kB) + '\n'
- yield '#endif\n'
-
- # Generate the message field definitions (PB_BIND() call)
- for msg in self.messages:
- yield msg.fields_definition(self.dependencies) + '\n\n'
-
- # Generate pb_extension_type_t definitions if extensions are used in proto file
- for ext in self.extensions:
- yield ext.extension_def(self.dependencies) + '\n'
-
- # Generate enum_name function if enum_to_string option is defined
- for enum in self.enums:
- yield enum.enum_to_string_definition() + '\n'
-
- # Add checks for numeric limits
- if self.messages:
- largest_msg = max(self.messages, key = lambda m: m.count_required_fields())
- largest_count = largest_msg.count_required_fields()
- if largest_count > 64:
- yield '\n/* Check that missing required fields will be properly detected */\n'
- yield '#if PB_MAX_REQUIRED_FIELDS < %d\n' % largest_count
- yield '#error Properly detecting missing required fields in %s requires \\\n' % largest_msg.name
- yield ' setting PB_MAX_REQUIRED_FIELDS to %d or more.\n' % largest_count
- yield '#endif\n'
-
- # Add check for sizeof(double)
- has_double = False
- for msg in self.messages:
- for field in msg.all_fields():
- if field.ctype == 'double':
- has_double = True
-
- if has_double:
- yield '\n'
- yield '#ifndef PB_CONVERT_DOUBLE_FLOAT\n'
- yield '/* On some platforms (such as AVR), double is really float.\n'
- yield ' * To be able to encode/decode double on these platforms, you need.\n'
- yield ' * to define PB_CONVERT_DOUBLE_FLOAT in pb.h or compiler command line.\n'
- yield ' */\n'
- yield 'PB_STATIC_ASSERT(sizeof(double) == 8, DOUBLE_MUST_BE_8_BYTES)\n'
- yield '#endif\n'
-
- yield '\n'
-
- if Globals.protoc_insertion_points:
- yield '/* @@protoc_insertion_point(eof) */\n'
-
-# ---------------------------------------------------------------------------
-# Options parsing for the .proto files
-# ---------------------------------------------------------------------------
-
-from fnmatch import fnmatchcase
-
-def read_options_file(infile):
- '''Parse a separate options file to list:
- [(namemask, options), ...]
- '''
- results = []
- data = infile.read()
- data = re.sub(r'/\*.*?\*/', '', data, flags = re.MULTILINE)
- data = re.sub(r'//.*?$', '', data, flags = re.MULTILINE)
- data = re.sub(r'#.*?$', '', data, flags = re.MULTILINE)
- for i, line in enumerate(data.split('\n')):
- line = line.strip()
- if not line:
- continue
-
- parts = line.split(None, 1)
-
- if len(parts) < 2:
- sys.stderr.write("%s:%d: " % (infile.name, i + 1) +
- "Option lines should have space between field name and options. " +
- "Skipping line: '%s'\n" % line)
- sys.exit(1)
-
- opts = nanopb_pb2.NanoPBOptions()
-
- try:
- text_format.Merge(parts[1], opts)
- except Exception as e:
- sys.stderr.write("%s:%d: " % (infile.name, i + 1) +
- "Unparsable option line: '%s'. " % line +
- "Error: %s\n" % str(e))
- sys.exit(1)
- results.append((parts[0], opts))
-
- return results
-
-def get_nanopb_suboptions(subdesc, options, name):
- '''Get copy of options, and merge information from subdesc.'''
- new_options = nanopb_pb2.NanoPBOptions()
- new_options.CopyFrom(options)
-
- if hasattr(subdesc, 'syntax') and subdesc.syntax == "proto3":
- new_options.proto3 = True
-
- # Handle options defined in a separate file
- dotname = '.'.join(name.parts)
- for namemask, options in Globals.separate_options:
- if fnmatchcase(dotname, namemask):
- Globals.matched_namemasks.add(namemask)
- new_options.MergeFrom(options)
-
- # Handle options defined in .proto
- if isinstance(subdesc.options, descriptor.FieldOptions):
- ext_type = nanopb_pb2.nanopb
- elif isinstance(subdesc.options, descriptor.FileOptions):
- ext_type = nanopb_pb2.nanopb_fileopt
- elif isinstance(subdesc.options, descriptor.MessageOptions):
- ext_type = nanopb_pb2.nanopb_msgopt
- elif isinstance(subdesc.options, descriptor.EnumOptions):
- ext_type = nanopb_pb2.nanopb_enumopt
- else:
- raise Exception("Unknown options type")
-
- if subdesc.options.HasExtension(ext_type):
- ext = subdesc.options.Extensions[ext_type]
- new_options.MergeFrom(ext)
-
- if Globals.verbose_options:
- sys.stderr.write("Options for " + dotname + ": ")
- sys.stderr.write(text_format.MessageToString(new_options) + "\n")
-
- return new_options
-
-
-# ---------------------------------------------------------------------------
-# Command line interface
-# ---------------------------------------------------------------------------
-
-import sys
-import os.path
-from optparse import OptionParser
-
-optparser = OptionParser(
- usage = "Usage: nanopb_generator.py [options] file.pb ...",
- epilog = "Compile file.pb from file.proto by: 'protoc -ofile.pb file.proto'. " +
- "Output will be written to file.pb.h and file.pb.c.")
-optparser.add_option("--version", dest="version", action="store_true",
- help="Show version info and exit")
-optparser.add_option("-x", dest="exclude", metavar="FILE", action="append", default=[],
- help="Exclude file from generated #include list.")
-optparser.add_option("-e", "--extension", dest="extension", metavar="EXTENSION", default=".pb",
- help="Set extension to use instead of '.pb' for generated files. [default: %default]")
-optparser.add_option("-H", "--header-extension", dest="header_extension", metavar="EXTENSION", default=".h",
- help="Set extension to use for generated header files. [default: %default]")
-optparser.add_option("-S", "--source-extension", dest="source_extension", metavar="EXTENSION", default=".c",
- help="Set extension to use for generated source files. [default: %default]")
-optparser.add_option("-f", "--options-file", dest="options_file", metavar="FILE", default="%s.options",
- help="Set name of a separate generator options file.")
-optparser.add_option("-I", "--options-path", "--proto-path", dest="options_path", metavar="DIR",
- action="append", default = [],
- help="Search path for .options and .proto files. Also determines relative paths for output directory structure.")
-optparser.add_option("--error-on-unmatched", dest="error_on_unmatched", action="store_true", default=False,
- help ="Stop generation if there are unmatched fields in options file")
-optparser.add_option("--no-error-on-unmatched", dest="error_on_unmatched", action="store_false", default=False,
- help ="Continue generation if there are unmatched fields in options file (default)")
-optparser.add_option("-D", "--output-dir", dest="output_dir",
- metavar="OUTPUTDIR", default=None,
- help="Output directory of .pb.h and .pb.c files")
-optparser.add_option("-Q", "--generated-include-format", dest="genformat",
- metavar="FORMAT", default='#include "%s"',
- help="Set format string to use for including other .pb.h files. Value can be 'quote', 'bracket' or a format string. [default: %default]")
-optparser.add_option("-L", "--library-include-format", dest="libformat",
- metavar="FORMAT", default='#include <%s>',
- help="Set format string to use for including the nanopb pb.h header. Value can be 'quote', 'bracket' or a format string. [default: %default]")
-optparser.add_option("--strip-path", dest="strip_path", action="store_true", default=False,
- help="Strip directory path from #included .pb.h file name")
-optparser.add_option("--no-strip-path", dest="strip_path", action="store_false",
- help="Opposite of --strip-path (default since 0.4.0)")
-optparser.add_option("--cpp-descriptors", action="store_true",
- help="Generate C++ descriptors to lookup by type (e.g. pb_field_t for a message)")
-optparser.add_option("-T", "--no-timestamp", dest="notimestamp", action="store_true", default=True,
- help="Don't add timestamp to .pb.h and .pb.c preambles (default since 0.4.0)")
-optparser.add_option("-t", "--timestamp", dest="notimestamp", action="store_false", default=True,
- help="Add timestamp to .pb.h and .pb.c preambles")
-optparser.add_option("-q", "--quiet", dest="quiet", action="store_true", default=False,
- help="Don't print anything except errors.")
-optparser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
- help="Print more information.")
-optparser.add_option("-s", dest="settings", metavar="OPTION:VALUE", action="append", default=[],
- help="Set generator option (max_size, max_count etc.).")
-optparser.add_option("--protoc-opt", dest="protoc_opts", action="append", default = [], metavar="OPTION",
- help="Pass an option to protoc when compiling .proto files")
-optparser.add_option("--protoc-insertion-points", dest="protoc_insertion_points", action="store_true", default=False,
- help="Include insertion point comments in output for use by custom protoc plugins")
-optparser.add_option("-C", "--c-style", dest="c_style", action="store_true", default=False,
- help="Use C naming convention.")
-
-def process_cmdline(args, is_plugin):
- '''Process command line options. Returns list of options, filenames.'''
-
- options, filenames = optparser.parse_args(args)
-
- if options.version:
- if is_plugin:
- sys.stderr.write('%s\n' % (nanopb_version))
- else:
- print(nanopb_version)
- sys.exit(0)
-
- if not filenames and not is_plugin:
- optparser.print_help()
- sys.exit(1)
-
- if options.quiet:
- options.verbose = False
-
- include_formats = {'quote': '#include "%s"', 'bracket': '#include <%s>'}
- options.libformat = include_formats.get(options.libformat, options.libformat)
- options.genformat = include_formats.get(options.genformat, options.genformat)
-
- if options.c_style:
- Globals.naming_style = NamingStyleC()
-
- Globals.verbose_options = options.verbose
-
- if options.verbose:
- sys.stderr.write("Nanopb version %s\n" % nanopb_version)
- sys.stderr.write('Google Python protobuf library imported from %s, version %s\n'
- % (google.protobuf.__file__, google.protobuf.__version__))
-
- return options, filenames
-
-
-def parse_file(filename, fdesc, options):
- '''Parse a single file. Returns a ProtoFile instance.'''
- toplevel_options = nanopb_pb2.NanoPBOptions()
- for s in options.settings:
- if ':' not in s and '=' in s:
- s = s.replace('=', ':')
- text_format.Merge(s, toplevel_options)
-
- if not fdesc:
- data = open(filename, 'rb').read()
- fdesc = descriptor.FileDescriptorSet.FromString(data).file[0]
-
- # Check if there is a separate .options file
- had_abspath = False
- try:
- optfilename = options.options_file % os.path.splitext(filename)[0]
- except TypeError:
- # No %s specified, use the filename as-is
- optfilename = options.options_file
- had_abspath = True
-
- paths = ['.'] + options.options_path
- for p in paths:
- if os.path.isfile(os.path.join(p, optfilename)):
- optfilename = os.path.join(p, optfilename)
- if options.verbose:
- sys.stderr.write('Reading options from ' + optfilename + '\n')
- Globals.separate_options = read_options_file(open(optfilename, 'r', encoding = 'utf-8'))
- break
- else:
- # If we are given a full filename and it does not exist, give an error.
- # However, don't give error when we automatically look for .options file
- # with the same name as .proto.
- if options.verbose or had_abspath:
- sys.stderr.write('Options file not found: ' + optfilename + '\n')
- Globals.separate_options = []
-
- Globals.matched_namemasks = set()
- Globals.protoc_insertion_points = options.protoc_insertion_points
-
- # Parse the file
- file_options = get_nanopb_suboptions(fdesc, toplevel_options, Names([filename]))
- f = ProtoFile(fdesc, file_options)
- f.optfilename = optfilename
-
- return f
-
-def process_file(filename, fdesc, options, other_files = {}):
- '''Process a single file.
- filename: The full path to the .proto or .pb source file, as string.
- fdesc: The loaded FileDescriptorSet, or None to read from the input file.
- options: Command line options as they come from OptionsParser.
-
- Returns a dict:
- {'headername': Name of header file,
- 'headerdata': Data for the .h header file,
- 'sourcename': Name of the source code file,
- 'sourcedata': Data for the .c source code file
- }
- '''
- f = parse_file(filename, fdesc, options)
-
- # Check the list of dependencies, and if they are available in other_files,
- # add them to be considered for import resolving. Recursively add any files
- # imported by the dependencies.
- deps = list(f.fdesc.dependency)
- while deps:
- dep = deps.pop(0)
- if dep in other_files:
- f.add_dependency(other_files[dep])
- deps += list(other_files[dep].fdesc.dependency)
-
- # Decide the file names
- noext = os.path.splitext(filename)[0]
- headername = noext + options.extension + options.header_extension
- sourcename = noext + options.extension + options.source_extension
-
- if options.strip_path:
- headerbasename = os.path.basename(headername)
- else:
- headerbasename = headername
-
- # List of .proto files that should not be included in the C header file
- # even if they are mentioned in the source .proto.
- excludes = ['nanopb.proto', 'google/protobuf/descriptor.proto'] + options.exclude + list(f.file_options.exclude)
- includes = [d for d in f.fdesc.dependency if d not in excludes]
-
- headerdata = ''.join(f.generate_header(includes, headerbasename, options))
- sourcedata = ''.join(f.generate_source(headerbasename, options))
-
- # Check if there were any lines in .options that did not match a member
- unmatched = [n for n,o in Globals.separate_options if n not in Globals.matched_namemasks]
- if unmatched:
- if options.error_on_unmatched:
- raise Exception("Following patterns in " + f.optfilename + " did not match any fields: "
- + ', '.join(unmatched));
- elif not options.quiet:
- sys.stderr.write("Following patterns in " + f.optfilename + " did not match any fields: "
- + ', '.join(unmatched) + "\n")
-
- if not Globals.verbose_options:
- sys.stderr.write("Use protoc --nanopb-out=-v:. to see a list of the field names.\n")
-
- return {'headername': headername, 'headerdata': headerdata,
- 'sourcename': sourcename, 'sourcedata': sourcedata}
-
-def main_cli():
- '''Main function when invoked directly from the command line.'''
-
- options, filenames = process_cmdline(sys.argv[1:], is_plugin = False)
-
- if options.output_dir and not os.path.exists(options.output_dir):
- optparser.print_help()
- sys.stderr.write("\noutput_dir does not exist: %s\n" % options.output_dir)
- sys.exit(1)
-
- # Load .pb files into memory and compile any .proto files.
- include_path = ['-I%s' % p for p in options.options_path]
- all_fdescs = {}
- out_fdescs = {}
- for filename in filenames:
- if filename.endswith(".proto"):
- with TemporaryDirectory() as tmpdir:
- tmpname = os.path.join(tmpdir, os.path.basename(filename) + ".pb")
- args = ["protoc"] + include_path
- args += options.protoc_opts
- args += ['--include_imports', '--include_source_info', '-o' + tmpname, filename]
- status = invoke_protoc(args)
- if status != 0: sys.exit(status)
- data = open(tmpname, 'rb').read()
- else:
- data = open(filename, 'rb').read()
-
- fdescs = descriptor.FileDescriptorSet.FromString(data).file
- last_fdesc = fdescs[-1]
-
- for fdesc in fdescs:
- all_fdescs[fdesc.name] = fdesc
-
- out_fdescs[last_fdesc.name] = last_fdesc
-
- # Process any include files first, in order to have them
- # available as dependencies
- other_files = {}
- for fdesc in all_fdescs.values():
- other_files[fdesc.name] = parse_file(fdesc.name, fdesc, options)
-
- # Then generate the headers / sources
- for fdesc in out_fdescs.values():
- results = process_file(fdesc.name, fdesc, options, other_files)
-
- base_dir = options.output_dir or ''
- to_write = [
- (os.path.join(base_dir, results['headername']), results['headerdata']),
- (os.path.join(base_dir, results['sourcename']), results['sourcedata']),
- ]
-
- if not options.quiet:
- paths = " and ".join([x[0] for x in to_write])
- sys.stderr.write("Writing to %s\n" % paths)
-
- for path, data in to_write:
- dirname = os.path.dirname(path)
- if dirname and not os.path.exists(dirname):
- os.makedirs(dirname)
-
- with open(path, 'w') as f:
- f.write(data)
-
-def main_plugin():
- '''Main function when invoked as a protoc plugin.'''
-
- import io, sys
- if sys.platform == "win32":
- import os, msvcrt
- # Set stdin and stdout to binary mode
- msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
- msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
-
- data = io.open(sys.stdin.fileno(), "rb").read()
-
- request = plugin_pb2.CodeGeneratorRequest.FromString(data)
-
- try:
- # Versions of Python prior to 2.7.3 do not support unicode
- # input to shlex.split(). Try to convert to str if possible.
- params = str(request.parameter)
- except UnicodeEncodeError:
- params = request.parameter
-
- if ',' not in params and ' -' in params:
- # Nanopb has traditionally supported space as separator in options
- args = shlex.split(params)
- else:
- # Protoc separates options passed to plugins by comma
- # This allows also giving --nanopb_opt option multiple times.
- lex = shlex.shlex(params)
- lex.whitespace_split = True
- lex.whitespace = ','
- lex.commenters = ''
- args = list(lex)
-
- optparser.usage = "protoc --nanopb_out=outdir [--nanopb_opt=option] ['--nanopb_opt=option with spaces'] file.proto"
- optparser.epilog = "Output will be written to file.pb.h and file.pb.c."
-
- if '-h' in args or '--help' in args:
- # By default optparser prints help to stdout, which doesn't work for
- # protoc plugins.
- optparser.print_help(sys.stderr)
- sys.exit(1)
-
- options, dummy = process_cmdline(args, is_plugin = True)
-
- response = plugin_pb2.CodeGeneratorResponse()
-
- # Google's protoc does not currently indicate the full path of proto files.
- # Instead always add the main file path to the search dirs, that works for
- # the common case.
- import os.path
- options.options_path.append(os.path.dirname(request.file_to_generate[0]))
-
- # Process any include files first, in order to have them
- # available as dependencies
- other_files = {}
- for fdesc in request.proto_file:
- other_files[fdesc.name] = parse_file(fdesc.name, fdesc, options)
-
- for filename in request.file_to_generate:
- for fdesc in request.proto_file:
- if fdesc.name == filename:
- results = process_file(filename, fdesc, options, other_files)
-
- f = response.file.add()
- f.name = results['headername']
- f.content = results['headerdata']
-
- f = response.file.add()
- f.name = results['sourcename']
- f.content = results['sourcedata']
-
- if hasattr(plugin_pb2.CodeGeneratorResponse, "FEATURE_PROTO3_OPTIONAL"):
- response.supported_features = plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL
-
- io.open(sys.stdout.fileno(), "wb").write(response.SerializeToString())
-
-if __name__ == '__main__':
- # Check if we are running as a plugin under protoc
- if 'protoc-gen-' in sys.argv[0] or '--protoc-plugin' in sys.argv:
- main_plugin()
- else:
- main_cli()
diff --git a/MIDAS/src/hilsim/nanopb_generator/nanopb_generator.py2 b/MIDAS/src/hilsim/nanopb_generator/nanopb_generator.py2
deleted file mode 100644
index 0469461d..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/nanopb_generator.py2
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python2
-# This file is a wrapper around nanopb_generator.py in case you want to run
-# it with Python 2 instead of default Python 3. This only exists for backwards
-# compatibility, do not use for new projects.
-
-from nanopb_generator import *
-
-if __name__ == '__main__':
- # Check if we are running as a plugin under protoc
- if 'protoc-gen-' in sys.argv[0] or '--protoc-plugin' in sys.argv:
- main_plugin()
- else:
- main_cli()
diff --git a/MIDAS/src/hilsim/nanopb_generator/platformio_generator.py b/MIDAS/src/hilsim/nanopb_generator/platformio_generator.py
deleted file mode 100644
index 1c3eeebc..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/platformio_generator.py
+++ /dev/null
@@ -1,157 +0,0 @@
-import os
-import hashlib
-import pathlib
-import shlex
-import subprocess
-
-import SCons.Action
-from platformio import fs
-
-Import("env")
-
-# We don't use `env.Execute` because it does not handle spaces in path
-# See https://github.com/nanopb/nanopb/pull/834
-# So, we resolve the path to the executable and then use `subprocess.run`
-python_exe = env.subst("$PYTHONEXE")
-
-try:
- import protobuf
-except ImportError:
- print("[nanopb] Installing Protocol Buffers dependencies");
-
- # We need to specify protobuf version. In other case got next (on Ubuntu 20.04):
- # Requirement already satisfied: protobuf in /usr/lib/python3/dist-packages (3.6.1)
- subprocess.run([python_exe, '-m', 'pip', 'install', "protobuf>=3.19.1"])
-
-try:
- import grpc_tools.protoc
-except ImportError:
- print("[nanopb] Installing gRPC dependencies");
- subprocess.run([python_exe, '-m', 'pip', 'install', "grpcio-tools>=1.43.0"])
-
-
-nanopb_root = os.path.join(os.getcwd(), '..')
-
-project_dir = env.subst("$PROJECT_DIR")
-build_dir = env.subst("$BUILD_DIR")
-
-generated_src_dir = os.path.join(build_dir, 'nanopb', 'generated-src')
-generated_build_dir = os.path.join(build_dir, 'nanopb', 'generated-build')
-md5_dir = os.path.join(build_dir, 'nanopb', 'md5')
-
-nanopb_protos = env.GetProjectOption("custom_nanopb_protos", "")
-nanopb_plugin_options = env.GetProjectOption("custom_nanopb_options", "")
-
-if not nanopb_protos:
- print("[nanopb] No generation needed.")
-else:
- if isinstance(nanopb_plugin_options, (list, tuple)):
- nanopb_plugin_options = " ".join(nanopb_plugin_options)
-
- nanopb_plugin_options = shlex.split(nanopb_plugin_options)
-
- protos_files = fs.match_src_files(project_dir, nanopb_protos)
- if not len(protos_files):
- print("[nanopb] ERROR: No files matched pattern:")
- print(f"custom_nanopb_protos: {nanopb_protos}")
- exit(1)
-
- nanopb_generator = os.path.join(nanopb_root, 'generator', 'nanopb_generator.py')
-
- nanopb_options = []
- nanopb_options.extend(["--output-dir", generated_src_dir])
- for opt in nanopb_plugin_options:
- nanopb_options.append(opt)
-
- try:
- os.makedirs(generated_src_dir)
- except FileExistsError:
- pass
-
- try:
- os.makedirs(md5_dir)
- except FileExistsError:
- pass
-
- # Collect include dirs based on
- proto_include_dirs = set()
- for proto_file in protos_files:
- proto_file_abs = os.path.join(project_dir, proto_file)
- proto_dir = os.path.dirname(proto_file_abs)
- proto_include_dirs.add(proto_dir)
-
- for proto_include_dir in proto_include_dirs:
- nanopb_options.extend(["--proto-path", proto_include_dir])
-
- for proto_file in protos_files:
- proto_file_abs = os.path.join(project_dir, proto_file)
-
- proto_file_path_abs = os.path.dirname(proto_file_abs)
- proto_file_basename = os.path.basename(proto_file_abs)
- proto_file_without_ext = os.path.splitext(proto_file_basename)[0]
-
- proto_file_md5_abs = os.path.join(md5_dir, proto_file_basename + '.md5')
- proto_file_current_md5 = hashlib.md5(pathlib.Path(proto_file_abs).read_bytes()).hexdigest()
-
- options_file = proto_file_without_ext + ".options"
- options_file_abs = os.path.join(proto_file_path_abs, options_file)
- options_file_md5_abs = None
- options_file_current_md5 = None
- if pathlib.Path(options_file_abs).exists():
- options_file_md5_abs = os.path.join(md5_dir, options_file + '.md5')
- options_file_current_md5 = hashlib.md5(pathlib.Path(options_file_abs).read_bytes()).hexdigest()
- else:
- options_file = None
-
- header_file = proto_file_without_ext + ".pb.h"
- source_file = proto_file_without_ext + ".pb.c"
-
- header_file_abs = os.path.join(generated_src_dir, source_file)
- source_file_abs = os.path.join(generated_src_dir, header_file)
-
- need_generate = False
-
- # Check proto file md5
- try:
- last_md5 = pathlib.Path(proto_file_md5_abs).read_text()
- if last_md5 != proto_file_current_md5:
- need_generate = True
- except FileNotFoundError:
- need_generate = True
-
- if options_file:
- # Check options file md5
- try:
- last_md5 = pathlib.Path(options_file_md5_abs).read_text()
- if last_md5 != options_file_current_md5:
- need_generate = True
- except FileNotFoundError:
- need_generate = True
-
- options_info = f"{options_file}" if options_file else "no options"
-
- if not need_generate:
- print(f"[nanopb] Skipping '{proto_file}' ({options_info})")
- else:
- print(f"[nanopb] Processing '{proto_file}' ({options_info})")
- cmd = [python_exe, nanopb_generator] + nanopb_options + [proto_file_basename]
- action = SCons.Action.CommandAction(cmd)
- result = env.Execute(action)
- if result != 0:
- print(f"[nanopb] ERROR: ({result}) processing cmd: '{cmd}'")
- exit(1)
- pathlib.Path(proto_file_md5_abs).write_text(proto_file_current_md5)
- if options_file:
- pathlib.Path(options_file_md5_abs).write_text(options_file_current_md5)
-
- #
- # Add generated includes and sources to build environment
- #
- env.Append(CPPPATH=[generated_src_dir])
-
- # Fix for ESP32 ESP-IDF https://github.com/nanopb/nanopb/issues/734#issuecomment-1001544447
- global_env = DefaultEnvironment()
- already_called_env_name = "_PROTOBUF_GENERATOR_ALREADY_CALLED_" + env['PIOENV'].replace("-", "_")
- if not global_env.get(already_called_env_name, False):
- env.BuildSources(generated_build_dir, generated_src_dir)
- global_env[already_called_env_name] = True
diff --git a/MIDAS/src/hilsim/nanopb_generator/proto/Makefile b/MIDAS/src/hilsim/nanopb_generator/proto/Makefile
deleted file mode 100644
index a93d88ff..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/proto/Makefile
+++ /dev/null
@@ -1,10 +0,0 @@
-PROTOC?=../protoc
-
-all: nanopb_pb2.py
-
-%_pb2.py: %.proto
- $(PROTOC) --python_out=. $<
-
-.PHONY: clean
-clean:
- rm nanopb_pb2.py
diff --git a/MIDAS/src/hilsim/nanopb_generator/proto/__init__.py b/MIDAS/src/hilsim/nanopb_generator/proto/__init__.py
deleted file mode 100644
index cfab9103..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/proto/__init__.py
+++ /dev/null
@@ -1,128 +0,0 @@
-'''This file dynamically builds the proto definitions for Python.'''
-from __future__ import absolute_import
-
-import os
-import os.path
-import sys
-import tempfile
-import shutil
-import traceback
-import pkg_resources
-from ._utils import has_grpcio_protoc, invoke_protoc, print_versions
-
-# Compatibility layer to make TemporaryDirectory() available on Python 2.
-try:
- from tempfile import TemporaryDirectory
-except ImportError:
- class TemporaryDirectory:
- '''TemporaryDirectory fallback for Python 2'''
- def __init__(self, prefix = 'tmp', dir = None):
- self.prefix = prefix
- self.dir = dir
-
- def __enter__(self):
- self.dir = tempfile.mkdtemp(prefix = self.prefix, dir = self.dir)
- return self.dir
-
- def __exit__(self, *args):
- shutil.rmtree(self.dir)
-
-def build_nanopb_proto(protosrc, dirname):
- '''Try to build a .proto file for python-protobuf.
- Returns True if successful.
- '''
-
- cmd = [
- "protoc",
- "--python_out={}".format(dirname),
- protosrc,
- "-I={}".format(dirname),
- ]
-
- if has_grpcio_protoc():
- # grpcio-tools has an extra CLI argument
- # from grpc.tools.protoc __main__ invocation.
- _builtin_proto_include = pkg_resources.resource_filename('grpc_tools', '_proto')
- cmd.append("-I={}".format(_builtin_proto_include))
-
- try:
- invoke_protoc(argv=cmd)
- except:
- sys.stderr.write("Failed to build nanopb_pb2.py: " + ' '.join(cmd) + "\n")
- sys.stderr.write(traceback.format_exc() + "\n")
- return False
-
- return True
-
-def load_nanopb_pb2():
- # To work, the generator needs python-protobuf built version of nanopb.proto.
- # There are three methods to provide this:
- #
- # 1) Load a previously generated generator/proto/nanopb_pb2.py
- # 2) Use protoc to build it and store it permanently generator/proto/nanopb_pb2.py
- # 3) Use protoc to build it, but store only temporarily in system-wide temp folder
- #
- # By default these are tried in numeric order.
- # If NANOPB_PB2_TEMP_DIR environment variable is defined, the 2) is skipped.
- # If the value of the $NANOPB_PB2_TEMP_DIR exists as a directory, it is used instead
- # of system temp folder.
-
- build_error = None
- proto_ok = False
- tmpdir = os.getenv("NANOPB_PB2_TEMP_DIR")
- temporary_only = (tmpdir is not None)
- dirname = os.path.dirname(__file__)
- protosrc = os.path.join(dirname, "nanopb.proto")
- protodst = os.path.join(dirname, "nanopb_pb2.py")
- proto_ok = False
-
- if tmpdir is not None and not os.path.isdir(tmpdir):
- tmpdir = None # Use system-wide temp dir
-
- if os.path.isfile(protosrc):
- src_date = os.path.getmtime(protosrc)
- if not os.path.isfile(protodst) or os.path.getmtime(protodst) < src_date:
- # Outdated, rebuild
- proto_ok = False
- else:
- try:
- from . import nanopb_pb2 as nanopb_pb2_mod
- proto_ok = True
- except Exception as e:
- sys.stderr.write("Failed to import nanopb_pb2.py: " + str(e) + "\n"
- "Will automatically attempt to rebuild this.\n"
- "Verify that python-protobuf and protoc versions match.\n")
- print_versions()
-
- # Try to rebuild into generator/proto directory
- if not proto_ok and not temporary_only:
- proto_ok = build_nanopb_proto(protosrc, dirname)
-
- try:
- from . import nanopb_pb2 as nanopb_pb2_mod
- except:
- sys.stderr.write("Failed to import generator/proto/nanopb_pb2.py:\n")
- sys.stderr.write(traceback.format_exc() + "\n")
-
- # Try to rebuild into temporary directory
- if not proto_ok:
- with TemporaryDirectory(prefix = 'nanopb-', dir = tmpdir) as protodir:
- proto_ok = build_nanopb_proto(protosrc, protodir)
-
- if protodir not in sys.path:
- sys.path.insert(0, protodir)
-
- try:
- import nanopb_pb2 as nanopb_pb2_mod
- except:
- sys.stderr.write("Failed to import %s/nanopb_pb2.py:\n" % protodir)
- sys.stderr.write(traceback.format_exc() + "\n")
-
- # If everything fails
- if not proto_ok:
- sys.stderr.write("\n\nGenerating nanopb_pb2.py failed.\n")
- sys.stderr.write("Make sure that a protoc generator is available and matches python-protobuf version.\n")
- print_versions()
- sys.exit(1)
-
- return nanopb_pb2_mod
diff --git a/MIDAS/src/hilsim/nanopb_generator/proto/_utils.py b/MIDAS/src/hilsim/nanopb_generator/proto/_utils.py
deleted file mode 100644
index f9c8c94f..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/proto/_utils.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import sys
-import subprocess
-import os.path
-
-def has_grpcio_protoc():
- # type: () -> bool
- """ checks if grpcio-tools protoc is installed"""
-
- try:
- import grpc_tools.protoc
- except ImportError:
- return False
- return True
-
-
-def invoke_protoc(argv):
- # type: (list) -> typing.Any
- """
- Invoke protoc.
-
- This routine will use grpcio-provided protoc if it exists,
- using system-installed protoc as a fallback.
-
- Args:
- argv: protoc CLI invocation, first item must be 'protoc'
- """
-
- # Add current directory to include path if nothing else is specified
- if not [x for x in argv if x.startswith('-I')]:
- argv.append("-I.")
-
- # Add default protoc include paths
- nanopb_include = os.path.dirname(os.path.abspath(__file__))
- argv.append('-I' + nanopb_include)
-
- if has_grpcio_protoc():
- import grpc_tools.protoc as protoc
- import pkg_resources
- proto_include = pkg_resources.resource_filename('grpc_tools', '_proto')
- argv.append('-I' + proto_include)
-
- return protoc.main(argv)
- else:
- return subprocess.call(argv)
-
-def print_versions():
- try:
- if has_grpcio_protoc():
- import grpc_tools.protoc
- sys.stderr.write("Using grpcio-tools protoc from " + grpc_tools.protoc.__file__ + "\n")
- else:
- sys.stderr.write("Using protoc from system path\n")
-
- invoke_protoc(['protoc', '--version'])
- except Exception as e:
- sys.stderr.write("Failed to determine protoc version: " + str(e) + "\n")
-
- try:
- import google.protobuf
- sys.stderr.write("Python version " + sys.version + "\n")
- sys.stderr.write("Using python-protobuf from " + google.protobuf.__file__ + "\n")
- sys.stderr.write("Python-protobuf version: " + google.protobuf.__version__ + "\n")
- except Exception as e:
- sys.stderr.write("Failed to determine python-protobuf version: " + str(e) + "\n")
-
-if __name__ == '__main__':
- print_versions()
diff --git a/MIDAS/src/hilsim/nanopb_generator/proto/google/protobuf/descriptor.proto b/MIDAS/src/hilsim/nanopb_generator/proto/google/protobuf/descriptor.proto
deleted file mode 100644
index 8697a50d..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/proto/google/protobuf/descriptor.proto
+++ /dev/null
@@ -1,872 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-// Based on original Protocol Buffers design by
-// Sanjay Ghemawat, Jeff Dean, and others.
-//
-// The messages in this file describe the definitions found in .proto files.
-// A valid .proto file can be translated directly to a FileDescriptorProto
-// without any other information (e.g. without reading its imports).
-
-
-syntax = "proto2";
-
-package google.protobuf;
-option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor";
-option java_package = "com.google.protobuf";
-option java_outer_classname = "DescriptorProtos";
-option csharp_namespace = "Google.Protobuf.Reflection";
-option objc_class_prefix = "GPB";
-option cc_enable_arenas = true;
-
-// descriptor.proto must be optimized for speed because reflection-based
-// algorithms don't work during bootstrapping.
-option optimize_for = SPEED;
-
-// The protocol compiler can output a FileDescriptorSet containing the .proto
-// files it parses.
-message FileDescriptorSet {
- repeated FileDescriptorProto file = 1;
-}
-
-// Describes a complete .proto file.
-message FileDescriptorProto {
- optional string name = 1; // file name, relative to root of source tree
- optional string package = 2; // e.g. "foo", "foo.bar", etc.
-
- // Names of files imported by this file.
- repeated string dependency = 3;
- // Indexes of the public imported files in the dependency list above.
- repeated int32 public_dependency = 10;
- // Indexes of the weak imported files in the dependency list.
- // For Google-internal migration only. Do not use.
- repeated int32 weak_dependency = 11;
-
- // All top-level definitions in this file.
- repeated DescriptorProto message_type = 4;
- repeated EnumDescriptorProto enum_type = 5;
- repeated ServiceDescriptorProto service = 6;
- repeated FieldDescriptorProto extension = 7;
-
- optional FileOptions options = 8;
-
- // This field contains optional information about the original source code.
- // You may safely remove this entire field without harming runtime
- // functionality of the descriptors -- the information is needed only by
- // development tools.
- optional SourceCodeInfo source_code_info = 9;
-
- // The syntax of the proto file.
- // The supported values are "proto2" and "proto3".
- optional string syntax = 12;
-}
-
-// Describes a message type.
-message DescriptorProto {
- optional string name = 1;
-
- repeated FieldDescriptorProto field = 2;
- repeated FieldDescriptorProto extension = 6;
-
- repeated DescriptorProto nested_type = 3;
- repeated EnumDescriptorProto enum_type = 4;
-
- message ExtensionRange {
- optional int32 start = 1;
- optional int32 end = 2;
-
- optional ExtensionRangeOptions options = 3;
- }
- repeated ExtensionRange extension_range = 5;
-
- repeated OneofDescriptorProto oneof_decl = 8;
-
- optional MessageOptions options = 7;
-
- // Range of reserved tag numbers. Reserved tag numbers may not be used by
- // fields or extension ranges in the same message. Reserved ranges may
- // not overlap.
- message ReservedRange {
- optional int32 start = 1; // Inclusive.
- optional int32 end = 2; // Exclusive.
- }
- repeated ReservedRange reserved_range = 9;
- // Reserved field names, which may not be used by fields in the same message.
- // A given name may only be reserved once.
- repeated string reserved_name = 10;
-}
-
-message ExtensionRangeOptions {
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-// Describes a field within a message.
-message FieldDescriptorProto {
- enum Type {
- // 0 is reserved for errors.
- // Order is weird for historical reasons.
- TYPE_DOUBLE = 1;
- TYPE_FLOAT = 2;
- // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
- // negative values are likely.
- TYPE_INT64 = 3;
- TYPE_UINT64 = 4;
- // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
- // negative values are likely.
- TYPE_INT32 = 5;
- TYPE_FIXED64 = 6;
- TYPE_FIXED32 = 7;
- TYPE_BOOL = 8;
- TYPE_STRING = 9;
- // Tag-delimited aggregate.
- // Group type is deprecated and not supported in proto3. However, Proto3
- // implementations should still be able to parse the group wire format and
- // treat group fields as unknown fields.
- TYPE_GROUP = 10;
- TYPE_MESSAGE = 11; // Length-delimited aggregate.
-
- // New in version 2.
- TYPE_BYTES = 12;
- TYPE_UINT32 = 13;
- TYPE_ENUM = 14;
- TYPE_SFIXED32 = 15;
- TYPE_SFIXED64 = 16;
- TYPE_SINT32 = 17; // Uses ZigZag encoding.
- TYPE_SINT64 = 18; // Uses ZigZag encoding.
- };
-
- enum Label {
- // 0 is reserved for errors
- LABEL_OPTIONAL = 1;
- LABEL_REQUIRED = 2;
- LABEL_REPEATED = 3;
- };
-
- optional string name = 1;
- optional int32 number = 3;
- optional Label label = 4;
-
- // If type_name is set, this need not be set. If both this and type_name
- // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
- optional Type type = 5;
-
- // For message and enum types, this is the name of the type. If the name
- // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
- // rules are used to find the type (i.e. first the nested types within this
- // message are searched, then within the parent, on up to the root
- // namespace).
- optional string type_name = 6;
-
- // For extensions, this is the name of the type being extended. It is
- // resolved in the same manner as type_name.
- optional string extendee = 2;
-
- // For numeric types, contains the original text representation of the value.
- // For booleans, "true" or "false".
- // For strings, contains the default text contents (not escaped in any way).
- // For bytes, contains the C escaped value. All bytes >= 128 are escaped.
- // TODO(kenton): Base-64 encode?
- optional string default_value = 7;
-
- // If set, gives the index of a oneof in the containing type's oneof_decl
- // list. This field is a member of that oneof.
- optional int32 oneof_index = 9;
-
- // JSON name of this field. The value is set by protocol compiler. If the
- // user has set a "json_name" option on this field, that option's value
- // will be used. Otherwise, it's deduced from the field's name by converting
- // it to camelCase.
- optional string json_name = 10;
-
- optional FieldOptions options = 8;
-}
-
-// Describes a oneof.
-message OneofDescriptorProto {
- optional string name = 1;
- optional OneofOptions options = 2;
-}
-
-// Describes an enum type.
-message EnumDescriptorProto {
- optional string name = 1;
-
- repeated EnumValueDescriptorProto value = 2;
-
- optional EnumOptions options = 3;
-
- // Range of reserved numeric values. Reserved values may not be used by
- // entries in the same enum. Reserved ranges may not overlap.
- //
- // Note that this is distinct from DescriptorProto.ReservedRange in that it
- // is inclusive such that it can appropriately represent the entire int32
- // domain.
- message EnumReservedRange {
- optional int32 start = 1; // Inclusive.
- optional int32 end = 2; // Inclusive.
- }
-
- // Range of reserved numeric values. Reserved numeric values may not be used
- // by enum values in the same enum declaration. Reserved ranges may not
- // overlap.
- repeated EnumReservedRange reserved_range = 4;
-
- // Reserved enum value names, which may not be reused. A given name may only
- // be reserved once.
- repeated string reserved_name = 5;
-}
-
-// Describes a value within an enum.
-message EnumValueDescriptorProto {
- optional string name = 1;
- optional int32 number = 2;
-
- optional EnumValueOptions options = 3;
-}
-
-// Describes a service.
-message ServiceDescriptorProto {
- optional string name = 1;
- repeated MethodDescriptorProto method = 2;
-
- optional ServiceOptions options = 3;
-}
-
-// Describes a method of a service.
-message MethodDescriptorProto {
- optional string name = 1;
-
- // Input and output type names. These are resolved in the same way as
- // FieldDescriptorProto.type_name, but must refer to a message type.
- optional string input_type = 2;
- optional string output_type = 3;
-
- optional MethodOptions options = 4;
-
- // Identifies if client streams multiple client messages
- optional bool client_streaming = 5 [default=false];
- // Identifies if server streams multiple server messages
- optional bool server_streaming = 6 [default=false];
-}
-
-
-// ===================================================================
-// Options
-
-// Each of the definitions above may have "options" attached. These are
-// just annotations which may cause code to be generated slightly differently
-// or may contain hints for code that manipulates protocol messages.
-//
-// Clients may define custom options as extensions of the *Options messages.
-// These extensions may not yet be known at parsing time, so the parser cannot
-// store the values in them. Instead it stores them in a field in the *Options
-// message called uninterpreted_option. This field must have the same name
-// across all *Options messages. We then use this field to populate the
-// extensions when we build a descriptor, at which point all protos have been
-// parsed and so all extensions are known.
-//
-// Extension numbers for custom options may be chosen as follows:
-// * For options which will only be used within a single application or
-// organization, or for experimental options, use field numbers 50000
-// through 99999. It is up to you to ensure that you do not use the
-// same number for multiple options.
-// * For options which will be published and used publicly by multiple
-// independent entities, e-mail protobuf-global-extension-registry@google.com
-// to reserve extension numbers. Simply provide your project name (e.g.
-// Objective-C plugin) and your project website (if available) -- there's no
-// need to explain how you intend to use them. Usually you only need one
-// extension number. You can declare multiple options with only one extension
-// number by putting them in a sub-message. See the Custom Options section of
-// the docs for examples:
-// https://developers.google.com/protocol-buffers/docs/proto#options
-// If this turns out to be popular, a web service will be set up
-// to automatically assign option numbers.
-
-
-message FileOptions {
-
- // Sets the Java package where classes generated from this .proto will be
- // placed. By default, the proto package is used, but this is often
- // inappropriate because proto packages do not normally start with backwards
- // domain names.
- optional string java_package = 1;
-
-
- // If set, all the classes from the .proto file are wrapped in a single
- // outer class with the given name. This applies to both Proto1
- // (equivalent to the old "--one_java_file" option) and Proto2 (where
- // a .proto always translates to a single class, but you may want to
- // explicitly choose the class name).
- optional string java_outer_classname = 8;
-
- // If set true, then the Java code generator will generate a separate .java
- // file for each top-level message, enum, and service defined in the .proto
- // file. Thus, these types will *not* be nested inside the outer class
- // named by java_outer_classname. However, the outer class will still be
- // generated to contain the file's getDescriptor() method as well as any
- // top-level extensions defined in the file.
- optional bool java_multiple_files = 10 [default=false];
-
- // This option does nothing.
- optional bool java_generate_equals_and_hash = 20 [deprecated=true];
-
- // If set true, then the Java2 code generator will generate code that
- // throws an exception whenever an attempt is made to assign a non-UTF-8
- // byte sequence to a string field.
- // Message reflection will do the same.
- // However, an extension field still accepts non-UTF-8 byte sequences.
- // This option has no effect on when used with the lite runtime.
- optional bool java_string_check_utf8 = 27 [default=false];
-
-
- // Generated classes can be optimized for speed or code size.
- enum OptimizeMode {
- SPEED = 1; // Generate complete code for parsing, serialization,
- // etc.
- CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
- LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
- }
- optional OptimizeMode optimize_for = 9 [default=SPEED];
-
- // Sets the Go package where structs generated from this .proto will be
- // placed. If omitted, the Go package will be derived from the following:
- // - The basename of the package import path, if provided.
- // - Otherwise, the package statement in the .proto file, if present.
- // - Otherwise, the basename of the .proto file, without extension.
- optional string go_package = 11;
-
-
-
- // Should generic services be generated in each language? "Generic" services
- // are not specific to any particular RPC system. They are generated by the
- // main code generators in each language (without additional plugins).
- // Generic services were the only kind of service generation supported by
- // early versions of google.protobuf.
- //
- // Generic services are now considered deprecated in favor of using plugins
- // that generate code specific to your particular RPC system. Therefore,
- // these default to false. Old code which depends on generic services should
- // explicitly set them to true.
- optional bool cc_generic_services = 16 [default=false];
- optional bool java_generic_services = 17 [default=false];
- optional bool py_generic_services = 18 [default=false];
- optional bool php_generic_services = 42 [default=false];
-
- // Is this file deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for everything in the file, or it will be completely ignored; in the very
- // least, this is a formalization for deprecating files.
- optional bool deprecated = 23 [default=false];
-
- // Enables the use of arenas for the proto messages in this file. This applies
- // only to generated classes for C++.
- optional bool cc_enable_arenas = 31 [default=false];
-
-
- // Sets the objective c class prefix which is prepended to all objective c
- // generated classes from this .proto. There is no default.
- optional string objc_class_prefix = 36;
-
- // Namespace for generated classes; defaults to the package.
- optional string csharp_namespace = 37;
-
- // By default Swift generators will take the proto package and CamelCase it
- // replacing '.' with underscore and use that to prefix the types/symbols
- // defined. When this options is provided, they will use this value instead
- // to prefix the types/symbols defined.
- optional string swift_prefix = 39;
-
- // Sets the php class prefix which is prepended to all php generated classes
- // from this .proto. Default is empty.
- optional string php_class_prefix = 40;
-
- // Use this option to change the namespace of php generated classes. Default
- // is empty. When this option is empty, the package name will be used for
- // determining the namespace.
- optional string php_namespace = 41;
-
- // The parser stores options it doesn't recognize here.
- // See the documentation for the "Options" section above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message.
- // See the documentation for the "Options" section above.
- extensions 1000 to max;
-
- reserved 38;
-}
-
-message MessageOptions {
- // Set true to use the old proto1 MessageSet wire format for extensions.
- // This is provided for backwards-compatibility with the MessageSet wire
- // format. You should not use this for any other reason: It's less
- // efficient, has fewer features, and is more complicated.
- //
- // The message must be defined exactly as follows:
- // message Foo {
- // option message_set_wire_format = true;
- // extensions 4 to max;
- // }
- // Note that the message cannot have any defined fields; MessageSets only
- // have extensions.
- //
- // All extensions of your type must be singular messages; e.g. they cannot
- // be int32s, enums, or repeated messages.
- //
- // Because this is an option, the above two restrictions are not enforced by
- // the protocol compiler.
- optional bool message_set_wire_format = 1 [default=false];
-
- // Disables the generation of the standard "descriptor()" accessor, which can
- // conflict with a field of the same name. This is meant to make migration
- // from proto1 easier; new code should avoid fields named "descriptor".
- optional bool no_standard_descriptor_accessor = 2 [default=false];
-
- // Is this message deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for the message, or it will be completely ignored; in the very least,
- // this is a formalization for deprecating messages.
- optional bool deprecated = 3 [default=false];
-
- // Whether the message is an automatically generated map entry type for the
- // maps field.
- //
- // For maps fields:
- // map map_field = 1;
- // The parsed descriptor looks like:
- // message MapFieldEntry {
- // option map_entry = true;
- // optional KeyType key = 1;
- // optional ValueType value = 2;
- // }
- // repeated MapFieldEntry map_field = 1;
- //
- // Implementations may choose not to generate the map_entry=true message, but
- // use a native map in the target language to hold the keys and values.
- // The reflection APIs in such implementions still need to work as
- // if the field is a repeated message field.
- //
- // NOTE: Do not set the option in .proto files. Always use the maps syntax
- // instead. The option should only be implicitly set by the proto compiler
- // parser.
- optional bool map_entry = 7;
-
- reserved 8; // javalite_serializable
- reserved 9; // javanano_as_lite
-
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-message FieldOptions {
- // The ctype option instructs the C++ code generator to use a different
- // representation of the field than it normally would. See the specific
- // options below. This option is not yet implemented in the open source
- // release -- sorry, we'll try to include it in a future version!
- optional CType ctype = 1 [default = STRING];
- enum CType {
- // Default mode.
- STRING = 0;
-
- CORD = 1;
-
- STRING_PIECE = 2;
- }
- // The packed option can be enabled for repeated primitive fields to enable
- // a more efficient representation on the wire. Rather than repeatedly
- // writing the tag and type for each element, the entire array is encoded as
- // a single length-delimited blob. In proto3, only explicit setting it to
- // false will avoid using packed encoding.
- optional bool packed = 2;
-
- // The jstype option determines the JavaScript type used for values of the
- // field. The option is permitted only for 64 bit integral and fixed types
- // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
- // is represented as JavaScript string, which avoids loss of precision that
- // can happen when a large value is converted to a floating point JavaScript.
- // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
- // use the JavaScript "number" type. The behavior of the default option
- // JS_NORMAL is implementation dependent.
- //
- // This option is an enum to permit additional types to be added, e.g.
- // goog.math.Integer.
- optional JSType jstype = 6 [default = JS_NORMAL];
- enum JSType {
- // Use the default type.
- JS_NORMAL = 0;
-
- // Use JavaScript strings.
- JS_STRING = 1;
-
- // Use JavaScript numbers.
- JS_NUMBER = 2;
- }
-
- // Should this field be parsed lazily? Lazy applies only to message-type
- // fields. It means that when the outer message is initially parsed, the
- // inner message's contents will not be parsed but instead stored in encoded
- // form. The inner message will actually be parsed when it is first accessed.
- //
- // This is only a hint. Implementations are free to choose whether to use
- // eager or lazy parsing regardless of the value of this option. However,
- // setting this option true suggests that the protocol author believes that
- // using lazy parsing on this field is worth the additional bookkeeping
- // overhead typically needed to implement it.
- //
- // This option does not affect the public interface of any generated code;
- // all method signatures remain the same. Furthermore, thread-safety of the
- // interface is not affected by this option; const methods remain safe to
- // call from multiple threads concurrently, while non-const methods continue
- // to require exclusive access.
- //
- //
- // Note that implementations may choose not to check required fields within
- // a lazy sub-message. That is, calling IsInitialized() on the outer message
- // may return true even if the inner message has missing required fields.
- // This is necessary because otherwise the inner message would have to be
- // parsed in order to perform the check, defeating the purpose of lazy
- // parsing. An implementation which chooses not to check required fields
- // must be consistent about it. That is, for any particular sub-message, the
- // implementation must either *always* check its required fields, or *never*
- // check its required fields, regardless of whether or not the message has
- // been parsed.
- optional bool lazy = 5 [default=false];
-
- // Is this field deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for accessors, or it will be completely ignored; in the very least, this
- // is a formalization for deprecating fields.
- optional bool deprecated = 3 [default=false];
-
- // For Google-internal migration only. Do not use.
- optional bool weak = 10 [default=false];
-
-
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-
- reserved 4; // removed jtype
-}
-
-message OneofOptions {
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-message EnumOptions {
-
- // Set this option to true to allow mapping different tag names to the same
- // value.
- optional bool allow_alias = 2;
-
- // Is this enum deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for the enum, or it will be completely ignored; in the very least, this
- // is a formalization for deprecating enums.
- optional bool deprecated = 3 [default=false];
-
- reserved 5; // javanano_as_lite
-
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-message EnumValueOptions {
- // Is this enum value deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for the enum value, or it will be completely ignored; in the very least,
- // this is a formalization for deprecating enum values.
- optional bool deprecated = 1 [default=false];
-
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-message ServiceOptions {
-
- // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
- // framework. We apologize for hoarding these numbers to ourselves, but
- // we were already using them long before we decided to release Protocol
- // Buffers.
-
- // Is this service deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for the service, or it will be completely ignored; in the very least,
- // this is a formalization for deprecating services.
- optional bool deprecated = 33 [default=false];
-
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-message MethodOptions {
-
- // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
- // framework. We apologize for hoarding these numbers to ourselves, but
- // we were already using them long before we decided to release Protocol
- // Buffers.
-
- // Is this method deprecated?
- // Depending on the target platform, this can emit Deprecated annotations
- // for the method, or it will be completely ignored; in the very least,
- // this is a formalization for deprecating methods.
- optional bool deprecated = 33 [default=false];
-
- // Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- // or neither? HTTP based RPC implementation may choose GET verb for safe
- // methods, and PUT verb for idempotent methods instead of the default POST.
- enum IdempotencyLevel {
- IDEMPOTENCY_UNKNOWN = 0;
- NO_SIDE_EFFECTS = 1; // implies idempotent
- IDEMPOTENT = 2; // idempotent, but may have side effects
- }
- optional IdempotencyLevel idempotency_level =
- 34 [default=IDEMPOTENCY_UNKNOWN];
-
- // The parser stores options it doesn't recognize here. See above.
- repeated UninterpretedOption uninterpreted_option = 999;
-
- // Clients can define custom options in extensions of this message. See above.
- extensions 1000 to max;
-}
-
-
-// A message representing a option the parser does not recognize. This only
-// appears in options protos created by the compiler::Parser class.
-// DescriptorPool resolves these when building Descriptor objects. Therefore,
-// options protos in descriptor objects (e.g. returned by Descriptor::options(),
-// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
-// in them.
-message UninterpretedOption {
- // The name of the uninterpreted option. Each string represents a segment in
- // a dot-separated name. is_extension is true iff a segment represents an
- // extension (denoted with parentheses in options specs in .proto files).
- // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
- // "foo.(bar.baz).qux".
- message NamePart {
- required string name_part = 1;
- required bool is_extension = 2;
- }
- repeated NamePart name = 2;
-
- // The value of the uninterpreted option, in whatever type the tokenizer
- // identified it as during parsing. Exactly one of these should be set.
- optional string identifier_value = 3;
- optional uint64 positive_int_value = 4;
- optional int64 negative_int_value = 5;
- optional double double_value = 6;
- optional bytes string_value = 7;
- optional string aggregate_value = 8;
-}
-
-// ===================================================================
-// Optional source code info
-
-// Encapsulates information about the original source file from which a
-// FileDescriptorProto was generated.
-message SourceCodeInfo {
- // A Location identifies a piece of source code in a .proto file which
- // corresponds to a particular definition. This information is intended
- // to be useful to IDEs, code indexers, documentation generators, and similar
- // tools.
- //
- // For example, say we have a file like:
- // message Foo {
- // optional string foo = 1;
- // }
- // Let's look at just the field definition:
- // optional string foo = 1;
- // ^ ^^ ^^ ^ ^^^
- // a bc de f ghi
- // We have the following locations:
- // span path represents
- // [a,i) [ 4, 0, 2, 0 ] The whole field definition.
- // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
- // [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
- // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
- // [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
- //
- // Notes:
- // - A location may refer to a repeated field itself (i.e. not to any
- // particular index within it). This is used whenever a set of elements are
- // logically enclosed in a single code segment. For example, an entire
- // extend block (possibly containing multiple extension definitions) will
- // have an outer location whose path refers to the "extensions" repeated
- // field without an index.
- // - Multiple locations may have the same path. This happens when a single
- // logical declaration is spread out across multiple places. The most
- // obvious example is the "extend" block again -- there may be multiple
- // extend blocks in the same scope, each of which will have the same path.
- // - A location's span is not always a subset of its parent's span. For
- // example, the "extendee" of an extension declaration appears at the
- // beginning of the "extend" block and is shared by all extensions within
- // the block.
- // - Just because a location's span is a subset of some other location's span
- // does not mean that it is a descendent. For example, a "group" defines
- // both a type and a field in a single declaration. Thus, the locations
- // corresponding to the type and field and their components will overlap.
- // - Code which tries to interpret locations should probably be designed to
- // ignore those that it doesn't understand, as more types of locations could
- // be recorded in the future.
- repeated Location location = 1;
- message Location {
- // Identifies which part of the FileDescriptorProto was defined at this
- // location.
- //
- // Each element is a field number or an index. They form a path from
- // the root FileDescriptorProto to the place where the definition. For
- // example, this path:
- // [ 4, 3, 2, 7, 1 ]
- // refers to:
- // file.message_type(3) // 4, 3
- // .field(7) // 2, 7
- // .name() // 1
- // This is because FileDescriptorProto.message_type has field number 4:
- // repeated DescriptorProto message_type = 4;
- // and DescriptorProto.field has field number 2:
- // repeated FieldDescriptorProto field = 2;
- // and FieldDescriptorProto.name has field number 1:
- // optional string name = 1;
- //
- // Thus, the above path gives the location of a field name. If we removed
- // the last element:
- // [ 4, 3, 2, 7 ]
- // this path refers to the whole field declaration (from the beginning
- // of the label to the terminating semicolon).
- repeated int32 path = 1 [packed=true];
-
- // Always has exactly three or four elements: start line, start column,
- // end line (optional, otherwise assumed same as start line), end column.
- // These are packed into a single field for efficiency. Note that line
- // and column numbers are zero-based -- typically you will want to add
- // 1 to each before displaying to a user.
- repeated int32 span = 2 [packed=true];
-
- // If this SourceCodeInfo represents a complete declaration, these are any
- // comments appearing before and after the declaration which appear to be
- // attached to the declaration.
- //
- // A series of line comments appearing on consecutive lines, with no other
- // tokens appearing on those lines, will be treated as a single comment.
- //
- // leading_detached_comments will keep paragraphs of comments that appear
- // before (but not connected to) the current element. Each paragraph,
- // separated by empty lines, will be one comment element in the repeated
- // field.
- //
- // Only the comment content is provided; comment markers (e.g. //) are
- // stripped out. For block comments, leading whitespace and an asterisk
- // will be stripped from the beginning of each line other than the first.
- // Newlines are included in the output.
- //
- // Examples:
- //
- // optional int32 foo = 1; // Comment attached to foo.
- // // Comment attached to bar.
- // optional int32 bar = 2;
- //
- // optional string baz = 3;
- // // Comment attached to baz.
- // // Another line attached to baz.
- //
- // // Comment attached to qux.
- // //
- // // Another line attached to qux.
- // optional double qux = 4;
- //
- // // Detached comment for corge. This is not leading or trailing comments
- // // to qux or corge because there are blank lines separating it from
- // // both.
- //
- // // Detached comment for corge paragraph 2.
- //
- // optional string corge = 5;
- // /* Block comment attached
- // * to corge. Leading asterisks
- // * will be removed. */
- // /* Block comment attached to
- // * grault. */
- // optional int32 grault = 6;
- //
- // // ignored detached comments.
- optional string leading_comments = 3;
- optional string trailing_comments = 4;
- repeated string leading_detached_comments = 6;
- }
-}
-
-// Describes the relationship between generated code and its original source
-// file. A GeneratedCodeInfo message is associated with only one generated
-// source file, but may contain references to different source .proto files.
-message GeneratedCodeInfo {
- // An Annotation connects some span of text in generated code to an element
- // of its generating .proto file.
- repeated Annotation annotation = 1;
- message Annotation {
- // Identifies the element in the original source .proto file. This field
- // is formatted the same as SourceCodeInfo.Location.path.
- repeated int32 path = 1 [packed=true];
-
- // Identifies the filesystem path to the original source .proto.
- optional string source_file = 2;
-
- // Identifies the starting offset in bytes in the generated code
- // that relates to the identified object.
- optional int32 begin = 3;
-
- // Identifies the ending offset in bytes in the generated code that
- // relates to the identified offset. The end offset should be one past
- // the last relevant byte (so the length of the text = end - begin).
- optional int32 end = 4;
- }
-}
diff --git a/MIDAS/src/hilsim/nanopb_generator/proto/nanopb.proto b/MIDAS/src/hilsim/nanopb_generator/proto/nanopb.proto
deleted file mode 100644
index 5e36eaa8..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/proto/nanopb.proto
+++ /dev/null
@@ -1,185 +0,0 @@
-// Custom options for defining:
-// - Maximum size of string/bytes
-// - Maximum number of elements in array
-//
-// These are used by nanopb to generate statically allocable structures
-// for memory-limited environments.
-
-syntax = "proto2";
-import "google/protobuf/descriptor.proto";
-
-option java_package = "fi.kapsi.koti.jpa.nanopb";
-
-enum FieldType {
- FT_DEFAULT = 0; // Automatically decide field type, generate static field if possible.
- FT_CALLBACK = 1; // Always generate a callback field.
- FT_POINTER = 4; // Always generate a dynamically allocated field.
- FT_STATIC = 2; // Generate a static field or raise an exception if not possible.
- FT_IGNORE = 3; // Ignore the field completely.
- FT_INLINE = 5; // Legacy option, use the separate 'fixed_length' option instead
-}
-
-enum IntSize {
- IS_DEFAULT = 0; // Default, 32/64bit based on type in .proto
- IS_8 = 8;
- IS_16 = 16;
- IS_32 = 32;
- IS_64 = 64;
-}
-
-enum TypenameMangling {
- M_NONE = 0; // Default, no typename mangling
- M_STRIP_PACKAGE = 1; // Strip current package name
- M_FLATTEN = 2; // Only use last path component
- M_PACKAGE_INITIALS = 3; // Replace the package name by the initials
-}
-
-enum DescriptorSize {
- DS_AUTO = 0; // Select minimal size based on field type
- DS_1 = 1; // 1 word; up to 15 byte fields, no arrays
- DS_2 = 2; // 2 words; up to 4095 byte fields, 4095 entry arrays
- DS_4 = 4; // 4 words; up to 2^32-1 byte fields, 2^16-1 entry arrays
- DS_8 = 8; // 8 words; up to 2^32-1 entry arrays
-}
-
-// This is the inner options message, which basically defines options for
-// a field. When it is used in message or file scope, it applies to all
-// fields.
-message NanoPBOptions {
- // Allocated size for 'bytes' and 'string' fields.
- // For string fields, this should include the space for null terminator.
- optional int32 max_size = 1;
-
- // Maximum length for 'string' fields. Setting this is equivalent
- // to setting max_size to a value of length+1.
- optional int32 max_length = 14;
-
- // Allocated number of entries in arrays ('repeated' fields)
- optional int32 max_count = 2;
-
- // Size of integer fields. Can save some memory if you don't need
- // full 32 bits for the value.
- optional IntSize int_size = 7 [default = IS_DEFAULT];
-
- // Force type of field (callback or static allocation)
- optional FieldType type = 3 [default = FT_DEFAULT];
-
- // Use long names for enums, i.e. EnumName_EnumValue.
- optional bool long_names = 4 [default = true];
-
- // Add 'packed' attribute to generated structs.
- // Note: this cannot be used on CPUs that break on unaligned
- // accesses to variables.
- optional bool packed_struct = 5 [default = false];
-
- // Add 'packed' attribute to generated enums.
- optional bool packed_enum = 10 [default = false];
-
- // Skip this message
- optional bool skip_message = 6 [default = false];
-
- // Generate oneof fields as normal optional fields instead of union.
- optional bool no_unions = 8 [default = false];
-
- // integer type tag for a message
- optional uint32 msgid = 9;
-
- // decode oneof as anonymous union
- optional bool anonymous_oneof = 11 [default = false];
-
- // Proto3 singular field does not generate a "has_" flag
- optional bool proto3 = 12 [default = false];
-
- // Force proto3 messages to have no "has_" flag.
- // This was default behavior until nanopb-0.4.0.
- optional bool proto3_singular_msgs = 21 [default = false];
-
- // Generate an enum->string mapping function (can take up lots of space).
- optional bool enum_to_string = 13 [default = false];
-
- // Generate bytes arrays with fixed length
- optional bool fixed_length = 15 [default = false];
-
- // Generate repeated field with fixed count
- optional bool fixed_count = 16 [default = false];
-
- // Generate message-level callback that is called before decoding submessages.
- // This can be used to set callback fields for submsgs inside oneofs.
- optional bool submsg_callback = 22 [default = false];
-
- // Shorten or remove package names from type names.
- // This option applies only on the file level.
- optional TypenameMangling mangle_names = 17 [default = M_NONE];
-
- // Data type for storage associated with callback fields.
- optional string callback_datatype = 18 [default = "pb_callback_t"];
-
- // Callback function used for encoding and decoding.
- // Prior to nanopb-0.4.0, the callback was specified in per-field pb_callback_t
- // structure. This is still supported, but does not work inside e.g. oneof or pointer
- // fields. Instead, a new method allows specifying a per-message callback that
- // will be called for all callback fields in a message type.
- optional string callback_function = 19 [default = "pb_default_field_callback"];
-
- // Select the size of field descriptors. This option has to be defined
- // for the whole message, not per-field. Usually automatic selection is
- // ok, but if it results in compilation errors you can increase the field
- // size here.
- optional DescriptorSize descriptorsize = 20 [default = DS_AUTO];
-
- // Set default value for has_ fields.
- optional bool default_has = 23 [default = false];
-
- // Extra files to include in generated `.pb.h`
- repeated string include = 24;
-
- // Automatic includes to exclude from generated `.pb.h`
- // Same as nanopb_generator.py command line flag -x.
- repeated string exclude = 26;
-
- // Package name that applies only for nanopb.
- optional string package = 25;
-
- // Override type of the field in generated C code. Only to be used with related field types
- optional google.protobuf.FieldDescriptorProto.Type type_override = 27;
-
- // Due to historical reasons, nanopb orders fields in structs by their tag number
- // instead of the order in .proto. Set this to false to keep the .proto order.
- // The default value will probably change to false in nanopb-0.5.0.
- optional bool sort_by_tag = 28 [default = true];
-
- // Set the FT_DEFAULT field conversion strategy.
- // A field that can become a static member of a c struct (e.g. int, bool, etc)
- // will be a a static field.
- // Fields with dynamic length are converted to either a pointer or a callback.
- optional FieldType fallback_type = 29 [default = FT_CALLBACK];
-}
-
-// Extensions to protoc 'Descriptor' type in order to define options
-// inside a .proto file.
-//
-// Protocol Buffers extension number registry
-// --------------------------------
-// Project: Nanopb
-// Contact: Petteri Aimonen
-// Web site: http://kapsi.fi/~jpa/nanopb
-// Extensions: 1010 (all types)
-// --------------------------------
-
-extend google.protobuf.FileOptions {
- optional NanoPBOptions nanopb_fileopt = 1010;
-}
-
-extend google.protobuf.MessageOptions {
- optional NanoPBOptions nanopb_msgopt = 1010;
-}
-
-extend google.protobuf.EnumOptions {
- optional NanoPBOptions nanopb_enumopt = 1010;
-}
-
-extend google.protobuf.FieldOptions {
- optional NanoPBOptions nanopb = 1010;
-}
-
-
diff --git a/MIDAS/src/hilsim/nanopb_generator/proto/nanopb_pb2.py b/MIDAS/src/hilsim/nanopb_generator/proto/nanopb_pb2.py
deleted file mode 100644
index 51193fdc..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/proto/nanopb_pb2.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: nanopb.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf.internal import builder as _builder
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cnanopb.proto\x1a google/protobuf/descriptor.proto\"\xa4\x07\n\rNanoPBOptions\x12\x10\n\x08max_size\x18\x01 \x01(\x05\x12\x12\n\nmax_length\x18\x0e \x01(\x05\x12\x11\n\tmax_count\x18\x02 \x01(\x05\x12&\n\x08int_size\x18\x07 \x01(\x0e\x32\x08.IntSize:\nIS_DEFAULT\x12$\n\x04type\x18\x03 \x01(\x0e\x32\n.FieldType:\nFT_DEFAULT\x12\x18\n\nlong_names\x18\x04 \x01(\x08:\x04true\x12\x1c\n\rpacked_struct\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x0bpacked_enum\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cskip_message\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tno_unions\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05msgid\x18\t \x01(\r\x12\x1e\n\x0f\x61nonymous_oneof\x18\x0b \x01(\x08:\x05\x66\x61lse\x12\x15\n\x06proto3\x18\x0c \x01(\x08:\x05\x66\x61lse\x12#\n\x14proto3_singular_msgs\x18\x15 \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x65num_to_string\x18\r \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0c\x66ixed_length\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x0b\x66ixed_count\x18\x10 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0fsubmsg_callback\x18\x16 \x01(\x08:\x05\x66\x61lse\x12/\n\x0cmangle_names\x18\x11 \x01(\x0e\x32\x11.TypenameMangling:\x06M_NONE\x12(\n\x11\x63\x61llback_datatype\x18\x12 \x01(\t:\rpb_callback_t\x12\x34\n\x11\x63\x61llback_function\x18\x13 \x01(\t:\x19pb_default_field_callback\x12\x30\n\x0e\x64\x65scriptorsize\x18\x14 \x01(\x0e\x32\x0f.DescriptorSize:\x07\x44S_AUTO\x12\x1a\n\x0b\x64\x65\x66\x61ult_has\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07include\x18\x18 \x03(\t\x12\x0f\n\x07\x65xclude\x18\x1a \x03(\t\x12\x0f\n\x07package\x18\x19 \x01(\t\x12\x41\n\rtype_override\x18\x1b \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x19\n\x0bsort_by_tag\x18\x1c \x01(\x08:\x04true\x12.\n\rfallback_type\x18\x1d 
\x01(\x0e\x32\n.FieldType:\x0b\x46T_CALLBACK*i\n\tFieldType\x12\x0e\n\nFT_DEFAULT\x10\x00\x12\x0f\n\x0b\x46T_CALLBACK\x10\x01\x12\x0e\n\nFT_POINTER\x10\x04\x12\r\n\tFT_STATIC\x10\x02\x12\r\n\tFT_IGNORE\x10\x03\x12\r\n\tFT_INLINE\x10\x05*D\n\x07IntSize\x12\x0e\n\nIS_DEFAULT\x10\x00\x12\x08\n\x04IS_8\x10\x08\x12\t\n\x05IS_16\x10\x10\x12\t\n\x05IS_32\x10 \x12\t\n\x05IS_64\x10@*Z\n\x10TypenameMangling\x12\n\n\x06M_NONE\x10\x00\x12\x13\n\x0fM_STRIP_PACKAGE\x10\x01\x12\r\n\tM_FLATTEN\x10\x02\x12\x16\n\x12M_PACKAGE_INITIALS\x10\x03*E\n\x0e\x44\x65scriptorSize\x12\x0b\n\x07\x44S_AUTO\x10\x00\x12\x08\n\x04\x44S_1\x10\x01\x12\x08\n\x04\x44S_2\x10\x02\x12\x08\n\x04\x44S_4\x10\x04\x12\x08\n\x04\x44S_8\x10\x08:E\n\x0enanopb_fileopt\x12\x1c.google.protobuf.FileOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:G\n\rnanopb_msgopt\x12\x1f.google.protobuf.MessageOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:E\n\x0enanopb_enumopt\x12\x1c.google.protobuf.EnumOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:>\n\x06nanopb\x12\x1d.google.protobuf.FieldOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptionsB\x1a\n\x18\x66i.kapsi.koti.jpa.nanopb')
-
-_globals = globals()
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'nanopb_pb2', _globals)
-if _descriptor._USE_C_DESCRIPTORS == False:
- DESCRIPTOR._options = None
- DESCRIPTOR._serialized_options = b'\n\030fi.kapsi.koti.jpa.nanopb'
- _globals['_FIELDTYPE']._serialized_start=985
- _globals['_FIELDTYPE']._serialized_end=1090
- _globals['_INTSIZE']._serialized_start=1092
- _globals['_INTSIZE']._serialized_end=1160
- _globals['_TYPENAMEMANGLING']._serialized_start=1162
- _globals['_TYPENAMEMANGLING']._serialized_end=1252
- _globals['_DESCRIPTORSIZE']._serialized_start=1254
- _globals['_DESCRIPTORSIZE']._serialized_end=1323
- _globals['_NANOPBOPTIONS']._serialized_start=51
- _globals['_NANOPBOPTIONS']._serialized_end=983
-# @@protoc_insertion_point(module_scope)
diff --git a/MIDAS/src/hilsim/nanopb_generator/protoc b/MIDAS/src/hilsim/nanopb_generator/protoc
deleted file mode 100644
index c259702f..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/protoc
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-# This file acts as a drop-in replacement of binary protoc.exe.
-# It will use either Python-based protoc from grpcio-tools package,
-# or if it is not available, protoc.exe from path if found.
-
-import sys
-import os
-import os.path
-
-# Depending on how this script is run, we may or may not have PEP366 package name
-# available for relative imports.
-if not __package__:
- from proto._utils import invoke_protoc
-else:
- from .proto._utils import invoke_protoc
-
-if __name__ == '__main__':
- # Get path of the directory where this script is stored.
- if getattr(sys, 'frozen', False):
- mypath = os.path.dirname(sys.executable) # For pyInstaller
- else:
- mypath = os.path.dirname(__file__)
-
- # Avoid recursive calls to self
- env_paths = os.environ["PATH"].split(os.pathsep)
- if mypath in env_paths:
- env_paths.remove(mypath)
- os.environ["PATH"] = os.pathsep.join(env_paths)
-
- # Add argument for finding the nanopb generator when using --nanopb_out=
- # argument to protoc.
- if os.path.isfile(os.path.join(mypath, "protoc-gen-nanopb.exe")):
- protoc_gen_nanopb = os.path.join(mypath, "protoc-gen-nanopb.exe")
- elif os.name == 'nt':
- protoc_gen_nanopb = os.path.join(mypath, "protoc-gen-nanopb.bat")
- else:
- protoc_gen_nanopb = os.path.join(mypath, "protoc-gen-nanopb")
-
- args = sys.argv[1:]
-
- if os.path.isfile(protoc_gen_nanopb):
- args = ['--plugin=protoc-gen-nanopb=%s' % protoc_gen_nanopb] + args
-
- status = invoke_protoc(['protoc'] + args)
- sys.exit(status)
diff --git a/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb b/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb
deleted file mode 100644
index 20a36c79..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python3
-# This file is used to invoke nanopb_generator.py as a plugin
-# to protoc on Linux and other *nix-style systems.
-# Use it like this:
-# protoc --plugin=protoc-gen-nanopb=..../protoc-gen-nanopb --nanopb_out=dir foo.proto
-
-from nanopb_generator import *
-
-if __name__ == '__main__':
- # Assume we are running as a plugin under protoc.
- main_plugin()
diff --git a/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb-py2 b/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb-py2
deleted file mode 100644
index e6427094..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb-py2
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-# This file is used to invoke nanopb_generator.py2 as a plugin
-# to protoc on Linux and other *nix-style systems.
-#
-# The difference from protoc-gen-nanopb is that this executes with Python 2.
-#
-# Use it like this:
-# protoc --plugin=protoc-gen-nanopb=..../protoc-gen-nanopb-py2 --nanopb_out=dir foo.proto
-#
-# Note that if you use the binary package of nanopb, the protoc
-# path is already set up properly and there is no need to give
-# --plugin= on the command line.
-
-MYPATH=$(dirname "$0")
-exec "$MYPATH/nanopb_generator.py2" --protoc-plugin
diff --git a/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb.bat b/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb.bat
deleted file mode 100644
index fa5bdd2b..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/protoc-gen-nanopb.bat
+++ /dev/null
@@ -1,12 +0,0 @@
-@echo off
-:: This file is used to invoke nanopb_generator.py as a plugin
-:: to protoc on Windows.
-:: Use it like this:
-:: protoc --plugin=protoc-gen-nanopb=..../protoc-gen-nanopb.bat --nanopb_out=dir foo.proto
-::
-:: Note that if you use the binary package of nanopb, the protoc
-:: path is already set up properly and there is no need to give
-:: --plugin= on the command line.
-
-set mydir=%~dp0
-python "%mydir%\nanopb_generator.py" --protoc-plugin %*
diff --git a/MIDAS/src/hilsim/nanopb_generator/protoc.bat b/MIDAS/src/hilsim/nanopb_generator/protoc.bat
deleted file mode 100644
index 2538c94a..00000000
--- a/MIDAS/src/hilsim/nanopb_generator/protoc.bat
+++ /dev/null
@@ -1,9 +0,0 @@
-@echo off
-:: This file acts as a drop-in replacement of binary protoc.exe.
-:: It will use either Python-based protoc from grpcio-tools package,
-:: or if it is not available, protoc.exe from path if found.
-
-setLocal enableDelayedExpansion
-set mydir=%~dp0
-python "%mydir%\protoc" %*
-exit /b %ERRORLEVEL%
diff --git a/MIDAS/src/hilsim/pins.h b/MIDAS/src/hilsim/pins.h
new file mode 100644
index 00000000..e6dbc7a2
--- /dev/null
+++ b/MIDAS/src/hilsim/pins.h
@@ -0,0 +1,83 @@
+#pragma once
+
+// SPI sensor bus
+#define SPI_MISO 13
+#define SPI_MOSI 11
+#define SPI_SCK 12
+
+// barometer chip select
+#define MS5611_CS 14
+
+// gyro chip select
+#define LSM6DS3_CS 3
+
+// high g chip select
+#define KX134_CS 10
+
+// low g chip select
+#define ADXL355_CS 0
+
+// magnetometer chip select
+#define LIS3MDL_CS 9
+
+// orientation chip select, interrupt
+#define BNO086_CS 21
+#define BNO086_INT 47
+#define BNO086_RESET GpioAddress(1, 07)
+
+// voltage adc pin
+#define VOLTAGE_PIN 0
+
+// gps pins
+#define GNSS_I2C_LOCATION 0x3A
+#define GPS_RESET GpioAddress(2, 017)
+#define GPS_ENABLE 0
+
+// i2c bus pins
+#define I2C_SDA 18
+#define I2C_SCL 8
+
+// can pin
+#define CAN_CS 45
+
+// emmc pins
+#define EMMC_CLK 38
+#define EMMC_CMD 39
+#define EMMC_D0 44
+#define EMMC_D1 43
+#define EMMC_D2 2
+#define EMMC_D3 42
+
+// SD Pin(s)
+#define SD_CLK 5
+#define SD_CMD 4
+#define SD_D0 6
+
+// pyro pins
+#define PYRO_GLOBAL_ARM_PIN GpioAddress(0, 07)
+#define PYROA_ARM_PIN GpioAddress(0, 016)
+#define PYROA_FIRE_PIN GpioAddress(0, 017)
+#define PYROB_ARM_PIN GpioAddress(0, 014)
+#define PYROB_FIRE_PIN GpioAddress(0, 015)
+#define PYROC_ARM_PIN GpioAddress(0, 010)
+#define PYROC_FIRE_PIN GpioAddress(0, 011)
+#define PYROD_ARM_PIN GpioAddress(0, 012)
+#define PYROD_FIRE_PIN GpioAddress(0, 013)
+
+// Continuity Pins
+#define SENSE_PYRO 1
+#define SENSE_APOGEE 6
+#define SENSE_MAIN 7
+#define SENSE_MOTOR 4
+#define SENSE_AUX 5
+
+// Telemetry pins
+#define RFM96_CS 1
+#define RFM96_INT 7
+#define RFM96_RESET 15
+
+// LEDs
+#define LED_BLUE GpioAddress(2, 013)
+#define LED_GREEN GpioAddress(2, 014)
+#define LED_ORANGE GpioAddress(2, 015)
+#define LED_RED GpioAddress(2, 016)
diff --git a/MIDAS/src/hilsim/rocketstate.pb.c b/MIDAS/src/hilsim/rocketstate.pb.c
deleted file mode 100644
index 600e436b..00000000
--- a/MIDAS/src/hilsim/rocketstate.pb.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Automatically generated nanopb constant definitions */
-/* Generated by nanopb-0.4.7 */
-
-#include "rocketstate.pb.h"
-#if PB_PROTO_HEADER_VERSION != 40
-#error Regenerate this file with the current version of nanopb generator.
-#endif
-
-PB_BIND(RocketState, RocketState, AUTO)
-
-
-
diff --git a/MIDAS/src/hilsim/rocketstate.pb.h b/MIDAS/src/hilsim/rocketstate.pb.h
deleted file mode 100644
index de76fd90..00000000
--- a/MIDAS/src/hilsim/rocketstate.pb.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/* Automatically generated nanopb header */
-/* Generated by nanopb-0.4.7 */
-
-#ifndef PB_ROCKETSTATE_PB_H_INCLUDED
-#define PB_ROCKETSTATE_PB_H_INCLUDED
-#include <pb.h>
-
-#if PB_PROTO_HEADER_VERSION != 40
-#error Regenerate this file with the current version of nanopb generator.
-#endif
-
-/* Struct definitions */
-typedef struct _RocketState {
- /* Get the state of the rocket */
- int32_t rocket_state;
-} RocketState;
-
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/* Initializer values for message structs */
-#define RocketState_init_default {0}
-#define RocketState_init_zero {0}
-
-/* Field tags (for use in manual encoding/decoding) */
-#define RocketState_rocket_state_tag 1
-
-/* Struct field encoding specification for nanopb */
-#define RocketState_FIELDLIST(X, a) \
-X(a, STATIC, REQUIRED, INT32, rocket_state, 1)
-#define RocketState_CALLBACK NULL
-#define RocketState_DEFAULT NULL
-
-extern const pb_msgdesc_t RocketState_msg;
-
-/* Defines for backwards compatibility with code written before nanopb-0.4.0 */
-#define RocketState_fields &RocketState_msg
-
-/* Maximum encoded size of messages (where known) */
-#define RocketState_size 11
-
-#ifdef __cplusplus
-} /* extern "C" */
-#endif
-
-#endif
diff --git a/MIDAS/src/hilsim/rocketstate.proto b/MIDAS/src/hilsim/rocketstate.proto
deleted file mode 100644
index 284540a3..00000000
--- a/MIDAS/src/hilsim/rocketstate.proto
+++ /dev/null
@@ -1,5 +0,0 @@
-
-message RocketState {
- // Get the state of the rocket
- required int32 rocket_state = 1;
-}
diff --git a/MIDAS/src/hilsim/rocketstate_pb2.py b/MIDAS/src/hilsim/rocketstate_pb2.py
deleted file mode 100644
index 3ca496c6..00000000
--- a/MIDAS/src/hilsim/rocketstate_pb2.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: rocketstate.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf.internal import builder as _builder
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11rocketstate.proto\"#\n\x0bRocketState\x12\x14\n\x0crocket_state\x18\x01 \x02(\x05')
-
-_globals = globals()
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'rocketstate_pb2', _globals)
-if _descriptor._USE_C_DESCRIPTORS == False:
- DESCRIPTOR._options = None
- _globals['_ROCKETSTATE']._serialized_start=21
- _globals['_ROCKETSTATE']._serialized_end=56
-# @@protoc_insertion_point(module_scope)
diff --git a/MIDAS/src/hilsim/sensors.h b/MIDAS/src/hilsim/sensors.h
index 0e84c9ed..6ff3d7d1 100644
--- a/MIDAS/src/hilsim/sensors.h
+++ b/MIDAS/src/hilsim/sensors.h
@@ -2,87 +2,65 @@
#include "errors.h"
#include "sensor_data.h"
-#include "hardware/pins.h"
-
-/**
- * @struct LowG interface
- */
+#include "pins.h"
struct LowGSensor {
ErrorCode init();
LowGData read();
+ LowGData lowg;
};
-/**
- * @struct HighG interface
- */
struct HighGSensor {
ErrorCode init();
HighGData read();
+ HighGData highg;
};
-/**
- * @struct Magnetometer interface
- */
struct MagnetometerSensor {
ErrorCode init();
Magnetometer read();
+ Magnetometer mag;
};
-/**
- * @struct Barometer interface
- */
struct BarometerSensor {
ErrorCode init();
Barometer read();
+ Barometer barometer;
};
-/**
- * @struct LowGLSM interface
- */
struct LowGLSMSensor {
ErrorCode init();
LowGLSM read();
+ LowGLSM lowglsm;
};
-/**
- * @struct Continuity interface
- */
struct ContinuitySensor {
ErrorCode init();
Continuity read();
+ Continuity continuity;
};
-/**
- * @struct Voltage interface
- */
struct VoltageSensor {
ErrorCode init();
- Voltage read();
+ Voltage read();
+ Voltage voltage;
};
-/**
- * @struct BNO interface
- */
struct OrientationSensor {
- Orientation initial_orientation;
- uint8_t initial_flag;
ErrorCode init();
Orientation read();
+ Orientation initial_orientation;
+ uint8_t initial_flag;
+ Orientation orient;
};
-/**
- * @struct GPS interface
- */
struct GPSSensor {
ErrorCode init();
GPS read();
- bool is_leap = false;
+ GPS gps;
};
-/**
- * @struct Pyro interface
- */
struct Pyro {
ErrorCode init();
PyroState tick(FSMState fsm_state, Orientation orientation);
+ // We will have to set this separately
};
diff --git a/MIDAS/src/hilsim/sensors/Barometer.cpp b/MIDAS/src/hilsim/sensors/Barometer.cpp
deleted file mode 100644
index 28631093..00000000
--- a/MIDAS/src/hilsim/sensors/Barometer.cpp
+++ /dev/null
@@ -1,16 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-/**
- * Initializes barometer, returns NoError
-*/
-ErrorCode BarometerSensor::init() {
- return ErrorCode::NoError;
-}
-
-/**
- * Reads the pressure and temperature from the MS5611
- * @return a barometer data packet for the thread to send to the data logger
-*/
-Barometer BarometerSensor::read() {
- return Barometer{global_packet.barometer_temperature,global_packet.barometer_pressure,global_packet.barometer_altitude};
-}
\ No newline at end of file
diff --git a/MIDAS/src/hilsim/sensors/Continuity.cpp b/MIDAS/src/hilsim/sensors/Continuity.cpp
deleted file mode 100644
index f18d0ab4..00000000
--- a/MIDAS/src/hilsim/sensors/Continuity.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-ErrorCode ContinuitySensor::init() {
- return ErrorCode::NoError;
-}
-
-Continuity ContinuitySensor::read() {
- return Continuity{};
-}
diff --git a/MIDAS/src/hilsim/sensors/GPSSensor.cpp b/MIDAS/src/hilsim/sensors/GPSSensor.cpp
deleted file mode 100644
index d5c17af5..00000000
--- a/MIDAS/src/hilsim/sensors/GPSSensor.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-ErrorCode GPSSensor::init() {
- return ErrorCode::NoError;
-}
-
-GPS GPSSensor::read() {
- return GPS{0, 0, 0.f, 0.f, 0, 0};
-}
diff --git a/MIDAS/src/hilsim/sensors/HighG.cpp b/MIDAS/src/hilsim/sensors/HighG.cpp
deleted file mode 100644
index 3b0ac9c4..00000000
--- a/MIDAS/src/hilsim/sensors/HighG.cpp
+++ /dev/null
@@ -1,17 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-/**
- * Initializes the high G data sensor, returns ErrorCode::CANNOT_INIT_KX134_CS if cannot initialize
-*/
-ErrorCode HighGSensor::init() {
- return ErrorCode::NoError;
-}
-
-/**
- * Reads and returns the data from the sensor
- * @return a HighGData packet with current acceleration in all three axies
-*/
-HighGData HighGSensor::read() {
- return HighGData{global_packet.imu_high_ax,global_packet.imu_high_ay,global_packet.imu_high_az};
-}
\ No newline at end of file
diff --git a/MIDAS/src/hilsim/sensors/LowG.cpp b/MIDAS/src/hilsim/sensors/LowG.cpp
deleted file mode 100644
index 1e027464..00000000
--- a/MIDAS/src/hilsim/sensors/LowG.cpp
+++ /dev/null
@@ -1,12 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-ErrorCode LowGSensor::init()
-{
- return ErrorCode::NoError;
-}
-
-LowGData LowGSensor::read()
-{
- return LowGData{global_packet.imu_low_ax,global_packet.imu_low_ay,global_packet.imu_low_az};
-}
\ No newline at end of file
diff --git a/MIDAS/src/hilsim/sensors/LowGLSM.cpp b/MIDAS/src/hilsim/sensors/LowGLSM.cpp
deleted file mode 100644
index 157374db..00000000
--- a/MIDAS/src/hilsim/sensors/LowGLSM.cpp
+++ /dev/null
@@ -1,13 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-ErrorCode LowGLSMSensor::init() {
- return ErrorCode::NoError;
-}
-
-LowGLSM LowGLSMSensor::read() {
- return LowGLSM{
- global_packet.imu_low_lsm_ax,global_packet.imu_low_lsm_ay,global_packet.imu_low_lsm_az,
- global_packet.imu_low_lsm_gx,global_packet.imu_low_lsm_gy,global_packet.imu_low_lsm_gz,
- };
-}
\ No newline at end of file
diff --git a/MIDAS/src/hilsim/sensors/Magnetometer.cpp b/MIDAS/src/hilsim/sensors/Magnetometer.cpp
deleted file mode 100644
index 6c26fccf..00000000
--- a/MIDAS/src/hilsim/sensors/Magnetometer.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-ErrorCode MagnetometerSensor::init() {
- return ErrorCode::NoError;
-}
-
-Magnetometer MagnetometerSensor::read() {
- return Magnetometer{global_packet.mag_x,global_packet.mag_y,global_packet.mag_z};
-}
\ No newline at end of file
diff --git a/MIDAS/src/hilsim/sensors/Orientation.cpp b/MIDAS/src/hilsim/sensors/Orientation.cpp
deleted file mode 100644
index 1ca3b029..00000000
--- a/MIDAS/src/hilsim/sensors/Orientation.cpp
+++ /dev/null
@@ -1,31 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-// #include sensor library
-
-// global static instance of the sensor
-
-
-ErrorCode OrientationSensor::init() {
- // do whatever steps to initialize the sensor
- // if it errors, return the relevant error code
- return ErrorCode::NoError;
-}
-
-Orientation OrientationSensor::read() {
- // read from aforementioned global instance of sensor
- Velocity ang_vel = Velocity{
- .vx = global_packet.ornt_rollv,
- .vy = global_packet.ornt_pitchv,
- .vz = global_packet.ornt_yawv};
- Acceleration ang_accel = Acceleration{global_packet.ornt_rolla,global_packet.ornt_pitcha,global_packet.ornt_yawa};
- Magnetometer mag = Magnetometer{global_packet.ornt_mx,global_packet.ornt_my,global_packet.ornt_mz};
- Acceleration lin_accel = Acceleration{global_packet.ornt_ax,global_packet.ornt_ay,global_packet.ornt_az};
-
- return Orientation{
- false, global_packet.ornt_yaw,global_packet.ornt_pitch,global_packet.ornt_rollv,
- ang_vel, ang_accel, lin_accel,
- global_packet.ornt_gx,global_packet.ornt_gy,global_packet.ornt_gz,
- mag,
- global_packet.ornt_temp, global_packet.barometer_pressure
- };
-}
diff --git a/MIDAS/src/hilsim/sensors/Pyro.cpp b/MIDAS/src/hilsim/sensors/Pyro.cpp
deleted file mode 100644
index 1134df08..00000000
--- a/MIDAS/src/hilsim/sensors/Pyro.cpp
+++ /dev/null
@@ -1,11 +0,0 @@
-#include "sensors.h"
-#include "../global_packet.h"
-
-ErrorCode Pyro::init() {
- return ErrorCode::NoError;
-}
-
-PyroState Pyro::tick(FSMState fsm_state, Orientation orientation) {
- return PyroState();
- //tick
-} // No new line for rhbog >:(
\ No newline at end of file
diff --git a/MIDAS/src/hilsim/sensors/Voltage.cpp b/MIDAS/src/hilsim/sensors/Voltage.cpp
deleted file mode 100644
index ccb3ce7b..00000000
--- a/MIDAS/src/hilsim/sensors/Voltage.cpp
+++ /dev/null
@@ -1,16 +0,0 @@
-#include "sensors.h"
-
-/**
- * "Initializes" the voltage sensor. Since it reads directly from a pin without a library, there is no specific initialization.
-*/
-ErrorCode VoltageSensor::init() {
- return ErrorCode::NoError;
-}
-
-/**
- * Reads the value of the given analog pin and converts it to a battery voltage with the assumption that the voltage sensor is plugged into that pin
- * \return The scaled voltage given by the voltage sensor
-*/
-Voltage VoltageSensor::read() {
- return Voltage{};
-}
diff --git a/MIDAS/src/hilsim/sensors/sensors.h b/MIDAS/src/hilsim/sensors/sensors.h
deleted file mode 100644
index 174f4e5d..00000000
--- a/MIDAS/src/hilsim/sensors/sensors.h
+++ /dev/null
@@ -1,54 +0,0 @@
-#pragma once
-
-#include "errors.h"
-#include "sensor_data.h"
-
-struct LowGSensor {
- ErrorCode init();
- LowGData read();
-};
-
-struct HighGSensor {
- ErrorCode init();
- HighGData read();
-};
-
-struct MagnetometerSensor {
- ErrorCode init();
- Magnetometer read();
-};
-
-struct BarometerSensor {
- ErrorCode init();
- Barometer read();
-};
-
-struct LowGLSMSensor {
- ErrorCode init();
- LowGLSM read();
-};
-
-struct ContinuitySensor {
- ErrorCode init();
- Continuity read();
-};
-
-struct VoltageSensor {
- ErrorCode init();
- Voltage read();
-};
-
-struct OrientationSensor {
- ErrorCode init();
- Orientation read();
-};
-
-struct GPSSensor {
- ErrorCode init();
- GPS read();
-};
-
-struct Pyro {
- ErrorCode init();
- PyroState tick(FSMState fsm_state, Orientation orientation);
-};
diff --git a/MIDAS/src/hilsim/telemetry_backend.h b/MIDAS/src/hilsim/telemetry_backend.h
index 9540a427..92c4d5d8 100644
--- a/MIDAS/src/hilsim/telemetry_backend.h
+++ b/MIDAS/src/hilsim/telemetry_backend.h
@@ -21,7 +21,8 @@ class TelemetryBackend {
}
 template <typename T>
- bool read(T* write) {
+ bool read(T* write, int wait_milliseconds) {
+ // We might still just want to transmit our telemetry in the future
return false;
}
diff --git a/MIDAS/src/log_checksum.h b/MIDAS/src/log_checksum.h
new file mode 100644
index 00000000..ed42c457
--- /dev/null
+++ b/MIDAS/src/log_checksum.h
@@ -0,0 +1,2 @@
+// autogenerated on build by applying crc32 on the concatenation of log_format.h and sensor_data.h
+#define LOG_CHECKSUM (0xf3626b6a)
diff --git a/MIDAS/src/systems.cpp b/MIDAS/src/systems.cpp
index eaebefea..030eb78d 100644
--- a/MIDAS/src/systems.cpp
+++ b/MIDAS/src/systems.cpp
@@ -181,11 +181,117 @@ DECLARE_THREAD(telemetry, RocketSystems* arg) {
}
}
- } else {
- THREAD_SLEEP(1);
}
+ THREAD_SLEEP(1);
+ }
+}
+
+#ifdef HILSIM
+DECLARE_THREAD(hilsim, RocketSystems* arg) {
+ int n = 0;
+ // Debug kamaji output to verify if we're reading the correct packets
+
+ while (true) {
+ while (!Serial.available()) { taskYIELD(); }
+ int tag = Serial.read();
+
+ if (tag == 1) {
+ // LowGData: ax, ay, az
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.low_g.lowg)), sizeof(LowGData));
+ // arg->rocket_data.low_g.update(lowgdata);
+ // Serial.print("LowG");
+ }
+ else if (tag == 2) {
+ // HighGData: ax, ay, az
+ HighGData highgdata;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.high_g.highg)), sizeof(HighGData));
+ // arg->rocket_data.high_g.update(highgdata);
+ // Serial.print("HighG");
+ }
+ else if (tag == 9) {
+ // LowGLSM: gx, gy, gz, ax, ay, az
+ LowGLSM lowglsm;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.low_g_lsm.lowglsm)), sizeof(LowGLSM));
+ // arg->rocket_data.low_g_lsm.update(lowglsm);
+ // Serial.print("LowGLSM");
+ }
+ else if (tag == 3) {
+ // Barometer: temperature, pressure, altitude
+ Barometer barometer;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.barometer.barometer)), sizeof(Barometer));
+ // arg->rocket_data.barometer.update(barometer);
+ // Serial.print("Barometer");
+ }
+ else if (tag == 4) {
+ // Continuity: sense_pyro and pin continuity data
+ Continuity continuity;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.continuity.continuity)), sizeof(Continuity));
+ // arg->rocket_data.continuity.update(continuity);
+ // Serial.print("Continuity");
+ }
+ else if (tag == 5) {
+ // Voltage: single float value
+ Voltage voltage;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.voltage.voltage)), sizeof(Voltage));
+ // arg->rocket_data.voltage.update(voltage);
+ // Serial.print("Voltage");
+ }
+ else if (tag == 6) {
+ // GPS: latitude, longitude, altitude, speed, satellite_count, timestamp
+ GPS gps;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.gps.gps)), sizeof(GPS));
+ // arg->rocket_data.gps.update(gps);
+ // Serial.print("GPS");
+ }
+ else if (tag == 7) {
+ // Magnetometer: mx, my, mz
+ Magnetometer magnetometer;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.magnetometer.mag)), sizeof(Magnetometer));
+ // arg->rocket_data.magnetometer.update(magnetometer);
+ // Serial.print("Magnetometer");
+ }
+ else if (tag == 8) {
+ // Orientation: yaw, pitch, roll, etc.
+ Orientation orientation;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(arg->sensors.orientation.orient)), sizeof(Orientation));
+ // arg->rocket_data.orientation.update(orientation);
+ // Serial.print("Orientation");
+ }
+ else if (tag == 10) {
+ FSMState fsm_state;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(fsm_state)), sizeof(FSMState));
+ // Serial.print("FSM state");
+ // We should ignore fsm state lol
+ }
+ else if (tag == 11) {
+ // KalmanData: position, velocity, acceleration, altitude
+ KalmanData kalman_data;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(kalman_data)), sizeof(KalmanData));
+ // arg->rocket_data.kalman_data.update(kalman_data);
+
+ // Serial.print("kf data"); // We also ignore kf data
+ }
+ else if (tag == 12) {
+ // PyroState: global armed state and channel data
+ PyroState pyro_state;
+ Serial.readBytes(reinterpret_cast<uint8_t*>(&(pyro_state)), sizeof(PyroState));
+ // arg->rocket_data.pyro_state.update(pyro_state);
+ }
+ else {
+ // Unknown tag, handle error
+ // Serial.print("Error: Unknown tag received!");
+ // Serial.print(tag);
+ }
+
+ // Serial.println("Read line");
+ // Print fsm state
+ Serial.write(arg->rocket_data.fsm_state.getRecentUnsync());
+ PyroState data = arg->rocket_data.pyro.getRecentUnsync();
+ Serial.write((char*) &data, sizeof(PyroState));
+ Serial.flush();
}
}
+#endif
#define INIT_SYSTEM(s) do { ErrorCode code = (s).init(); if (code != NoError) { return code; } } while (0)
@@ -211,6 +317,7 @@ ErrorCode init_systems(RocketSystems& systems) {
INIT_SYSTEM(systems.buzzer);
INIT_SYSTEM(systems.tlm);
INIT_SYSTEM(systems.sensors.gps);
+ THREAD_SLEEP(1000);
gpioDigitalWrite(LED_ORANGE, LOW);
return NoError;
}
@@ -222,7 +329,7 @@ ErrorCode init_systems(RocketSystems& systems) {
* If initialization fails, then this enters an infinite loop.
*/
[[noreturn]] void begin_systems(RocketSystems* config) {
- Serial.println("Starting Systems...");
+ // Serial.println("Starting Systems...");
ErrorCode init_error_code = init_systems(*config);
if (init_error_code != NoError) {
// todo some message probably
@@ -248,14 +355,12 @@ ErrorCode init_systems(RocketSystems& systems) {
START_THREAD(fsm, SENSOR_CORE, config, 8);
START_THREAD(buzzer, SENSOR_CORE, config, 6);
START_THREAD(telemetry, SENSOR_CORE, config, 15);
-
+ START_THREAD(hilsim, DATA_CORE, config, 15);
config->buzzer.play_tune(free_bird, FREE_BIRD_LENGTH);
+
while (true) {
THREAD_SLEEP(1000);
- Serial.print("Running (Log Latency: ");
- Serial.print(config->rocket_data.log_latency.getLatency());
- Serial.println(")");
}
}
diff --git a/MIDAS/src/systems.h b/MIDAS/src/systems.h
index 03e11f02..b34c520f 100644
--- a/MIDAS/src/systems.h
+++ b/MIDAS/src/systems.h
@@ -16,6 +16,7 @@
#elif defined(HILSIM)
#include "TCAL9539.h"
#include "hilsim/sensors.h"
+#include "hilsim/pins.h"
#else
#include "hardware/sensors.h"
#endif