diff --git a/Data/Migrations/20251209203710_RemoveUnusedImageDataFields.Designer.cs b/Data/Migrations/20251209203710_RemoveUnusedImageDataFields.Designer.cs
new file mode 100644
index 0000000..29b6018
--- /dev/null
+++ b/Data/Migrations/20251209203710_RemoveUnusedImageDataFields.Designer.cs
@@ -0,0 +1,552 @@
+// <auto-generated />
+using System;
+using System.Text.Json;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+using SatOps.Data;
+
+#nullable disable
+
+namespace SatOps.Data.migrations
+{
+ [DbContext(typeof(SatOpsDbContext))]
+ [Migration("20251209203710_RemoveUnusedImageDataFields")]
+ partial class RemoveUnusedImageDataFields
+ {
+ /// <inheritdoc />
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "8.0.8")
+ .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+ NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+ modelBuilder.Entity("SatOps.Modules.FlightPlan.FlightPlan", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id"));
+
+ b.Property("ApprovalDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("ApprovedById")
+ .HasColumnType("integer");
+
+ b.Property("Commands")
+ .IsRequired()
+ .HasColumnType("jsonb");
+
+ b.Property("CreatedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.Property("CreatedById")
+ .HasColumnType("integer");
+
+ b.Property("FailureReason")
+ .HasColumnType("text");
+
+ b.Property("GroundStationId")
+ .HasColumnType("integer");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("PreviousPlanId")
+ .HasColumnType("integer");
+
+ b.Property("SatelliteId")
+ .HasColumnType("integer");
+
+ b.Property("ScheduledAt")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Status")
+ .HasColumnType("integer");
+
+ b.Property("UpdatedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ApprovedById");
+
+ b.HasIndex("CreatedById");
+
+ b.HasIndex("GroundStationId");
+
+ b.HasIndex("PreviousPlanId");
+
+ b.HasIndex("SatelliteId");
+
+ b.HasIndex("Status");
+
+ b.ToTable("flight_plans", (string)null);
+ });
+
+ modelBuilder.Entity("SatOps.Modules.GroundStationLink.ImageData", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id"));
+
+ b.Property("CaptureTime")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("ContentType")
+ .IsRequired()
+ .HasMaxLength(100)
+ .HasColumnType("character varying(100)");
+
+ b.Property("FileName")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("FileSize")
+ .HasColumnType("bigint");
+
+ b.Property("FlightPlanId")
+ .HasColumnType("integer");
+
+ b.Property("GroundStationId")
+ .HasColumnType("integer");
+
+ b.Property("Latitude")
+ .HasPrecision(9, 6)
+ .HasColumnType("double precision");
+
+ b.Property("Longitude")
+ .HasPrecision(9, 6)
+ .HasColumnType("double precision");
+
+ b.Property("ReceivedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.Property("S3ObjectPath")
+ .IsRequired()
+ .HasMaxLength(500)
+ .HasColumnType("character varying(500)");
+
+ b.Property("SatelliteId")
+ .HasColumnType("integer");
+
+ b.HasKey("Id");
+
+ b.HasIndex("CaptureTime");
+
+ b.HasIndex("FlightPlanId");
+
+ b.HasIndex("GroundStationId");
+
+ b.HasIndex("SatelliteId");
+
+ b.HasIndex("Latitude", "Longitude");
+
+ b.ToTable("image_data", (string)null);
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Groundstation.GroundStation", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id"));
+
+ b.Property("ApiKeyHash")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("ApplicationId")
+ .HasColumnType("uuid");
+
+ b.Property("CreatedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("UpdatedAt")
+ .HasColumnType("timestamp with time zone");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ApplicationId")
+ .IsUnique();
+
+ b.ToTable("ground_stations", (string)null);
+
+ b.HasData(
+ new
+ {
+ Id = 1,
+ ApiKeyHash = "",
+ ApplicationId = new Guid("250749f0-4728-4a87-8d94-a83df2bffe77"),
+ CreatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6090),
+ Name = "Aarhus",
+ UpdatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6090)
+ });
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Overpass.Entity", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id"));
+
+ b.Property("DurationSeconds")
+ .HasColumnType("integer");
+
+ b.Property("EndAzimuth")
+ .HasColumnType("double precision");
+
+ b.Property("EndTime")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("FlightPlanId")
+ .HasColumnType("integer");
+
+ b.Property("GroundStationId")
+ .HasColumnType("integer");
+
+ b.Property("MaxElevation")
+ .HasColumnType("double precision");
+
+ b.Property("MaxElevationTime")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("SatelliteId")
+ .HasColumnType("integer");
+
+ b.Property("StartAzimuth")
+ .HasColumnType("double precision");
+
+ b.Property("StartTime")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("TleLine1")
+ .HasColumnType("text");
+
+ b.Property("TleLine2")
+ .HasColumnType("text");
+
+ b.Property("TleUpdateTime")
+ .HasColumnType("timestamp with time zone");
+
+ b.HasKey("Id");
+
+ b.HasIndex("FlightPlanId")
+ .IsUnique();
+
+ b.HasIndex("GroundStationId");
+
+ b.HasIndex("SatelliteId", "GroundStationId", "StartTime");
+
+ b.ToTable("overpasses", (string)null);
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Satellite.Satellite", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id"));
+
+ b.Property("CreatedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.Property("LastUpdate")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("NoradId")
+ .HasColumnType("integer");
+
+ b.Property("Status")
+ .HasColumnType("integer");
+
+ b.Property("TleLine1")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.Property("TleLine2")
+ .IsRequired()
+ .HasColumnType("text");
+
+ b.HasKey("Id");
+
+ b.HasIndex("NoradId")
+ .IsUnique();
+
+ b.HasIndex("Status");
+
+ b.ToTable("satellites", (string)null);
+
+ b.HasData(
+ new
+ {
+ Id = 1,
+ CreatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
+ LastUpdate = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
+ Name = "International Space Station (ISS)",
+ NoradId = 25544,
+ Status = 0,
+ TleLine1 = "1 25544U 98067A 23256.90616898 .00020137 00000-0 35438-3 0 9992",
+ TleLine2 = "2 25544 51.6416 339.0970 0003835 48.3825 73.2709 15.50030022414673"
+ },
+ new
+ {
+ Id = 2,
+ CreatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
+ LastUpdate = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
+ Name = "SENTINEL-2C",
+ NoradId = 60989,
+ Status = 0,
+ TleLine1 = "1 60989U 24157A 25270.79510520 .00000303 00000-0 13232-3 0 9996",
+ TleLine2 = "2 60989 98.5675 344.4033 0001006 86.9003 273.2295 14.30815465 55465"
+ });
+ });
+
+ modelBuilder.Entity("SatOps.Modules.User.User", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("integer");
+
+ NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property("Id"));
+
+ b.Property("Auth0UserId")
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("CreatedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.Property("Email")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("Role")
+ .HasColumnType("integer");
+
+ b.Property("UpdatedAt")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("timestamp with time zone")
+ .HasDefaultValueSql("timezone('utc', now())");
+
+ b.HasKey("Id");
+
+ b.HasIndex("Email")
+ .IsUnique();
+
+ b.HasIndex("Role");
+
+ b.ToTable("users", (string)null);
+ });
+
+ modelBuilder.Entity("SatOps.Modules.FlightPlan.FlightPlan", b =>
+ {
+ b.HasOne("SatOps.Modules.User.User", "ApprovedBy")
+ .WithMany("ApprovedFlightPlans")
+ .HasForeignKey("ApprovedById")
+ .OnDelete(DeleteBehavior.SetNull);
+
+ b.HasOne("SatOps.Modules.User.User", "CreatedBy")
+ .WithMany("CreatedFlightPlans")
+ .HasForeignKey("CreatedById")
+ .OnDelete(DeleteBehavior.Restrict)
+ .IsRequired();
+
+ b.HasOne("SatOps.Modules.Groundstation.GroundStation", "GroundStation")
+ .WithMany("FlightPlans")
+ .HasForeignKey("GroundStationId")
+ .OnDelete(DeleteBehavior.SetNull);
+
+ b.HasOne("SatOps.Modules.FlightPlan.FlightPlan", "PreviousPlan")
+ .WithMany()
+ .HasForeignKey("PreviousPlanId")
+ .OnDelete(DeleteBehavior.SetNull);
+
+ b.HasOne("SatOps.Modules.Satellite.Satellite", "Satellite")
+ .WithMany("FlightPlans")
+ .HasForeignKey("SatelliteId")
+ .OnDelete(DeleteBehavior.Restrict)
+ .IsRequired();
+
+ b.Navigation("ApprovedBy");
+
+ b.Navigation("CreatedBy");
+
+ b.Navigation("GroundStation");
+
+ b.Navigation("PreviousPlan");
+
+ b.Navigation("Satellite");
+ });
+
+ modelBuilder.Entity("SatOps.Modules.GroundStationLink.ImageData", b =>
+ {
+ b.HasOne("SatOps.Modules.FlightPlan.FlightPlan", "FlightPlan")
+ .WithMany()
+ .HasForeignKey("FlightPlanId")
+ .OnDelete(DeleteBehavior.SetNull);
+
+ b.HasOne("SatOps.Modules.Groundstation.GroundStation", "GroundStation")
+ .WithMany("Images")
+ .HasForeignKey("GroundStationId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("SatOps.Modules.Satellite.Satellite", "Satellite")
+ .WithMany("Images")
+ .HasForeignKey("SatelliteId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.Navigation("FlightPlan");
+
+ b.Navigation("GroundStation");
+
+ b.Navigation("Satellite");
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Groundstation.GroundStation", b =>
+ {
+ b.OwnsOne("SatOps.Modules.Groundstation.Location", "Location", b1 =>
+ {
+ b1.Property("GroundStationId")
+ .HasColumnType("integer");
+
+ b1.Property("Altitude")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("double precision")
+ .HasDefaultValue(0.0)
+ .HasColumnName("altitude");
+
+ b1.Property("Latitude")
+ .HasColumnType("double precision")
+ .HasColumnName("latitude");
+
+ b1.Property("Longitude")
+ .HasColumnType("double precision")
+ .HasColumnName("longitude");
+
+ b1.HasKey("GroundStationId");
+
+ b1.ToTable("ground_stations");
+
+ b1.WithOwner()
+ .HasForeignKey("GroundStationId");
+
+ b1.HasData(
+ new
+ {
+ GroundStationId = 1,
+ Altitude = 62.0,
+ Latitude = 56.171972897990663,
+ Longitude = 10.191659216036516
+ });
+ });
+
+ b.Navigation("Location")
+ .IsRequired();
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Overpass.Entity", b =>
+ {
+ b.HasOne("SatOps.Modules.FlightPlan.FlightPlan", "FlightPlan")
+ .WithOne("Overpass")
+ .HasForeignKey("SatOps.Modules.Overpass.Entity", "FlightPlanId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("SatOps.Modules.Groundstation.GroundStation", "GroundStation")
+ .WithMany("Overpasses")
+ .HasForeignKey("GroundStationId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.HasOne("SatOps.Modules.Satellite.Satellite", "Satellite")
+ .WithMany("Overpasses")
+ .HasForeignKey("SatelliteId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+
+ b.Navigation("FlightPlan");
+
+ b.Navigation("GroundStation");
+
+ b.Navigation("Satellite");
+ });
+
+ modelBuilder.Entity("SatOps.Modules.FlightPlan.FlightPlan", b =>
+ {
+ b.Navigation("Overpass");
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Groundstation.GroundStation", b =>
+ {
+ b.Navigation("FlightPlans");
+
+ b.Navigation("Images");
+
+ b.Navigation("Overpasses");
+ });
+
+ modelBuilder.Entity("SatOps.Modules.Satellite.Satellite", b =>
+ {
+ b.Navigation("FlightPlans");
+
+ b.Navigation("Images");
+
+ b.Navigation("Overpasses");
+ });
+
+ modelBuilder.Entity("SatOps.Modules.User.User", b =>
+ {
+ b.Navigation("ApprovedFlightPlans");
+
+ b.Navigation("CreatedFlightPlans");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/Data/Migrations/20251209203710_RemoveUnusedImageDataFields.cs b/Data/Migrations/20251209203710_RemoveUnusedImageDataFields.cs
new file mode 100644
index 0000000..83c561f
--- /dev/null
+++ b/Data/Migrations/20251209203710_RemoveUnusedImageDataFields.cs
@@ -0,0 +1,91 @@
+using System;
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace SatOps.Data.migrations
+{
+ /// <inheritdoc />
+ public partial class RemoveUnusedImageDataFields : Migration
+ {
+ /// <inheritdoc />
+ protected override void Up(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropColumn(
+ name: "ImageHeight",
+ table: "image_data");
+
+ migrationBuilder.DropColumn(
+ name: "ImageWidth",
+ table: "image_data");
+
+ migrationBuilder.DropColumn(
+ name: "Metadata",
+ table: "image_data");
+
+ migrationBuilder.UpdateData(
+ table: "ground_stations",
+ keyColumn: "Id",
+ keyValue: 1,
+ columns: new[] { "ApplicationId", "CreatedAt", "UpdatedAt" },
+ values: new object[] { new Guid("250749f0-4728-4a87-8d94-a83df2bffe77"), new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6090), new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6090) });
+
+ migrationBuilder.UpdateData(
+ table: "satellites",
+ keyColumn: "Id",
+ keyValue: 1,
+ columns: new[] { "CreatedAt", "LastUpdate" },
+ values: new object[] { new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260), new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260) });
+
+ migrationBuilder.UpdateData(
+ table: "satellites",
+ keyColumn: "Id",
+ keyValue: 2,
+ columns: new[] { "CreatedAt", "LastUpdate" },
+ values: new object[] { new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260), new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260) });
+ }
+
+ /// <inheritdoc />
+ protected override void Down(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.AddColumn<int>(
+ name: "ImageHeight",
+ table: "image_data",
+ type: "integer",
+ nullable: true);
+
+ migrationBuilder.AddColumn<int>(
+ name: "ImageWidth",
+ table: "image_data",
+ type: "integer",
+ nullable: true);
+
+ migrationBuilder.AddColumn(
+ name: "Metadata",
+ table: "image_data",
+ type: "jsonb",
+ nullable: true);
+
+ migrationBuilder.UpdateData(
+ table: "ground_stations",
+ keyColumn: "Id",
+ keyValue: 1,
+ columns: new[] { "ApplicationId", "CreatedAt", "UpdatedAt" },
+ values: new object[] { new Guid("c4bd6c61-8e11-43cf-868a-fdd0a4be81b2"), new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9380), new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9380) });
+
+ migrationBuilder.UpdateData(
+ table: "satellites",
+ keyColumn: "Id",
+ keyValue: 1,
+ columns: new[] { "CreatedAt", "LastUpdate" },
+ values: new object[] { new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710), new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710) });
+
+ migrationBuilder.UpdateData(
+ table: "satellites",
+ keyColumn: "Id",
+ keyValue: 2,
+ columns: new[] { "CreatedAt", "LastUpdate" },
+ values: new object[] { new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710), new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710) });
+ }
+ }
+}
diff --git a/Data/Migrations/SatOpsDbContextModelSnapshot.cs b/Data/Migrations/SatOpsDbContextModelSnapshot.cs
index 6fd0124..36b9a32 100644
--- a/Data/Migrations/SatOpsDbContextModelSnapshot.cs
+++ b/Data/Migrations/SatOpsDbContextModelSnapshot.cs
@@ -123,12 +123,6 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.Property("GroundStationId")
.HasColumnType("integer");
- b.Property("ImageHeight")
- .HasColumnType("integer");
-
- b.Property("ImageWidth")
- .HasColumnType("integer");
-
b.Property("Latitude")
.HasPrecision(9, 6)
.HasColumnType("double precision");
@@ -137,9 +131,6 @@ protected override void BuildModel(ModelBuilder modelBuilder)
.HasPrecision(9, 6)
.HasColumnType("double precision");
- b.Property("Metadata")
- .HasColumnType("jsonb");
-
b.Property("ReceivedAt")
.ValueGeneratedOnAdd()
.HasColumnType("timestamp with time zone")
@@ -207,10 +198,10 @@ protected override void BuildModel(ModelBuilder modelBuilder)
{
Id = 1,
ApiKeyHash = "",
- ApplicationId = new Guid("c4bd6c61-8e11-43cf-868a-fdd0a4be81b2"),
- CreatedAt = new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9380),
+ ApplicationId = new Guid("250749f0-4728-4a87-8d94-a83df2bffe77"),
+ CreatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6090),
Name = "Aarhus",
- UpdatedAt = new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9380)
+ UpdatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6090)
});
});
@@ -322,8 +313,8 @@ protected override void BuildModel(ModelBuilder modelBuilder)
new
{
Id = 1,
- CreatedAt = new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710),
- LastUpdate = new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710),
+ CreatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
+ LastUpdate = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
Name = "International Space Station (ISS)",
NoradId = 25544,
Status = 0,
@@ -333,8 +324,8 @@ protected override void BuildModel(ModelBuilder modelBuilder)
new
{
Id = 2,
- CreatedAt = new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710),
- LastUpdate = new DateTime(2025, 12, 9, 14, 19, 32, 968, DateTimeKind.Utc).AddTicks(9710),
+ CreatedAt = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
+ LastUpdate = new DateTime(2025, 12, 9, 20, 37, 9, 846, DateTimeKind.Utc).AddTicks(6260),
Name = "SENTINEL-2C",
NoradId = 60989,
Status = 0,
diff --git a/Data/SatOpsDbContext.cs b/Data/SatOpsDbContext.cs
index 4a78152..33dc6cd 100644
--- a/Data/SatOpsDbContext.cs
+++ b/Data/SatOpsDbContext.cs
@@ -171,7 +171,6 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
entity.Property(e => e.ReceivedAt).HasDefaultValueSql("timezone('utc', now())");
entity.Property(e => e.Latitude).HasPrecision(9, 6);
entity.Property(e => e.Longitude).HasPrecision(9, 6);
- entity.Property(e => e.Metadata).HasColumnType("jsonb");
// --- Relationships ---
// Images are dependent data. Cascade deletes are appropriate.
diff --git a/Modules/FlightPlan/Command.cs b/Modules/FlightPlan/Command.cs
index fdfc3dd..f97ab45 100644
--- a/Modules/FlightPlan/Command.cs
+++ b/Modules/FlightPlan/Command.cs
@@ -3,6 +3,7 @@
using System.Text.Json.Serialization;
using SatOps.Modules.FlightPlan.Commands;
using SatOps.Configuration;
+using Microsoft.Extensions.Logging;
namespace SatOps.Modules.FlightPlan
{
@@ -203,11 +204,22 @@ public static (bool IsValid, List Errors) ValidateAll(this List
/// Calculates execution times for commands that require it (e.g., TriggerCaptureCommand).
/// This should be called before CompileAllToCsh() for flight plans containing such commands.
/// </summary>
+ /// <param name="commands">The list of commands to calculate execution times for.</param>
+ /// <param name="satellite">The satellite to use for orbital calculations.</param>
+ /// <param name="imagingCalculation">The imaging calculation service.</param>
+ /// <param name="options">Configuration options for imaging calculations.</param>
+ /// <param name="blockedTimes">Optional list of blocked time windows (from other active flight plans) to avoid conflicts.</param>
+ /// <param name="conflictMargin">Time margin around blocked times to avoid conflicts. Defaults to 2 minutes.</param>
+ /// <param name="logger">Optional logger for diagnostic output.</param>
+ /// <param name="commandReceptionTime">Optional start time for searching. Defaults to UTC now.</param>
public static async Task CalculateExecutionTimesAsync(
this List<Command> commands,
Satellite.Satellite satellite,
IImagingCalculation imagingCalculation,
ImagingCalculationOptions options,
+ List<DateTime>? blockedTimes = null,
+ TimeSpan? conflictMargin = null,
+ ILogger? logger = null,
DateTime? commandReceptionTime = null)
{
if (string.IsNullOrWhiteSpace(satellite.TleLine1) || string.IsNullOrWhiteSpace(satellite.TleLine2))
@@ -218,9 +230,20 @@ public static async Task CalculateExecutionTimesAsync(
var tle = new SGPdotNET.TLE.Tle(satellite.Name, satellite.TleLine1, satellite.TleLine2);
var sgp4Satellite = new SGPdotNET.Observation.Satellite(tle);
var receptionTime = commandReceptionTime ?? DateTime.UtcNow;
+ var margin = conflictMargin ?? TimeSpan.FromMinutes(2);
+ var maxRetries = 10; // Limit retries to avoid infinite loops
+
+ logger?.LogDebug("Starting execution time calculation for {CommandCount} commands on satellite '{SatelliteName}'",
+ commands.Count, satellite.Name);
+ logger?.LogDebug("Blocked times from other flight plans: {BlockedCount}", blockedTimes?.Count ?? 0);
+
+ // Collect execution times from this flight plan's commands to avoid internal conflicts
+ var usedTimes = new List<DateTime>();
+ var commandIndex = 0;
foreach (var command in commands)
{
+ commandIndex++;
if (command is TriggerCaptureCommand captureCommand && captureCommand.RequiresExecutionTimeCalculation)
{
if (captureCommand.CaptureLocation == null)
@@ -229,38 +252,126 @@ public static async Task CalculateExecutionTimesAsync(
"TriggerCaptureCommand requires CaptureLocation to calculate execution time.");
}
+ logger?.LogDebug("Processing command {Index}/{Total}: TRIGGER_CAPTURE at ({Lat:F4}, {Lon:F4})",
+ commandIndex, commands.Count,
+ captureCommand.CaptureLocation.Latitude,
+ captureCommand.CaptureLocation.Longitude);
+
// Create target coordinate
var targetCoordinate = new SGPdotNET.CoordinateSystem.GeodeticCoordinate(
SGPdotNET.Util.Angle.FromDegrees(captureCommand.CaptureLocation.Latitude),
SGPdotNET.Util.Angle.FromDegrees(captureCommand.CaptureLocation.Longitude),
0); // Ground level
-
var maxSearchDuration = TimeSpan.FromHours(options.MaxSearchDurationHours);
var minOffNadirDegrees = options.MaxOffNadirDegrees;
+ var searchStartTime = receptionTime;
+ var retryCount = 0;
+
+ ImagingCalculation.ImagingOpportunity? validOpportunity = null;
- var imagingOpportunity = await Task.Run(() =>
- imagingCalculation.FindBestImagingOpportunity(
- sgp4Satellite,
- targetCoordinate,
- receptionTime,
- maxSearchDuration
- )
- );
-
- // Check if the opportunity is within acceptable off-nadir angle
- if (imagingOpportunity.OffNadirDegrees > minOffNadirDegrees)
+ while (validOpportunity == null && retryCount < maxRetries)
{
+ var imagingOpportunity = await Task.Run(() =>
+ imagingCalculation.FindBestImagingOpportunity(
+ sgp4Satellite,
+ targetCoordinate,
+ searchStartTime,
+ maxSearchDuration
+ )
+ );
+
+ logger?.LogDebug("Found imaging opportunity at {Time:O} with off-nadir {OffNadir:F2}° (attempt {Attempt})",
+ imagingOpportunity.ImagingTime, imagingOpportunity.OffNadirDegrees, retryCount + 1);
+
+ // Check if the opportunity is within acceptable off-nadir angle
+ if (imagingOpportunity.OffNadirDegrees > minOffNadirDegrees)
+ {
+ logger?.LogWarning("Imaging opportunity rejected: off-nadir {OffNadir:F2}° exceeds limit of {Limit}°",
+ imagingOpportunity.OffNadirDegrees, minOffNadirDegrees);
+ throw new InvalidOperationException(
+ $"No imaging opportunity found within the off-nadir limit of {minOffNadirDegrees} degrees. " +
+ $"Best opportunity found was {imagingOpportunity.OffNadirDegrees:F2} degrees off-nadir at {imagingOpportunity.ImagingTime:yyyy-MM-dd HH:mm:ss} UTC. " +
+ $"Consider increasing MaxOffNadirDegrees or choosing a different target location.");
+ }
+
+ // Check for conflicts with blocked times from other flight plans
+ var hasConflict = false;
+ DateTime? conflictTime = null;
+ var conflictSource = "";
+
+ if (blockedTimes != null)
+ {
+ foreach (var blockedTime in blockedTimes)
+ {
+ var timeDiff = Math.Abs((imagingOpportunity.ImagingTime - blockedTime).TotalSeconds);
+ if (timeDiff < margin.TotalSeconds)
+ {
+ hasConflict = true;
+ conflictTime = blockedTime;
+ conflictSource = "another flight plan";
+ break;
+ }
+ }
+ }
+
+ // Also check for conflicts with other commands in this same flight plan
+ if (!hasConflict)
+ {
+ foreach (var usedTime in usedTimes)
+ {
+ var timeDiff = Math.Abs((imagingOpportunity.ImagingTime - usedTime).TotalSeconds);
+ if (timeDiff < margin.TotalSeconds)
+ {
+ hasConflict = true;
+ conflictTime = usedTime;
+ conflictSource = "this flight plan";
+ break;
+ }
+ }
+ }
+
+ if (hasConflict && conflictTime.HasValue)
+ {
+ logger?.LogInformation(
+ "Conflict detected at {OpportunityTime:O} with command from {Source} at {ConflictTime:O}. " +
+ "Searching for next opportunity after {NewStartTime:O}",
+ imagingOpportunity.ImagingTime, conflictSource, conflictTime.Value,
+ conflictTime.Value.Add(margin));
+
+ // Move search start time past the conflict and try again
+ searchStartTime = conflictTime.Value.Add(margin).Add(TimeSpan.FromSeconds(1));
+ retryCount++;
+ }
+ else
+ {
+ validOpportunity = imagingOpportunity;
+ }
+ }
+
+ if (validOpportunity == null)
+ {
+ logger?.LogError(
+ "Failed to find non-conflicting imaging opportunity for target at ({Lat:F4}, {Lon:F4}) after {Retries} attempts",
+ captureCommand.CaptureLocation.Latitude, captureCommand.CaptureLocation.Longitude, maxRetries);
throw new InvalidOperationException(
- $"No imaging opportunity found within the off-nadir limit of {minOffNadirDegrees} degrees. " +
- $"Best opportunity found was {imagingOpportunity.OffNadirDegrees:F2} degrees off-nadir at {imagingOpportunity.ImagingTime:yyyy-MM-dd HH:mm:ss} UTC. " +
- $"Consider increasing MaxOffNadirDegrees or choosing a different target location.");
+ $"Could not find a non-conflicting imaging opportunity for target at " +
+ $"({captureCommand.CaptureLocation.Latitude:F4}, {captureCommand.CaptureLocation.Longitude:F4}) " +
+ $"after {maxRetries} attempts. Consider rescheduling conflicting flight plans.");
}
- // Set the calculated execution time
- captureCommand.ExecutionTime = imagingOpportunity.ImagingTime;
+ // Set the calculated execution time and track it
+ captureCommand.ExecutionTime = validOpportunity.ImagingTime;
+ usedTimes.Add(validOpportunity.ImagingTime);
+
+ logger?.LogInformation(
+ "Scheduled TRIGGER_CAPTURE for ({Lat:F4}, {Lon:F4}) at {Time:O} with off-nadir {OffNadir:F2}°",
+ captureCommand.CaptureLocation.Latitude, captureCommand.CaptureLocation.Longitude,
+ validOpportunity.ImagingTime, validOpportunity.OffNadirDegrees);
}
}
+
+ logger?.LogDebug("Execution time calculation completed. Assigned {Count} execution times.", usedTimes.Count);
}
///
diff --git a/Modules/FlightPlan/Repository.cs b/Modules/FlightPlan/Repository.cs
index 04d3865..8f477c2 100644
--- a/Modules/FlightPlan/Repository.cs
+++ b/Modules/FlightPlan/Repository.cs
@@ -12,6 +12,7 @@ public interface IFlightPlanRepository
Task UpdateAsync(FlightPlan entity);
Task<List<FlightPlan>> GetPlansReadyForTransmissionAsync(DateTime horizon);
Task<List<FlightPlan>> GetActivePlansBySatelliteAsync(int satelliteId);
+ Task<List<FlightPlan>> GetTransmittedPlansBySatelliteAsync(int satelliteId);
}
public class FlightPlanRepository(SatOpsDbContext dbContext) : IFlightPlanRepository
@@ -67,5 +68,19 @@ public Task> GetActivePlansBySatelliteAsync(int satelliteId)
fp.Status == FlightPlanStatus.Transmitted))
.ToListAsync();
}
+
+ /// <summary>
+ /// Gets only transmitted flight plans for a satellite.
+ /// Used for execution time conflict checking - only transmitted plans have
+ /// finalized execution times that must be avoided.
+ /// </summary>
+ public Task<List<FlightPlan>> GetTransmittedPlansBySatelliteAsync(int satelliteId)
+ {
+ return dbContext.FlightPlans
+ .AsNoTracking()
+ .Where(fp => fp.SatelliteId == satelliteId &&
+ fp.Status == FlightPlanStatus.Transmitted)
+ .ToListAsync();
+ }
}
}
\ No newline at end of file
diff --git a/Modules/FlightPlan/Service.cs b/Modules/FlightPlan/Service.cs
index b8d0bb8..9b70808 100644
--- a/Modules/FlightPlan/Service.cs
+++ b/Modules/FlightPlan/Service.cs
@@ -31,7 +31,8 @@ public class FlightPlanService(
IOverpassService overpassService,
IImagingCalculation imagingCalculation,
ICurrentUserProvider currentUserProvider,
- IOptions imagingOptions
+ IOptions<ImagingCalculationOptions> imagingOptions,
+ ILogger<FlightPlanService> logger
) : IFlightPlanService
{
public Task<List<FlightPlan>> ListAsync() => repository.GetAllAsync();
@@ -208,52 +209,32 @@ public async Task CreateAsync(CreateFlightPlanDto createDto)
if (selectedOverpass == null)
return (false, $"No overpass found within {matchToleranceMinutes}-minute tolerance.");
- var currentPlanCommands = flightPlan.GetCommands();
-
- await currentPlanCommands.CalculateExecutionTimesAsync(satellite, imagingCalculation, imagingOptions.Value);
-
- flightPlan.SetCommands(currentPlanCommands);
-
- foreach (var cmd in currentPlanCommands)
- {
- if (cmd.ExecutionTime.HasValue)
- {
- if (cmd.ExecutionTime.Value <= selectedOverpass.EndTime)
- {
- return (false, $"Chronology Error: Command '{cmd.CommandType}' scheduled for {cmd.ExecutionTime.Value:O} occurs before or during the upload overpass (Ends: {selectedOverpass.EndTime:O}). Time travel is not supported.");
- }
- }
- }
-
+ // Check for overpass conflicts: prevent two ground stations from uploading to the same satellite
+ // at overlapping times. This prevents race conditions where nearby ground stations could
+ // both attempt to communicate with the satellite simultaneously.
+ var overpassConflictMargin = TimeSpan.FromMinutes(5);
var activePlans = await repository.GetActivePlansBySatelliteAsync(flightPlan.SatelliteId);
- var conflictMargin = TimeSpan.FromMinutes(2);
-
foreach (var activePlan in activePlans)
{
if (activePlan.Id == flightPlan.Id) continue;
+ if (!activePlan.ScheduledAt.HasValue) continue;
- var activeCommands = activePlan.GetCommands();
+ // Check if the scheduled transmission times overlap (with margin)
+ var timeDiff = Math.Abs((selectedOverpass.MaxElevationTime - activePlan.ScheduledAt.Value).TotalSeconds);
- foreach (var newCmd in currentPlanCommands)
+ if (timeDiff < overpassConflictMargin.TotalSeconds)
{
- if (!newCmd.ExecutionTime.HasValue) continue;
-
- foreach (var existingCmd in activeCommands)
- {
- if (!existingCmd.ExecutionTime.HasValue) continue;
-
- var timeDiff = Math.Abs((newCmd.ExecutionTime.Value - existingCmd.ExecutionTime.Value).TotalSeconds);
-
- if (timeDiff < conflictMargin.TotalSeconds)
- {
- return (false, $"Conflict Error: This plan conflicts with active Flight Plan #{activePlan.Id} ('{activePlan.Name}'). " +
- $"Command execution times overlap at {newCmd.ExecutionTime.Value:O} (Margin: {conflictMargin.TotalMinutes} min).");
- }
- }
+ return (false, $"Overpass Conflict: This overpass conflicts with Flight Plan #{activePlan.Id} ('{activePlan.Name}'). " +
+ $"Scheduled transmission times overlap at {selectedOverpass.MaxElevationTime:O} vs {activePlan.ScheduledAt.Value:O} " +
+ $"(Margin: {overpassConflictMargin.TotalMinutes} min). Choose a different overpass window.");
}
}
+ // Note: Execution time calculation and conflict checking for command overlap is deferred
+ // to transmission time in CompileFlightPlanToCshAsync. This avoids duplicate calculations
+ // and ensures we use the most up-to-date TLE data when the plan is actually transmitted.
+
var (success, overpassEntity, message) = await overpassService.FindOrCreateOverpassForFlightPlanAsync(
selectedOverpass,
flightPlan.Id,
@@ -289,6 +270,8 @@ public async Task CreateAsync(CreateFlightPlanDto createDto)
public async Task> CompileFlightPlanToCshAsync(int flightPlanId)
{
+ logger.LogInformation("Compiling flight plan {FlightPlanId} to CSH", flightPlanId);
+
var flightPlan = await repository.GetByIdAsync(flightPlanId);
if (flightPlan == null) throw new ArgumentException($"Flight plan with ID {flightPlanId} not found.");
@@ -299,16 +282,54 @@ public async Task> CompileFlightPlanToCshAsync(int flightPlanId)
var (isValid, errors) = commands.ValidateAll();
if (!isValid) throw new InvalidOperationException($"Cannot compile invalid flight plan. Errors: {string.Join("; ", errors)}");
+ // Collect blocked times from transmitted flight plans for this satellite.
+ // Only transmitted plans have finalized execution times - plans still in
+ // AssignedToOverpass status will calculate their own times when transmitted.
+ var conflictMargin = TimeSpan.FromMinutes(2);
+ var blockedTimes = new List();
+ var transmittedPlans = await repository.GetTransmittedPlansBySatelliteAsync(flightPlan.SatelliteId);
+
+ logger.LogDebug("Found {TransmittedPlanCount} transmitted flight plans for satellite {SatelliteId}",
+ transmittedPlans.Count, flightPlan.SatelliteId);
+
+ foreach (var transmittedPlan in transmittedPlans)
+ {
+ var transmittedCommands = transmittedPlan.GetCommands();
+ foreach (var cmd in transmittedCommands)
+ {
+ if (cmd.ExecutionTime.HasValue)
+ {
+ blockedTimes.Add(cmd.ExecutionTime.Value);
+ logger.LogDebug("Blocked time from transmitted flight plan {PlanId}: {Time:O}",
+ transmittedPlan.Id, cmd.ExecutionTime.Value);
+ }
+ }
+ }
+
+ logger.LogInformation("Calculating execution times for {CommandCount} commands with {BlockedCount} blocked time slots",
+ commands.Count, blockedTimes.Count);
+
try
{
- await commands.CalculateExecutionTimesAsync(satellite, imagingCalculation, imagingOptions.Value);
+ await commands.CalculateExecutionTimesAsync(
+ satellite,
+ imagingCalculation,
+ imagingOptions.Value,
+ blockedTimes,
+ conflictMargin,
+ logger);
}
catch (InvalidOperationException ex)
{
+ logger.LogError(ex, "Failed to calculate execution times for flight plan {FlightPlanId}", flightPlanId);
throw new InvalidOperationException($"Failed to calculate execution times: {ex.Message}", ex);
}
- return await commands.CompileAllToCsh();
+ var cshCommands = await commands.CompileAllToCsh();
+ logger.LogInformation("Successfully compiled flight plan {FlightPlanId} to {CshCommandCount} CSH commands",
+ flightPlanId, cshCommands.Count);
+
+ return cshCommands;
}
public async Task GetImagingOpportunity(int satelliteId, double targetLatitude, double targetLongitude, DateTime? commandReceptionTime = null)
diff --git a/Modules/GroundStationLink/Entity.cs b/Modules/GroundStationLink/Entity.cs
index 3eb5f43..4b10b61 100644
--- a/Modules/GroundStationLink/Entity.cs
+++ b/Modules/GroundStationLink/Entity.cs
@@ -26,8 +26,5 @@ public class ImageData
public DateTime ReceivedAt { get; set; }
public double? Latitude { get; set; }
public double? Longitude { get; set; }
- public int? ImageWidth { get; set; }
- public int? ImageHeight { get; set; }
- public string? Metadata { get; set; }
}
}
\ No newline at end of file
diff --git a/tests/SatOps.Tests/FlightPlan/FlightPlanServiceTests.cs b/tests/SatOps.Tests/FlightPlan/FlightPlanServiceTests.cs
index a7ef651..46b7b18 100644
--- a/tests/SatOps.Tests/FlightPlan/FlightPlanServiceTests.cs
+++ b/tests/SatOps.Tests/FlightPlan/FlightPlanServiceTests.cs
@@ -12,6 +12,7 @@
using SatOps.Configuration;
using SatOps.Modules.FlightPlan.Commands;
using SGPdotNET.CoordinateSystem;
+using Microsoft.Extensions.Logging;
namespace SatOps.Tests
{
@@ -24,6 +25,7 @@ public class FlightPlanServiceTests
private readonly Mock> _mockImagingOptions;
private readonly Mock _mockOverpassService;
private readonly Mock _mockImagingCalculation;
+ private readonly Mock> _mockLogger;
private readonly FlightPlanService _sut;
@@ -35,6 +37,7 @@ public FlightPlanServiceTests()
_mockOverpassService = new Mock();
_mockImagingCalculation = new Mock();
_mockCurrentUserProvider = new Mock();
+ _mockLogger = new Mock>();
_mockImagingOptions = new Mock>();
_mockImagingOptions.Setup(o => o.Value).Returns(new ImagingCalculationOptions());
@@ -46,7 +49,8 @@ public FlightPlanServiceTests()
_mockOverpassService.Object,
_mockImagingCalculation.Object,
_mockCurrentUserProvider.Object,
- _mockImagingOptions.Object
+ _mockImagingOptions.Object,
+ _mockLogger.Object
);
}
@@ -168,70 +172,9 @@ public async Task GetPlansReadyForTransmissionAsync_CallsRepositoryWithCorrectHo
}
[Fact]
- public async Task AssignOverpassAsync_WhenCommandExecutesDuringOverpass_ReturnsChronologyError()
+ public async Task AssignOverpassAsync_WhenConflictingOverpassExists_ReturnsConflictError()
{
- // Arrange (Time Travel Scenario)
- var planId = 1;
- var satId = 100;
- var gsId = 5;
-
- // FIX: Use valid TLE strings so SGP4 library doesn't crash
- var validTle1 = "1 25544U 98067A 23256.90616898 .00020137 00000-0 35438-3 0 9992";
- var validTle2 = "2 25544 51.6416 339.0970 0003835 48.3825 73.2709 15.50030022414673";
-
- // The Overpass (Upload Window) is 10:00 -> 10:10 (Future)
- var overpassStart = DateTime.UtcNow.AddHours(1);
- var overpassEnd = overpassStart.AddMinutes(10);
-
- // The Command executes at 10:05 (During upload -> Impossible/Time Travel)
- var commandExecutionTime = overpassStart.AddMinutes(5);
-
- var flightPlan = new FlightPlanEntity
- {
- Id = planId,
- Status = FlightPlanStatus.Approved,
- SatelliteId = satId,
- GroundStationId = gsId
- };
-
- var cmd = new TriggerCaptureCommand
- {
- CaptureLocation = new CaptureLocation { Latitude = 0, Longitude = 0 },
- CameraSettings = new CameraSettings { CameraId = "Cam1", Type = CameraType.VMB, NumImages = 1, Iso = 1, IntervalMicroseconds = 1, ObservationId = 1, PipelineId = 1 }
- };
- flightPlan.SetCommands([cmd]);
-
- // Mocks
- _mockFlightPlanRepo.Setup(r => r.GetByIdAsync(planId)).ReturnsAsync(flightPlan);
- _mockSatelliteService.Setup(s => s.GetAsync(satId)).ReturnsAsync(new SatelliteEntity { Id = satId, TleLine1 = validTle1, TleLine2 = validTle2 });
-
- _mockOverpassService.Setup(s => s.CalculateOverpassesAsync(It.IsAny()))
- .ReturnsAsync([
- new OverpassWindowDto {
- StartTime = overpassStart,
- EndTime = overpassEnd,
- MaxElevationTime = overpassStart.AddMinutes(5)
- }
- ]);
-
- _mockImagingCalculation.Setup(c => c.FindBestImagingOpportunity(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()))
- .Returns(new ImagingCalculation.ImagingOpportunity { ImagingTime = commandExecutionTime });
-
- var dto = new AssignOverpassDto { StartTime = overpassStart, EndTime = overpassEnd };
-
- // Act
- var (success, message) = await _sut.AssignOverpassAsync(planId, dto);
-
- // Assert
- success.Should().BeFalse();
- message.Should().Contain("Chronology Error");
- message.Should().Contain("occurs before or during the upload overpass");
- }
-
- [Fact]
- public async Task AssignOverpassAsync_WhenConflictingPlanExists_ReturnsConflictError()
- {
- // Arrange (Conflict Scenario)
+ // Arrange - Test overpass scheduling conflict (two ground stations trying to upload at same time)
var planId = 1;
var existingPlanId = 2;
var satId = 100;
@@ -239,9 +182,8 @@ public async Task AssignOverpassAsync_WhenConflictingPlanExists_ReturnsConflictE
var validTle1 = "1 25544U 98067A 23256.90616898 .00020137 00000-0 35438-3 0 9992";
var validTle2 = "2 25544 51.6416 339.0970 0003835 48.3825 73.2709 15.50030022414673";
- // FIX: Ensure start time is definitively in the future to pass "past time" validation
var baseTime = DateTime.UtcNow.AddHours(2);
- var executionTime = baseTime.AddHours(1);
+ var scheduledTime = baseTime.AddMinutes(5);
var flightPlan = new FlightPlanEntity
{
@@ -252,12 +194,16 @@ public async Task AssignOverpassAsync_WhenConflictingPlanExists_ReturnsConflictE
Name = "New Plan"
};
- var cmd1 = new TriggerPipelineCommand { Mode = 1, ExecutionTime = executionTime };
- flightPlan.SetCommands([cmd1]);
-
- var existingPlan = new FlightPlanEntity { Id = existingPlanId, Status = FlightPlanStatus.AssignedToOverpass, Name = "Existing Plan" };
- var cmd2 = new TriggerPipelineCommand { Mode = 2, ExecutionTime = executionTime };
- existingPlan.SetCommands([cmd2]);
+ // Existing plan already scheduled at the same time (overpass conflict)
+ var existingPlan = new FlightPlanEntity
+ {
+ Id = existingPlanId,
+ Status = FlightPlanStatus.AssignedToOverpass,
+ SatelliteId = satId,
+ GroundStationId = 2, // Different ground station
+ Name = "Existing Plan",
+ ScheduledAt = scheduledTime // Scheduled at same time!
+ };
// Mocks
_mockFlightPlanRepo.Setup(r => r.GetByIdAsync(planId)).ReturnsAsync(flightPlan);
@@ -265,7 +211,7 @@ public async Task AssignOverpassAsync_WhenConflictingPlanExists_ReturnsConflictE
_mockOverpassService.Setup(s => s.CalculateOverpassesAsync(It.IsAny()))
.ReturnsAsync([
- new OverpassWindowDto { StartTime = baseTime, EndTime = baseTime.AddMinutes(10), MaxElevationTime = baseTime.AddMinutes(5) }
+ new OverpassWindowDto { StartTime = baseTime, EndTime = baseTime.AddMinutes(10), MaxElevationTime = scheduledTime }
]);
_mockFlightPlanRepo.Setup(r => r.GetActivePlansBySatelliteAsync(satId))
@@ -278,7 +224,7 @@ public async Task AssignOverpassAsync_WhenConflictingPlanExists_ReturnsConflictE
// Assert
success.Should().BeFalse();
- message.Should().Contain("Conflict Error");
+ message.Should().Contain("Overpass Conflict");
message.Should().Contain("Existing Plan");
}
diff --git a/tests/SatOps.Tests/FlightPlan/FlightPlanServiceValidationTests.cs b/tests/SatOps.Tests/FlightPlan/FlightPlanServiceValidationTests.cs
index 0b7bf50..3e4483f 100644
--- a/tests/SatOps.Tests/FlightPlan/FlightPlanServiceValidationTests.cs
+++ b/tests/SatOps.Tests/FlightPlan/FlightPlanServiceValidationTests.cs
@@ -10,6 +10,7 @@
using SatOps.Modules.FlightPlan.Commands;
using Microsoft.Extensions.Options;
using SatOps.Configuration;
+using Microsoft.Extensions.Logging;
namespace SatOps.Tests.FlightPlan
{
@@ -29,6 +30,7 @@ public FlightPlanServiceValidationTests()
var mockOverpassService = new Mock();
var mockImagingCalculation = new Mock();
_mockCurrentUserProvider = new Mock();
+ var mockLogger = new Mock>();
var mockImagingOptions = new Mock>();
mockImagingOptions.Setup(o => o.Value).Returns(new ImagingCalculationOptions());
@@ -40,7 +42,8 @@ public FlightPlanServiceValidationTests()
mockOverpassService.Object,
mockImagingCalculation.Object,
_mockCurrentUserProvider.Object,
- mockImagingOptions.Object
+ mockImagingOptions.Object,
+ mockLogger.Object
);
_mockCurrentUserProvider.Setup(p => p.GetUserId()).Returns(1);
diff --git a/tests/performance/dummy-50mb.bin b/tests/performance/dummy-50mb.bin
new file mode 100644
index 0000000..22d1f9c
Binary files /dev/null and b/tests/performance/dummy-50mb.bin differ
diff --git a/tests/performance/general-api-performance-test.js b/tests/performance/general-api-performance-test.js
index 56f9948..4aa57e6 100644
--- a/tests/performance/general-api-performance-test.js
+++ b/tests/performance/general-api-performance-test.js
@@ -4,7 +4,7 @@ import { Trend, Rate } from 'k6/metrics';
// --- Configuration ---
// PASTE VALID JWT HERE
-const AUTH_TOKEN = 'MyTokenIsUpHereYouCreep';
+const AUTH_TOKEN = 'PASTE_VALID_JWT_HERE'; // never commit real tokens
const BASE_URL = 'http://localhost:5111';
diff --git a/tests/performance/overpass-performance-test.js b/tests/performance/overpass-performance-test.js
index 90f1bc3..058a652 100644
--- a/tests/performance/overpass-performance-test.js
+++ b/tests/performance/overpass-performance-test.js
@@ -4,7 +4,7 @@ import { Trend, Rate } from 'k6/metrics';
// --- Configuration ---
// PASTE VALID JWT HERE
-const AUTH_TOKEN = 'TOKEN_HERE_YES_VERY_NICE_TOKEN_HERE_YES_VERY_NICE_TOKEN_HERE_YES_VERY_NICE_TOKEN_HERE_YES_VERY_NICE_TOKEN_HERE_YES_VERY_NICE_';
+const AUTH_TOKEN = 'PASTE_VALID_JWT_HERE'; // never commit real tokens
const BASE_URL = 'http://localhost:5111';
const SATELLITE_ID = 2;
@@ -35,7 +35,17 @@ export const options = {
// --- Test Logic ---
// This is the main function that each virtual user will execute repeatedly.
export default function () {
- const url = `${BASE_URL}/api/v1/overpasses/satellite/${SATELLITE_ID}/groundstation/${GROUND_STATION_ID}`;
+ // Calculate 7-day window from current moment
+ const startTime = new Date();
+ const endTime = new Date(startTime);
+ endTime.setDate(endTime.getDate() + 7); // Add 7 days
+
+ // Format as ISO 8601 strings
+ const startTimeStr = startTime.toISOString();
+ const endTimeStr = endTime.toISOString();
+
+ // Build URL with query parameters
+ const url = `${BASE_URL}/api/v1/overpasses/satellite/${SATELLITE_ID}/groundstation/${GROUND_STATION_ID}?startTime=${encodeURIComponent(startTimeStr)}&endTime=${encodeURIComponent(endTimeStr)}`;
const params = {
headers: {
@@ -44,7 +54,6 @@ export default function () {
},
};
- // The controller defaults to a 7-day window
const res = http.get(url, params);
// 1. Check if the request was successful (HTTP 200)
diff --git a/tests/performance/upload-test.mjs b/tests/performance/upload-test.mjs
index 7e00dde..44cb998 100644
--- a/tests/performance/upload-test.mjs
+++ b/tests/performance/upload-test.mjs
@@ -4,8 +4,8 @@ import crypto from 'node:crypto';
const API_URL = 'http://localhost:5111';
-const APPLICATION_ID = '7db1e716-8c60-4ac8-bfcf-b7aa565907c1';
-const API_KEY = 'aY1SZM6LYZyzagn__dKIq5A-gLvI1oG58YqG89zewtI=';
+const APPLICATION_ID = 'PASTE_APPLICATION_ID_HERE'; // never commit real credentials
+const API_KEY = 'PASTE_API_KEY_HERE'; // never commit real API keys
const SATELLITE_ID = 1;
const GROUND_STATION_ID = 1;
@@ -14,6 +14,9 @@ const GROUND_STATION_ID = 1;
const FILE_NAME = 'dummy-50mb.bin';
const FILE_SIZE_BYTES = 50 * 1024 * 1024; // 50 MiB
+const TEST_DURATION_MS = 30 * 1000; // 30 seconds
+const UPLOAD_THRESHOLD_MS = 3000; // 3 seconds - performance requirement
+
/**
* Creates a large dummy file for testing.
*/
@@ -68,38 +71,122 @@ async function getToken() {
}
/**
- * Uploads the test file to the API.
+ * Uploads the test file to the API and returns the duration in ms.
*/
-async function uploadFile(token) {
- console.log(`Step 2: Uploading ${FILE_NAME}... (this may take a moment)`);
- try {
- const fileContent = await fs.readFile(FILE_NAME);
- const fileBlob = new Blob([fileContent]);
-
- const formData = new FormData();
- formData.append('SatelliteId', SATELLITE_ID);
- formData.append('GroundStationId', GROUND_STATION_ID);
- formData.append('CaptureTime', new Date().toISOString());
- formData.append('ImageFile', fileBlob, FILE_NAME);
+async function uploadFile(token, fileContent, uploadNumber) {
+ const fileBlob = new Blob([fileContent]);
+
+ const formData = new FormData();
+ formData.append('SatelliteId', SATELLITE_ID);
+ formData.append('GroundStationId', GROUND_STATION_ID);
+ formData.append('CaptureTime', new Date().toISOString());
+ formData.append('ImageFile', fileBlob, FILE_NAME);
+
+ const startTime = performance.now();
+
+ const response = await fetch(`${API_URL}/api/v1/ground-station-link/images`, {
+ method: 'POST',
+ headers: {
+ 'Authorization': `Bearer ${token}`,
+ },
+ body: formData,
+ });
+
+ const endTime = performance.now();
+ const durationMs = endTime - startTime;
+
+ if (!response.ok) {
+ const errorText = await response.text();
+ throw new Error(`Upload #${uploadNumber} failed. Status: ${response.status}. Body: ${errorText}`);
+ }
- const response = await fetch(`${API_URL}/api/v1/ground-station-link/images`, {
- method: 'POST',
- headers: {
- 'Authorization': `Bearer ${token}`,
- },
- body: formData,
- });
+ return durationMs;
+}
- if (!response.ok) {
- const errorText = await response.text();
- throw new Error(`Upload failed. Status: ${response.status}. Body: ${errorText}`);
+/**
+ * Runs multiple uploads over a specified duration and reports statistics.
+ */
+async function runUploadTest(token) {
+ console.log(`\nStep 2: Running upload test for ${TEST_DURATION_MS / 1000} seconds...`);
+ console.log(`Performance requirement: Each upload should complete within ${UPLOAD_THRESHOLD_MS / 1000}s\n`);
+
+ // Pre-load file content once
+ const fileContent = await fs.readFile(FILE_NAME);
+
+ const durations = [];
+ const startTime = performance.now();
+ let uploadCount = 0;
+ let successCount = 0;
+ let failCount = 0;
+
+ while (performance.now() - startTime < TEST_DURATION_MS) {
+ uploadCount++;
+ const elapsedSec = ((performance.now() - startTime) / 1000).toFixed(1);
+ console.log(`[${elapsedSec}s] Upload #${uploadCount} starting...`);
+
+ try {
+ const durationMs = await uploadFile(token, fileContent, uploadCount);
+ durations.push(durationMs);
+
+ const durationSec = (durationMs / 1000).toFixed(2);
+ const passed = durationMs < UPLOAD_THRESHOLD_MS;
+
+ if (passed) {
+ successCount++;
+ console.log(` ✓ Upload #${uploadCount} completed in ${durationSec}s`);
+ } else {
+ failCount++;
+ console.log(` ⚠ Upload #${uploadCount} completed in ${durationSec}s (exceeded ${UPLOAD_THRESHOLD_MS / 1000}s threshold)`);
+ }
+ } catch (error) {
+ failCount++;
+ console.error(` ✗ Upload #${uploadCount} failed: ${error.message}`);
}
+ }
- const successText = await response.text();
- console.log('✓ Upload successful! The API responded with:');
- console.log(successText);
- } catch (error) {
- console.error(`✗ UPLOAD FAILED. Error: ${error.message}`);
+ // Calculate and display statistics
+ console.log('\n' + '='.repeat(60));
+ console.log('UPLOAD TEST RESULTS');
+ console.log('='.repeat(60));
+
+ if (durations.length === 0) {
+ console.log('No successful uploads completed.');
+ process.exit(1);
+ }
+
+ const totalDuration = durations.reduce((sum, d) => sum + d, 0);
+ const avgDuration = totalDuration / durations.length;
+ const minDuration = Math.min(...durations);
+ const maxDuration = Math.max(...durations);
+
+ // Calculate percentiles
+ const sortedDurations = [...durations].sort((a, b) => a - b);
+ const p50 = sortedDurations[Math.floor(sortedDurations.length * 0.5)];
+ const p95 = sortedDurations[Math.floor(sortedDurations.length * 0.95)];
+ const p99 = sortedDurations[Math.floor(sortedDurations.length * 0.99)];
+
+ const passedThreshold = durations.filter(d => d < UPLOAD_THRESHOLD_MS).length;
+
+ console.log(`\nTest Duration: ${TEST_DURATION_MS / 1000}s`);
+ console.log(`Total Uploads: ${uploadCount}`);
+ console.log(`Successful: ${durations.length}`);
+ console.log(`Failed: ${uploadCount - durations.length}`);
+ console.log(`\nUpload Duration Statistics:`);
+ console.log(` Average: ${(avgDuration / 1000).toFixed(2)}s`);
+ console.log(` Min: ${(minDuration / 1000).toFixed(2)}s`);
+ console.log(` Max: ${(maxDuration / 1000).toFixed(2)}s`);
+ console.log(` P50 (median): ${(p50 / 1000).toFixed(2)}s`);
+ console.log(` P95: ${(p95 / 1000).toFixed(2)}s`);
+ console.log(` P99: ${(p99 / 1000).toFixed(2)}s`);
+ console.log(`\nPerformance Threshold (${UPLOAD_THRESHOLD_MS / 1000}s):`);
+ console.log(` Passed: ${passedThreshold}/${durations.length} (${((passedThreshold / durations.length) * 100).toFixed(1)}%)`);
+
+ console.log('\n' + '='.repeat(60));
+
+ if (avgDuration < UPLOAD_THRESHOLD_MS) {
+ console.log(`✓ PASSED: Average upload time (${(avgDuration / 1000).toFixed(2)}s) is within ${UPLOAD_THRESHOLD_MS / 1000}s threshold`);
+ } else {
+ console.log(`✗ FAILED: Average upload time (${(avgDuration / 1000).toFixed(2)}s) exceeds ${UPLOAD_THRESHOLD_MS / 1000}s threshold`);
process.exit(1);
}
}
@@ -110,7 +197,7 @@ async function uploadFile(token) {
async function main() {
await createTestFile();
const token = await getToken();
- await uploadFile(token);
+ await runUploadTest(token);
}
main();
\ No newline at end of file
diff --git a/tests/performance/websocket-load-test/go.mod b/tests/performance/websocket-load-test/go.mod
new file mode 100644
index 0000000..0de11eb
--- /dev/null
+++ b/tests/performance/websocket-load-test/go.mod
@@ -0,0 +1,7 @@
+module websocket-load-test
+
+go 1.21
+
+require github.com/gorilla/websocket v1.5.1
+
+require golang.org/x/net v0.17.0 // indirect
diff --git a/tests/performance/websocket-load-test/go.sum b/tests/performance/websocket-load-test/go.sum
new file mode 100644
index 0000000..272772f
--- /dev/null
+++ b/tests/performance/websocket-load-test/go.sum
@@ -0,0 +1,4 @@
+github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
+github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
+golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
+golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
diff --git a/tests/performance/websocket-load-test/main.go b/tests/performance/websocket-load-test/main.go
new file mode 100644
index 0000000..b66f5b9
--- /dev/null
+++ b/tests/performance/websocket-load-test/main.go
@@ -0,0 +1,433 @@
+package main
+
+import (
+ "bytes"
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "os"
+ "os/signal"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/gorilla/websocket"
+)
+
+const (
+ defaultBaseURL = "http://localhost:5111"
+ defaultWsURL = "ws://localhost:5111"
+)
+
+// Configuration
+var (
+ baseURL string
+ wsURL string
+ numClients int
+ testDuration time.Duration
+ accessToken string
+ cleanupAfterTest bool
+ reportIntervalSec int
+)
+
+// Metrics
+var (
+ connectionsEstablished atomic.Int64
+ connectionsFailed atomic.Int64
+ connectionsActive atomic.Int64
+ messagesReceived atomic.Int64
+ errors atomic.Int64
+)
+
+// API Types
+type LocationDto struct {
+ Latitude float64 `json:"latitude"`
+ Longitude float64 `json:"longitude"`
+ Altitude float64 `json:"altitude"`
+}
+
+type GroundStationCreateDto struct {
+ Name string `json:"name"`
+ Location LocationDto `json:"location"`
+}
+
+type GroundStationWithApiKeyDto struct {
+ Id int `json:"id"`
+ Name string `json:"name"`
+ ApplicationId string `json:"applicationId"`
+ RawApiKey string `json:"rawApiKey"`
+ Location LocationDto `json:"location"`
+ CreatedAt time.Time `json:"createdAt"`
+}
+
+type TokenRequest struct {
+ ApplicationID string `json:"applicationId"`
+ ApiKey string `json:"apiKey"`
+}
+
+type TokenResponse struct {
+ AccessToken string `json:"accessToken"`
+}
+
+type WebSocketConnectMessage struct {
+ Type string `json:"type"`
+ Token string `json:"token"`
+}
+
+type GroundStationCredentials struct {
+ ID int
+ Name string
+ ApplicationID string
+ ApiKey string
+}
+
+func main() {
+ // Parse command line arguments
+ flag.StringVar(&baseURL, "base-url", defaultBaseURL, "Base URL of the SatOps API")
+ flag.StringVar(&wsURL, "ws-url", defaultWsURL, "WebSocket URL of the SatOps API")
+ flag.IntVar(&numClients, "clients", 10, "Number of concurrent WebSocket clients")
+ flag.DurationVar(&testDuration, "duration", 5*time.Minute, "Duration of the test")
+ flag.StringVar(&accessToken, "token", "", "Access token for creating ground stations (required)")
+ flag.BoolVar(&cleanupAfterTest, "cleanup", true, "Delete created ground stations after test")
+ flag.IntVar(&reportIntervalSec, "report-interval", 10, "Interval in seconds for status reports")
+ flag.Parse()
+
+ if accessToken == "" {
+ log.Fatal("Error: --token is required. Please provide an access token for the SatOps platform.")
+ }
+
+ log.Println("╔══════════════════════════════════════════════════════════════╗")
+ log.Println("║ SatOps WebSocket Load Test ║")
+ log.Println("╚══════════════════════════════════════════════════════════════╝")
+ log.Printf("Configuration:")
+ log.Printf(" • Base URL: %s", baseURL)
+ log.Printf(" • WebSocket URL: %s", wsURL)
+ log.Printf(" • Number of clients: %d", numClients)
+ log.Printf(" • Test duration: %s", testDuration)
+ log.Printf(" • Cleanup after test: %v", cleanupAfterTest)
+ log.Println()
+
+ // Step 1: Create ground stations
+ log.Println("Step 1: Creating ground station credentials...")
+ credentials, err := createGroundStations(numClients)
+ if err != nil {
+ log.Fatalf("Failed to create ground stations: %v", err)
+ }
+ log.Printf("✓ Successfully created %d ground stations\n", len(credentials))
+
+ // Step 2: Launch WebSocket connections
+ log.Println("\nStep 2: Launching WebSocket connections...")
+
+ var wg sync.WaitGroup
+ stopChan := make(chan struct{})
+
+ // Handle interrupt signal
+ interrupt := make(chan os.Signal, 1)
+ signal.Notify(interrupt, os.Interrupt)
+
+ // Start metrics reporter
+ go reportMetrics(stopChan, reportIntervalSec)
+
+ // Launch all clients
+ for i, cred := range credentials {
+ wg.Add(1)
+ go func(clientID int, cred GroundStationCredentials) {
+ defer wg.Done()
+ runClient(clientID, cred, stopChan)
+ }(i+1, cred)
+ // Small delay between client launches to avoid thundering herd
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ // Wait for test duration or interrupt
+ select {
+ case <-time.After(testDuration):
+ log.Println("\n⏱️ Test duration completed")
+ case <-interrupt:
+ log.Println("\n⚠️ Received interrupt signal")
+ }
+
+ // Signal all clients to stop
+ close(stopChan)
+
+ // Wait for all clients to finish
+ log.Println("Waiting for all clients to disconnect...")
+ wg.Wait()
+
+ // Print final results
+ printFinalResults()
+
+ // Cleanup
+ if cleanupAfterTest {
+ log.Println("\nStep 3: Cleaning up ground stations...")
+ cleanupGroundStations(credentials)
+ }
+
+ log.Println("\n Load test completed!")
+}
+
+func createGroundStations(count int) ([]GroundStationCredentials, error) {
+ credentials := make([]GroundStationCredentials, 0, count)
+
+ for i := 0; i < count; i++ {
+ name := fmt.Sprintf("LoadTest-GS-%d", i+1)
+
+ // Create unique locations for each ground station
+ createDto := GroundStationCreateDto{
+ Name: name,
+ Location: LocationDto{
+ Latitude: 55.0 + float64(i)*0.1, // Spread across Denmark
+ Longitude: 12.0 + float64(i)*0.1,
+ Altitude: 100.0,
+ },
+ }
+
+ jsonData, err := json.Marshal(createDto)
+ if err != nil {
+ return nil, fmt.Errorf("failed to marshal request: %w", err)
+ }
+
+ req, err := http.NewRequest("POST", baseURL+"/api/v1/ground-stations", bytes.NewBuffer(jsonData))
+ if err != nil {
+ return nil, fmt.Errorf("failed to create request: %w", err)
+ }
+ req.Header.Set("Content-Type", "application/json")
+ req.Header.Set("Authorization", "Bearer "+accessToken)
+
+ resp, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create ground station %s: %w", name, err)
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusCreated {
+ body, _ := io.ReadAll(resp.Body)
+ return nil, fmt.Errorf("failed to create ground station %s: status %d, body: %s", name, resp.StatusCode, string(body))
+ }
+
+ var gsResponse GroundStationWithApiKeyDto
+ if err := json.NewDecoder(resp.Body).Decode(&gsResponse); err != nil {
+ return nil, fmt.Errorf("failed to decode response for %s: %w", name, err)
+ }
+
+ credentials = append(credentials, GroundStationCredentials{
+ ID: gsResponse.Id,
+ Name: gsResponse.Name,
+ ApplicationID: gsResponse.ApplicationId,
+ ApiKey: gsResponse.RawApiKey,
+ })
+
+ log.Printf(" ✓ Created ground station: %s (ID: %d)", name, gsResponse.Id)
+ }
+
+ return credentials, nil
+}
+
+func runClient(clientID int, cred GroundStationCredentials, stopChan <-chan struct{}) {
+ log.Printf("[Client %d] Starting for ground station: %s", clientID, cred.Name)
+
+ // Get WebSocket token
+ token, err := getWebSocketToken(cred.ApplicationID, cred.ApiKey)
+ if err != nil {
+ log.Printf("[Client %d] Failed to get token: %v", clientID, err)
+ connectionsFailed.Add(1)
+ errors.Add(1)
+ return
+ }
+
+ // Connect to WebSocket
+ conn, err := connectWebSocket(token)
+ if err != nil {
+ log.Printf("[Client %d] Failed to connect WebSocket: %v", clientID, err)
+ connectionsFailed.Add(1)
+ errors.Add(1)
+ return
+ }
+ defer conn.Close()
+
+ connectionsEstablished.Add(1)
+ connectionsActive.Add(1)
+ defer connectionsActive.Add(-1)
+
+ log.Printf("[Client %d] ✓ WebSocket connected", clientID)
+
+ // Read messages until stop signal
+ messageChan := make(chan []byte)
+ errorChan := make(chan error)
+
+ go func() {
+ for {
+ _, message, err := conn.ReadMessage()
+ if err != nil {
+ errorChan <- err
+ return
+ }
+ messageChan <- message
+ }
+ }()
+
+ // Set up ping ticker to keep connection alive
+ pingTicker := time.NewTicker(30 * time.Second)
+ defer pingTicker.Stop()
+
+ for {
+ select {
+ case <-stopChan:
+ log.Printf("[Client %d] Received stop signal, closing connection", clientID)
+ conn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, ""))
+ return
+ case msg := <-messageChan:
+ messagesReceived.Add(1)
+ log.Printf("[Client %d] 📩 Received message: %s", clientID, truncateString(string(msg), 100))
+ case err := <-errorChan:
+ if websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway) {
+ log.Printf("[Client %d] Connection closed normally", clientID)
+ } else {
+ log.Printf("[Client %d] ❌ WebSocket error: %v", clientID, err)
+ errors.Add(1)
+ }
+ return
+ case <-pingTicker.C:
+ if err := conn.WriteMessage(websocket.PingMessage, nil); err != nil {
+ log.Printf("[Client %d] ❌ Failed to send ping: %v", clientID, err)
+ errors.Add(1)
+ return
+ }
+ }
+ }
+}
+
+func getWebSocketToken(applicationID, apiKey string) (string, error) {
+ reqBody := TokenRequest{
+ ApplicationID: applicationID,
+ ApiKey: apiKey,
+ }
+
+ jsonData, err := json.Marshal(reqBody)
+ if err != nil {
+ return "", err
+ }
+
+ resp, err := http.Post(
+ baseURL+"/api/v1/ground-station-link/token",
+ "application/json",
+ bytes.NewBuffer(jsonData),
+ )
+ if err != nil {
+ return "", err
+ }
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ body, _ := io.ReadAll(resp.Body)
+ return "", fmt.Errorf("token request failed with status %d: %s", resp.StatusCode, string(body))
+ }
+
+ var tokenResp TokenResponse
+ if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil {
+ return "", err
+ }
+
+ return tokenResp.AccessToken, nil
+}
+
+func connectWebSocket(token string) (*websocket.Conn, error) {
+ conn, _, err := websocket.DefaultDialer.Dial(wsURL+"/api/v1/ground-station-link/connect", nil)
+ if err != nil {
+ return nil, err
+ }
+
+ connect := WebSocketConnectMessage{
+ Type: "connect",
+ Token: token,
+ }
+
+ if err := conn.WriteJSON(connect); err != nil {
+ conn.Close()
+ return nil, err
+ }
+
+ // Read confirmation
+ _, confirmMsg, err := conn.ReadMessage()
+ if err != nil {
+ conn.Close()
+ return nil, err
+ }
+
+ log.Printf("Server confirmation: %s", truncateString(string(confirmMsg), 100))
+
+ return conn, nil
+}
+
+func cleanupGroundStations(credentials []GroundStationCredentials) {
+ for _, cred := range credentials {
+ req, err := http.NewRequest("DELETE", fmt.Sprintf("%s/api/v1/ground-stations/%d", baseURL, cred.ID), nil)
+ if err != nil {
+ log.Printf("Failed to create delete request for %s: %v", cred.Name, err)
+ continue
+ }
+ req.Header.Set("Authorization", "Bearer "+accessToken)
+
+ resp, err := http.DefaultClient.Do(req)
+ if err != nil {
+ log.Printf("Failed to delete ground station %s: %v", cred.Name, err)
+ continue
+ }
+ resp.Body.Close()
+
+ if resp.StatusCode == http.StatusNoContent || resp.StatusCode == http.StatusOK {
+ log.Printf(" ✓ Deleted ground station: %s", cred.Name)
+ } else {
+ log.Printf("Failed to delete ground station %s: status %d", cred.Name, resp.StatusCode)
+ }
+ }
+}
+
+func reportMetrics(stopChan <-chan struct{}, intervalSec int) {
+ ticker := time.NewTicker(time.Duration(intervalSec) * time.Second)
+ defer ticker.Stop()
+
+ startTime := time.Now()
+
+ for {
+ select {
+ case <-stopChan:
+ return
+ case <-ticker.C:
+ elapsed := time.Since(startTime).Round(time.Second)
+ log.Printf("[%s] Active: %d | Established: %d | Failed: %d | Messages: %d | Errors: %d",
+ elapsed,
+ connectionsActive.Load(),
+ connectionsEstablished.Load(),
+ connectionsFailed.Load(),
+ messagesReceived.Load(),
+ errors.Load(),
+ )
+ }
+ }
+}
+
+func printFinalResults() {
+ log.Println("\nLoad Test Summary:")
+ log.Printf("║ Connections Established: %-35d ║", connectionsEstablished.Load())
+ log.Printf("║ Connections Failed: %-35d ║", connectionsFailed.Load())
+ log.Printf("║ Total Messages Received: %-35d ║", messagesReceived.Load())
+ log.Printf("║ Total Errors: %-35d ║", errors.Load())
+
+ // Determine test pass/fail
+ if connectionsFailed.Load() == 0 && errors.Load() == 0 && connectionsEstablished.Load() == int64(numClients) {
+ log.Println("\nTEST PASSED: All clients connected successfully with no errors!")
+ } else {
+ log.Println("\nTEST FAILED: Some connections failed or errors occurred.")
+ }
+}
+
// truncateString shortens s to at most maxLen bytes, appending "..." when
// truncation occurs. Unlike a plain s[:maxLen] slice, the cut point is backed
// off to the nearest UTF-8 rune boundary so multi-byte characters (e.g. the
// emoji in this file's log messages) are never split into invalid bytes.
// A negative maxLen is clamped to 0 instead of panicking.
func truncateString(s string, maxLen int) string {
	if maxLen < 0 {
		maxLen = 0
	}
	if len(s) <= maxLen {
		return s
	}
	cut := maxLen
	// 0b10xxxxxx marks a UTF-8 continuation byte; step back until we land on
	// the leading byte of a rune (or the start of the string).
	for cut > 0 && s[cut]&0xC0 == 0x80 {
		cut--
	}
	return s[:cut] + "..."
}