-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathmesh_runtime.py
More file actions
1353 lines (1162 loc) · 50.3 KB
/
mesh_runtime.py
File metadata and controls
1353 lines (1162 loc) · 50.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python3
import json
import math
import os
import random
import sqlite3
import threading
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional
from local_deps import bootstrap_local_dependency, ensure_dependency_version
bootstrap_local_dependency("meshdb")
bootstrap_local_dependency("mudp")
bootstrap_local_dependency("vnode")
ensure_dependency_version("meshdb", "0.2.0")
ensure_dependency_version("mudp", "1.5.7")
ensure_dependency_version("vnode", "0.1.10")
import meshdb
from meshtastic import mesh_pb2, portnums_pb2
from mudp import UDPPacketStream
from mudp.encryption import decrypt_packet, encrypt_packet
from mudp.reliability import build_routing_ack_data, compute_reply_hop_limit, register_pending_ack
from mudp.singleton import conn
from pubsub import pub
from vnode import VirtualNode, parse_node_id, resolve_hw_model
from vnode.crypto import b64_decode, decrypt_dm, derive_public_key
from encryption import generate_hash
from firefly_logging import configure_logging, get_logger, make_log_print
configure_logging()
logger = get_logger("firefly.mesh")
print = make_log_print(logger)
BROADCAST_NODE_NUM = 0xFFFFFFFF
STALE_NODE_RETENTION_SECONDS = 7 * 24 * 60 * 60
def _normalized_profile_node_id(profile: Dict) -> str:
node_id = (profile or {}).get("node_id")
if not isinstance(node_id, str):
raise ValueError(f"invalid node_id {node_id!r}")
normalized = node_id.strip().lower()
if len(normalized) != 9 or not normalized.startswith("!"):
raise ValueError(f"invalid node_id {node_id!r}")
int(normalized[1:], 16)
return normalized
def _runtime_root() -> Path:
runtime_dir = os.getenv("FIREFLY_RUNTIME_DIR")
if runtime_dir:
root = Path(runtime_dir).expanduser()
else:
db_path = Path(os.getenv("FIREFLY_DATABASE_FILE", "firefly.db")).expanduser()
db_parent = db_path.parent if db_path.parent != Path("") else Path.cwd()
root = db_parent / "firefly_runtime"
root.mkdir(parents=True, exist_ok=True)
return root.resolve()
def _profile_root(profile: Dict) -> Path:
    """Return (and create) the per-profile runtime directory."""
    path = _runtime_root() / "profiles" / str(profile["id"])
    path.mkdir(parents=True, exist_ok=True)
    return path
def _config_path(profile: Dict) -> Path:
    """Path of the virtual-node JSON config file for *profile*."""
    profile_dir = _profile_root(profile)
    return profile_dir / "node.json"
def _meshdb_root(profile: Optional[Dict] = None) -> Path:
    """Return (and create) the shared meshdb directory.

    The *profile* argument is accepted for interface compatibility but is
    not used: every profile shares a single meshdb root.
    """
    path = _runtime_root() / "meshdb"
    path.mkdir(parents=True, exist_ok=True)
    return path
def _legacy_meshdb_root(profile: Dict) -> Path:
    """Return (and create) the old per-profile meshdb directory (pre-shared-root layout)."""
    path = _profile_root(profile) / "meshdb"
    path.mkdir(parents=True, exist_ok=True)
    return path
def _firefly_db_path() -> Path:
return Path(os.getenv("FIREFLY_DATABASE_FILE", "firefly.db")).expanduser()
def owner_node_num(profile: Dict) -> int:
    """Numeric node number derived from *profile*'s validated node_id."""
    normalized = _normalized_profile_node_id(profile)
    return int(parse_node_id(normalized))
def node_id_from_num(node_num: int) -> str:
    """Render a numeric node number as the canonical "!xxxxxxxx" string."""
    return "!" + format(int(node_num), "08x")
def _profile_channel_hashes(profile: Dict) -> List[int]:
    """Return the channel hash(es) configured on *profile*.

    Prefers the multi-channel "channels" list; falls back to the legacy
    single channel/key pair. Returns an empty list when nothing usable is
    configured.
    """
    channels = profile.get("channels")
    if isinstance(channels, list) and channels:
        hashes: List[int] = []
        for channel in channels:
            if not isinstance(channel, dict):
                continue
            channel_name = (channel.get("name") or "").strip()
            channel_key = (channel.get("key") or "").strip()
            if not channel_name or not channel_key:
                continue
            try:
                hashes.append(generate_hash(channel_name, channel_key))
            except Exception:
                # Skip entries whose hash cannot be computed, matching the
                # tolerance _profile_channel_map shows for malformed channels;
                # previously one bad entry aborted the whole list.
                continue
        if hashes:
            return hashes
    # Legacy single-channel profile shape.
    channel_name = (profile.get("channel") or "").strip()
    channel_key = (profile.get("key") or "").strip()
    if channel_name and channel_key:
        return [generate_hash(channel_name, channel_key)]
    return []
def _profile_channel_map(profile: Dict) -> Dict[int, str]:
    """Map each configured channel hash to its display name."""
    result: Dict[int, str] = {}
    entries = profile.get("channels")
    if isinstance(entries, list) and entries:
        for entry in entries:
            if not isinstance(entry, dict):
                continue
            name = (entry.get("name") or "").strip()
            key = (entry.get("key") or "").strip()
            if not (name and key):
                continue
            try:
                result[generate_hash(name, key)] = name
            except Exception:
                # Ignore entries whose hash cannot be computed.
                pass
    else:
        # Legacy single-channel profile shape.
        name = (profile.get("channel") or "").strip()
        key = (profile.get("key") or "").strip()
        if name and key:
            result[generate_hash(name, key)] = name
    return result
def _meshdb_storage_name(channel_name: Optional[str]) -> Optional[str]:
if not isinstance(channel_name, str):
return None
slug = "".join(ch for ch in channel_name.strip().lower() if ch.isalnum())
return slug or None
def _resolve_profile_channel_for_packet(
    profile: Dict, request_packet: Optional[mesh_pb2.MeshPacket] = None
) -> tuple[Optional[str], Optional[str]]:
    """Pick the (name, key) channel pair to use for *request_packet*.

    Prefers the configured channel whose hash matches the packet's channel
    field; otherwise falls back to the first usable configured channel, then
    to the legacy single channel/key pair, then to (None, None).
    """
    wanted_hash: Optional[int] = None
    if request_packet is not None:
        try:
            wanted_hash = int(getattr(request_packet, "channel", 0) or 0)
        except Exception:
            wanted_hash = None
    entries = profile.get("channels")
    if isinstance(entries, list):
        first_usable: Optional[tuple[str, str]] = None
        for entry in entries:
            if not isinstance(entry, dict):
                continue
            name = (entry.get("name") or "").strip()
            key = (entry.get("key") or "").strip()
            if not (name and key):
                continue
            if first_usable is None:
                first_usable = (name, key)
            if wanted_hash is None:
                continue
            try:
                if generate_hash(name, key) == wanted_hash:
                    return name, key
            except Exception:
                # Unhashable entry can never match; keep scanning.
                pass
        if first_usable is not None:
            return first_usable
    # Legacy single-channel profile shape.
    legacy_name = (profile.get("channel") or "").strip()
    legacy_key = (profile.get("key") or "").strip()
    if legacy_name and legacy_key:
        return legacy_name, legacy_key
    return None, None
def ensure_profile_config(profile: Dict, mcast_group: str, mcast_port: int) -> Path:
    """Write (or rewrite) the virtual-node JSON config for *profile*.

    Every field is regenerated from the profile record; only the previously
    stored security.private_key survives a rewrite. Returns the config path.
    """
    config_path = _config_path(profile)
    meshdb_root = _meshdb_root(profile)
    previous_security: Dict = {}
    if config_path.exists():
        try:
            previous_payload = json.loads(config_path.read_text(encoding="utf-8"))
            previous_security = dict(previous_payload.get("security", {}))
        except Exception:
            # A corrupt config simply loses its stored key material.
            previous_security = {}
    payload = {
        "node_id": profile["node_id"],
        "long_name": profile["long_name"],
        "short_name": profile["short_name"],
        "hw_model": "PRIVATE_HW",
        "role": "CLIENT",
        "is_licensed": False,
        "hop_limit": int(profile.get("hop_limit", 3) or 3),
        "broadcasts": {
            "send_startup_nodeinfo": False,
            "nodeinfo_interval_seconds": 900,
        },
        "position": {
            "enabled": False,
            "latitude": None,
            "longitude": None,
            "altitude": None,
            "position_interval_seconds": 900,
        },
        "channel": {
            "name": profile["channel"],
            "psk": profile["key"],
        },
        "udp": {
            "mcast_group": mcast_group,
            "mcast_port": int(mcast_port),
        },
        "meshdb": {
            "path": str(meshdb_root),
        },
        "security": {
            "private_key": previous_security.get("private_key", ""),
        },
    }
    serialized = json.dumps(payload, indent=2) + "\n"
    config_path.write_text(serialized, encoding="utf-8")
    return config_path
class VirtualNodeManager:
    """Runs at most one profile's VirtualNode at a time and exposes
    send/decode helpers that operate with that node's identity."""

    def __init__(self, mcast_group: str, mcast_port: int, shared_receiver=None):
        self.mcast_group = mcast_group
        self.mcast_port = int(mcast_port)
        # Currently running node (or None) and the owning profile's id.
        self.virtual_node: Optional[VirtualNode] = None
        self.current_profile_id: Optional[str] = None
        # Optional SharedPacketReceiver; while it is running, started nodes
        # are send-only and rely on the shared stream for inbound packets.
        self.shared_receiver = shared_receiver

    def start(self, profile: Dict) -> VirtualNode:
        """Stop any active node, then start (or prepare) one for *profile*."""
        self.stop()
        config_path = ensure_profile_config(profile, self.mcast_group, self.mcast_port)
        vnode = FireflyVirtualNode(config_path, shared_receiver=self.shared_receiver)
        if self.shared_receiver is None or not self.shared_receiver.running:
            print("[VNODE] Starting full virtual node runtime")
            vnode.start()
        else:
            # Inbound traffic comes from the shared stream; this node only sends.
            print("[VNODE] Shared receiver active; preparing send-only virtual node identity")
        self.virtual_node = vnode
        self.current_profile_id = str(profile["id"])
        return vnode

    def stop(self) -> None:
        """Stop and forget the active node, if any."""
        if self.virtual_node is not None:
            self.virtual_node.stop()
            self.virtual_node = None
        self.current_profile_id = None

    @property
    def running(self) -> bool:
        # True while a node (full or send-only) is attached.
        return self.virtual_node is not None

    def send_text(
        self,
        message: str,
        destination: int = BROADCAST_NODE_NUM,
        hop_limit: Optional[int] = None,
        reply_id: Optional[int] = None,
    ) -> int:
        """Send a text message from the active node; returns the packet id.

        Raises:
            RuntimeError: if no virtual node is running.
        """
        if self.virtual_node is None:
            raise RuntimeError("Virtual node is not running")
        return self.virtual_node.send_text(int(destination), message, hop_limit=hop_limit, reply_id=reply_id)

    def send_nodeinfo(self, destination: int = BROADCAST_NODE_NUM, *, want_response: bool = False) -> int:
        """Send our NodeInfo (broadcast by default); returns the packet id.

        Raises:
            RuntimeError: if no virtual node is running.
        """
        if self.virtual_node is None:
            raise RuntimeError("Virtual node is not running")
        return self.virtual_node.send_nodeinfo(int(destination), want_response=want_response)

    def send_traceroute(self, destination: int, hop_limit: Optional[int] = None) -> int:
        """Send an empty RouteDiscovery (traceroute) request to *destination*.

        Raises:
            RuntimeError: if no virtual node is running.
        """
        if self.virtual_node is None:
            raise RuntimeError("Virtual node is not running")
        route_discovery = mesh_pb2.RouteDiscovery()
        packet = self.virtual_node.sendData(
            route_discovery,
            destinationId=int(destination),
            portNum=portnums_pb2.PortNum.TRACEROUTE_APP,
            wantResponse=True,
            hopLimit=hop_limit,
        )
        return int(packet.id)

    def get_profile_public_key(self, profile: Dict) -> Optional[bytes]:
        """Derive *profile*'s public key from its stored private key, or None."""
        private_key_b64 = self._get_profile_private_key(profile)
        if not private_key_b64:
            return None
        try:
            return derive_public_key(b64_decode(private_key_b64))
        except Exception:
            # Malformed key material: treat as "no key" rather than failing.
            return None

    def decode_packet_for_profile(
        self,
        profile: Dict,
        packet: mesh_pb2.MeshPacket,
        *,
        sender_public_key: Optional[bytes] = None,
    ) -> Optional[mesh_pb2.MeshPacket]:
        """Return a copy of *packet* with its payload decrypted for *profile*.

        Channel 0 is treated as a PKI direct message (requires the sender's
        public key and our private key); any other channel is decrypted with
        the matching profile channel PSK. Returns None when decryption is
        not possible. The input packet is never mutated.
        """
        decoded_packet = mesh_pb2.MeshPacket()
        decoded_packet.CopyFrom(packet)
        if decoded_packet.HasField("decoded"):
            # Already plaintext; nothing to do.
            return decoded_packet
        if int(getattr(decoded_packet, "channel", 0) or 0) == 0:
            # PKI direct-message path.
            private_key_b64 = self._get_profile_private_key(profile)
            if not private_key_b64 or sender_public_key is None:
                return None
            try:
                plaintext = decrypt_dm(
                    receiver_private_key=b64_decode(private_key_b64),
                    sender_public_key=sender_public_key,
                    packet_id=int(getattr(decoded_packet, "id", 0) or 0),
                    from_node=int(getattr(decoded_packet, "from", 0) or 0),
                    payload=bytes(decoded_packet.encrypted),
                )
                data = mesh_pb2.Data()
                data.ParseFromString(plaintext)
                decoded_packet.decoded.CopyFrom(data)
                decoded_packet.pki_encrypted = True
                decoded_packet.public_key = sender_public_key
                return decoded_packet
            except Exception:
                return None
        # Shared-channel (PSK) path.
        channel_name, channel_key = _resolve_profile_channel_for_packet(profile, decoded_packet)
        if not channel_name or not channel_key:
            return None
        try:
            data = decrypt_packet(decoded_packet, channel_key, silent=True)
        except Exception:
            data = None
        if data is None:
            return None
        decoded_packet.decoded.CopyFrom(data)
        return decoded_packet

    def send_ack_for_profile(self, profile: Dict, request_packet: mesh_pb2.MeshPacket) -> int:
        """Send a routing ACK for *request_packet* using *profile*'s identity.

        Returns the ACK packet id.

        Raises:
            ValueError: if the profile has no usable channel configuration
                or the request packet has no valid sender.
        """
        channel_name, channel_key = _resolve_profile_channel_for_packet(profile, request_packet)
        if not channel_name or not channel_key:
            raise ValueError("Profile is missing a usable channel configuration")
        profile_node_num = owner_node_num(profile)
        destination = int(getattr(request_packet, "from", 0) or 0)
        if destination <= 0:
            raise ValueError("Request packet is missing a valid sender")
        ack_data = build_routing_ack_data(
            request_id=int(getattr(request_packet, "id", 0) or 0),
            error_reason=mesh_pb2.Routing.Error.NONE,
        )
        packet = mesh_pb2.MeshPacket()
        packet.id = random.getrandbits(32)
        # "from" is a Python keyword, hence setattr.
        setattr(packet, "from", profile_node_num)
        packet.to = destination
        # Only request an ack of this ack for direct text messages addressed to us.
        packet.want_ack = bool(
            getattr(request_packet, "want_ack", False)
            and request_packet.HasField("decoded")
            and int(request_packet.decoded.portnum or 0) == int(portnums_pb2.PortNum.TEXT_MESSAGE_APP)
            and int(getattr(request_packet, "to", BROADCAST_NODE_NUM) or BROADCAST_NODE_NUM) == profile_node_num
        )
        packet.channel = generate_hash(channel_name, channel_key)
        hop_limit = int(compute_reply_hop_limit(request_packet))
        packet.hop_limit = hop_limit
        packet.hop_start = hop_limit
        packet.priority = mesh_pb2.MeshPacket.Priority.ACK
        # Encrypt only after every header field is final.
        packet.encrypted = encrypt_packet(channel_name, channel_key, packet, ack_data)
        # Lazily set up the shared multicast socket on first send.
        if getattr(conn, "socket", None) is None:
            conn.setup_multicast(self.mcast_group, int(self.mcast_port))
        raw_packet = packet.SerializeToString()
        # Register before sending so the response cannot race the bookkeeping.
        register_pending_ack(packet, raw_packet)
        conn.sendto(raw_packet, (conn.host, conn.port))
        return int(packet.id)

    def _get_profile_private_key(self, profile: Dict) -> Optional[str]:
        """Read the stored base64 private key from the profile's node config, or None."""
        config_path = ensure_profile_config(profile, self.mcast_group, self.mcast_port)
        try:
            payload = json.loads(config_path.read_text(encoding="utf-8"))
        except Exception:
            return None
        private_key = str(payload.get("security", {}).get("private_key", "")).strip()
        return private_key or None
class SharedPacketReceiver:
    """Owns a single multicast UDPPacketStream shared by all virtual nodes."""

    def __init__(self, mcast_group: str, mcast_port: int):
        self.mcast_group = mcast_group
        self.mcast_port = int(mcast_port)
        # Active stream, or None while stopped.
        self.stream: Optional[UDPPacketStream] = None

    @property
    def running(self) -> bool:
        """True while the shared stream has been started."""
        return self.stream is not None

    def start(self) -> None:
        """Start the multicast listener; no-op when already running."""
        if self.running:
            return
        # parse_payload=False: packets are decoded per-profile downstream.
        new_stream = UDPPacketStream(
            self.mcast_group,
            self.mcast_port,
            key=None,
            parse_payload=False,
        )
        new_stream.start()
        self.stream = new_stream

    def stop(self) -> None:
        """Stop and drop the stream; no-op when not running."""
        current = self.stream
        if current is None:
            return
        current.stop()
        self.stream = None
class FireflyVirtualNode(VirtualNode):
    """VirtualNode subclass that persists traffic to meshdb and can run
    "send-only" when a SharedPacketReceiver owns the inbound multicast
    stream."""

    def __init__(self, config_path, *, shared_receiver: Optional[SharedPacketReceiver] = None) -> None:
        super().__init__(config_path)
        # When the shared receiver is running, start() skips opening our own stream.
        self._shared_receiver = shared_receiver
        # Guards start()/stop() so the runtime is brought up at most once.
        self._firefly_started = False

    def _channel_hash(self) -> int:
        # Hash identifying this node's primary channel (name + PSK).
        return generate_hash(self.config.channel.name, self.config.channel.psk)

    def _channel_name(self) -> str:
        return str(self.config.channel.name or "").strip()

    def _storage_name(self) -> Optional[str]:
        # Slugified channel name used to scope meshdb storage.
        return _meshdb_storage_name(self._channel_name())

    def _storage_channel_for_packet(self, packet: mesh_pb2.MeshPacket) -> int:
        """Channel hash under which *packet* is stored; defaults to our own channel."""
        try:
            packet_channel = int(getattr(packet, "channel", 0) or 0)
        except Exception:
            packet_channel = 0
        return packet_channel if packet_channel > 0 else self._channel_hash()

    def _profile_record_for_node_num(self, node_num: int) -> Optional[Dict]:
        """Find the local Firefly profile row whose node_id equals *node_num*.

        Returns the row as a dict, or None when the database or profile is
        absent or the query fails.
        """
        if node_num in (None, 0):
            return None
        db_path = _firefly_db_path()
        if not db_path.exists():
            return None
        normalized_node_id = node_id_from_num(int(node_num))
        try:
            with sqlite3.connect(db_path) as con:
                con.row_factory = sqlite3.Row
                row = con.execute(
                    """
                    SELECT id, user_id, node_id, long_name, short_name, channel, key, hop_limit
                    FROM profiles
                    WHERE LOWER(node_id) = ?
                    LIMIT 1
                    """,
                    (normalized_node_id.lower(),),
                ).fetchone()
        except Exception:
            return None
        return dict(row) if row else None

    def _local_profile_public_key(self, node_num: int) -> Optional[bytes]:
        """Derive the public key of a locally hosted profile, or None."""
        profile = self._profile_record_for_node_num(node_num)
        if not profile:
            return None
        config_path = _config_path(profile)
        try:
            payload = json.loads(config_path.read_text(encoding="utf-8"))
        except Exception:
            return None
        private_key_b64 = str(payload.get("security", {}).get("private_key", "")).strip()
        if not private_key_b64:
            return None
        try:
            return derive_public_key(b64_decode(private_key_b64))
        except Exception:
            # Malformed stored key; behave as if no key exists.
            return None

    def _seed_owner_record(self) -> None:
        """Insert/refresh our own node row in meshdb so we appear in node lists."""
        Path(self.meshdb_path).mkdir(parents=True, exist_ok=True)
        meshdb.set_default_db_path(self.meshdb_path)
        meshdb.NodeDB(
            self.node_num,
            self.meshdb_path,
            channel=self._channel_hash(),
            channel_name=self._channel_name(),
            storage_name=self._storage_name(),
        ).upsert(
            node_num=self.node_num,
            long_name=self.config.long_name,
            short_name=self.config.short_name,
            hw_model=str(resolve_hw_model(self.config.hw_model)),
            role=str(self.config.role),
            is_licensed=int(self.config.is_licensed),
            public_key=self._public_key_b64,
        )

    def _resolve_destination(self, destination):
        """Resolve an int, "!hex" id, or node name to a numeric node number.

        Raises:
            ValueError: if a name is unknown or matches multiple nodes.
        """
        if isinstance(destination, int):
            return destination
        text = str(destination).strip()
        if text.startswith("!"):
            return parse_node_id(text)
        # Plain text: look the name up in meshdb.
        resolved = meshdb.get_node_num(
            text,
            owner_node_num=self.node_num,
            db_path=self.meshdb_path,
            channel=self._channel_hash(),
            channel_name=self._channel_name(),
            storage_name=self._storage_name(),
        )
        if isinstance(resolved, list):
            raise ValueError(f"Destination '{destination}' is ambiguous: {resolved}")
        if resolved is None:
            raise ValueError(f"Unknown destination '{destination}'")
        return int(resolved)

    def _lookup_public_key(self, node_num: int) -> Optional[bytes]:
        """Public key for *node_num*: ours first, then meshdb, then local profiles."""
        if int(node_num) == self.node_num:
            key = self._public_key_b64
            return b64_decode(key) if key else None
        row = meshdb.get_nodeinfo(
            int(node_num),
            owner_node_num=self.node_num,
            db_path=self.meshdb_path,
            channel=self._channel_hash(),
            channel_name=self._channel_name(),
            storage_name=self._storage_name(),
        )
        if isinstance(row, dict):
            public_key = str(row.get("public_key", "")).strip()
            if public_key:
                try:
                    return b64_decode(public_key)
                except Exception:
                    # Stored key is not valid base64; fall through to profiles.
                    pass
        return self._local_profile_public_key(int(node_num))

    def _known_node_count(self) -> int:
        """Row count of our node table, at least 1; falls back to the seen-cache size."""
        try:
            node_db = meshdb.NodeDB(
                self.node_num,
                self.meshdb_path,
                channel=self._channel_hash(),
                channel_name=self._channel_name(),
                storage_name=self._storage_name(),
            )
            node_db.ensure_table()
            with node_db.connect() as con:
                row = con.execute(f"SELECT COUNT(*) FROM {node_db.table}").fetchone()
            return max(int(row[0]) if row else 0, 1)
        except (sqlite3.Error, ValueError, TypeError):
            return max(len(self._last_nodeinfo_seen), 1)

    def _persist_packet(self, packet: mesh_pb2.MeshPacket) -> None:
        """Normalize *packet* and hand it to meshdb for storage."""
        normalized = meshdb.normalize_packet(packet, "udp")
        meshdb.handle_packet(
            normalized,
            node_database_number=self.node_num,
            db_path=self.meshdb_path,
            channel=self._storage_channel_for_packet(packet),
            channel_name=self._channel_name(),
            storage_name=self._storage_name(),
        )

    def start(self) -> None:
        """Subscribe to packet topics, optionally open a UDP stream, and start
        the NodeInfo broadcast thread. Idempotent."""
        if self._firefly_started:
            return
        self._stop.clear()
        pub.subscribe(self._handle_raw_packet, self.RAW_PACKET_TOPIC)
        pub.subscribe(self._handle_unique_packet, self.PACKET_TOPIC)
        pub.subscribe(self._handle_compat_response_packet, self.RECEIVE_TOPIC)
        pub.subscribe(self._handle_compat_ack, self.ACK_TOPIC)
        pub.subscribe(self._handle_compat_nak, self.NAK_TOPIC)
        # Only open our own stream when no shared receiver is consuming packets.
        if self._shared_receiver is None or not self._shared_receiver.running:
            self.stream = UDPPacketStream(
                self.config.udp.mcast_group,
                int(self.config.udp.mcast_port),
                key=self.config.channel.psk,
                parse_payload=False,
            )
            self.stream.start()
        if self.config.broadcasts.send_startup_nodeinfo:
            self.send_nodeinfo()
        self._broadcast_thread = threading.Thread(
            target=self._broadcast_loop,
            name="vnode-nodeinfo-broadcast",
            daemon=True,
        )
        self._broadcast_thread.start()
        self._firefly_started = True

    def stop(self) -> None:
        """Tear down the runtime; no-op when never started and no stream is open."""
        if not self._firefly_started and self.stream is None:
            return
        super().stop()
        self._firefly_started = False
class MeshNodeStore:
def __init__(self, profile: Dict):
    """Bind to *profile*'s meshdb storage, migrate legacy rows, prune stale nodes."""
    self.profile = profile
    self.profile_id = str(profile["id"])
    self.owner_node_num = owner_node_num(profile)
    self.db_path = str(_meshdb_root(profile))
    self.firefly_db_path = str(_firefly_db_path())
    # Channel hash -> display name, plus an ordered, de-duplicated hash list.
    self.channel_names = _profile_channel_map(profile)
    deduped = list(dict.fromkeys(_profile_channel_hashes(profile)))
    if not deduped:
        # Legacy single-channel profile: derive the one hash directly.
        deduped = [generate_hash(profile["channel"], profile["key"])]
    self.channel_nums = deduped
    self.channel_num = deduped[0]
    # One-time legacy migration, then drop nodes past the retention window.
    self._migrate_legacy_profile_nodes()
    self.prune_stale_nodes()
def _iter_channel_nums(self) -> List[int]:
    """All configured channel hashes, de-duplicated, preserving order."""
    source = self.channel_nums or [self.channel_num]
    return list(dict.fromkeys(source))
def _channel_scope(self, channel_num=None) -> int:
    """Coerce *channel_num* to a positive int, defaulting to the primary channel."""
    value = self._coerce_int(channel_num)
    if value is not None and value > 0:
        return value
    return self.channel_num
def _channel_name_for(self, channel_num=None) -> Optional[str]:
    """Display name for a channel hash, or None when unknown."""
    scope = self._channel_scope(channel_num)
    return self.channel_names.get(scope)
def _storage_name_for(self, channel_num=None) -> Optional[str]:
    """meshdb storage slug for a channel hash, or None."""
    name = self._channel_name_for(channel_num)
    return _meshdb_storage_name(name)
def _node_db(self, channel_num=None):
    """Open (and ensure the table of) the per-channel node database handle."""
    scope = self._channel_scope(channel_num)
    handle = meshdb.NodeDB(
        self.owner_node_num,
        self.db_path,
        channel=scope,
        channel_name=self._channel_name_for(scope),
        storage_name=self._storage_name_for(scope),
    )
    handle.ensure_table()
    return handle
def _location_db(self, channel_num=None):
    """Open (and ensure the table of) the per-channel location database handle."""
    scope = self._channel_scope(channel_num)
    handle = meshdb.LocationDB(
        self.owner_node_num,
        self.db_path,
        channel=scope,
        channel_name=self._channel_name_for(scope),
        storage_name=self._storage_name_for(scope),
    )
    handle.ensure_table()
    return handle
def _legacy_migration_marker(self) -> Path:
    """Marker file recording that the legacy-node migration already ran."""
    marker_name = f".legacy_nodes_profile_{self.profile_id}"
    return Path(self.db_path) / marker_name
def _migrate_legacy_profile_nodes(self) -> None:
    """One-time copy of node rows from the old per-profile meshdb into the
    shared per-channel store.

    A marker file records completion; the marker is touched even on failure
    so a broken legacy database is never retried on every startup. Only runs
    for single-channel profiles (the legacy layout had no channel scoping).
    """
    marker_path = self._legacy_migration_marker()
    if marker_path.exists():
        return
    try:
        # Multi-channel profiles never had a legacy store to migrate.
        if len(self._iter_channel_nums()) != 1:
            return
        legacy_root = _legacy_meshdb_root(self.profile)
        legacy_db_file = legacy_root / f"{self.owner_node_num}.db"
        if not legacy_db_file.exists():
            return
        legacy_node_db = meshdb.NodeDB(self.owner_node_num, str(legacy_root))
        legacy_node_db.ensure_table()
        shared_node_db = self._node_db(self.channel_num)
        with legacy_node_db.connect() as con:
            con.row_factory = sqlite3.Row
            rows = con.execute(f"SELECT * FROM {legacy_node_db.table}").fetchall()
        for row in rows:
            shared_node_db.upsert(
                node_num=row["node_num"],
                long_name=row["long_name"],
                short_name=row["short_name"],
                macaddr=row["macaddr"],
                hw_model=row["hw_model"],
                role=row["role"],
                is_licensed=row["is_licensed"],
                public_key=row["public_key"],
                is_unmessagable=row["is_unmessagable"],
                last_heard=row["last_heard"],
                hops_away=row["hops_away"],
                snr=row["snr"],
            )
    except Exception:
        # Best-effort migration; legacy data is not worth failing startup over.
        pass
    finally:
        # Mark as done regardless of outcome to avoid repeated attempts.
        marker_path.touch(exist_ok=True)
def ensure_owner_node(self) -> None:
    """Upsert the owning profile's own node record into every channel table."""
    for chan in self._iter_channel_nums():
        node_db = self._node_db(chan)
        node_db.upsert(
            node_num=self.owner_node_num,
            long_name=self.profile.get("long_name") or None,
            short_name=self.profile.get("short_name") or None,
            hw_model="PRIVATE_HW",
            role="CLIENT",
            last_heard=int(time.time()),
            hops_away=0,
        )
def prune_stale_nodes(self) -> int:
    """Delete non-owner nodes not heard within the retention window.

    Returns the total number of rows removed across all channel tables.
    Message history is untouched; only node rows are pruned.
    """
    cutoff = int(time.time()) - STALE_NODE_RETENTION_SECONDS
    removed = 0
    for chan in self._iter_channel_nums():
        node_db = self._node_db(chan)
        try:
            with node_db.connect() as con:
                cursor = con.execute(
                    f"""
                    DELETE FROM {node_db.table}
                    WHERE node_num != ?
                    AND last_heard IS NOT NULL
                    AND last_heard < ?
                    """,
                    (str(self.owner_node_num), cutoff),
                )
                con.commit()
                removed += int(cursor.rowcount or 0)
        except Exception:
            # Per-channel failures are ignored; other channels still prune.
            continue
    if removed:
        print(
            f"[MESHDB] Pruned {removed} stale node(s) for profile {self.profile_id}; "
            f"messages retained"
        )
    return removed
def record_packet(self, packet) -> Dict[str, bool]:
    """Normalize and persist *packet*, then record hop distance for its sender."""
    normalized = meshdb.normalize_packet(packet, "mudp")
    scope = self._channel_scope(normalized.get("channel"))
    stored = meshdb.handle_packet(
        normalized,
        node_database_number=self.owner_node_num,
        db_path=self.db_path,
        channel=scope,
        channel_name=self._channel_name_for(scope),
        storage_name=self._storage_name_for(scope),
    )
    self._persist_hops_from_packet(normalized, storage_channel=scope)
    return stored
def get_node(self, node_num: int) -> Optional[Dict]:
    """Fetch one node merged across all channel tables, or None if unseen."""
    target = int(node_num)
    fallback_hops = self._get_fallback_hops(target)
    merged: Optional[Dict] = None
    for chan in self._iter_channel_nums():
        node_db = self._node_db(chan)
        row = None
        try:
            with node_db.connect() as con:
                con.row_factory = sqlite3.Row
                row = con.execute(
                    f"SELECT * FROM {node_db.table} WHERE node_num = ?",
                    (str(target),),
                ).fetchone()
        except Exception:
            # A broken channel table contributes nothing to the merge.
            row = None
        if not row:
            continue
        record = self._row_to_dict(
            row,
            fallback_hops=fallback_hops,
            location=self._get_latest_location(target, channel_num=chan),
            channel_num=chan,
        )
        merged = self._merge_node_records(merged, record)
    return merged
def list_nodes(self) -> List[Dict]:
    """All known non-owner nodes, merged across channels, newest first."""
    fallback_hops = self._get_fallback_hops_map()
    location_map = self._get_latest_locations_map()
    merged: Dict[int, Dict] = {}
    for chan in self._iter_channel_nums():
        node_db = self._node_db(chan)
        try:
            with node_db.connect() as con:
                con.row_factory = sqlite3.Row
                rows = con.execute(
                    f"SELECT * FROM {node_db.table} ORDER BY COALESCE(last_heard, 0) DESC, long_name ASC, node_num ASC"
                ).fetchall()
        except Exception:
            rows = []
        for row in rows:
            num = int(row["node_num"])
            if num == self.owner_node_num:
                # The owner's own record is never listed.
                continue
            record = self._row_to_dict(
                row,
                fallback_hops=fallback_hops.get(num),
                location=location_map.get(num),
                channel_num=chan,
            )
            merged[num] = self._merge_node_records(merged.get(num), record)

    def _sort_key(node: Dict):
        return (
            str(node.get("last_seen") or ""),
            str(node.get("long_name") or ""),
            int(node.get("node_num") or 0),
        )

    return sorted(merged.values(), key=_sort_key, reverse=True)
def count_nodes(self) -> int:
    """Number of distinct non-owner nodes currently known."""
    nodes = self.list_nodes()
    return len(nodes)
def _get_fallback_hops(self, node_num: int) -> Optional[int]:
    """Hop distance for one node derived from stored message metadata, or None."""
    target = int(node_num)
    return self._get_fallback_hops_map(node_num=target).get(target)
def _get_fallback_hops_map(self, node_num: Optional[int] = None) -> Dict[int, int]:
    """Hop distances per sender, preferring meshdb data over firefly messages."""
    hops = self._get_meshdb_message_hops_map(node_num=node_num)
    if node_num is not None and int(node_num) in hops:
        # meshdb already answered for the requested node; skip the fallback scan.
        return hops
    for sender, distance in self._get_firefly_message_hops_map(node_num=node_num).items():
        hops.setdefault(sender, distance)
    return hops
def _persist_hops_from_packet(self, packet_dict: Dict, storage_channel: Optional[int] = None) -> None:
    """Derive and store the sender's hop distance from a normalized packet."""
    sender = self._coerce_int(packet_dict.get("from"))
    if sender is None or sender == self.owner_node_num:
        return
    distance = self._hops_from_metadata(packet_dict.get("hopStart"), packet_dict.get("hopLimit"))
    if distance is not None:
        self._persist_node_hops(sender, distance, channel_num=storage_channel)
def _persist_node_hops(self, node_num: int, hops_away: int, channel_num: Optional[int] = None) -> None:
    """Best-effort upsert of a node's hop distance; never raises."""
    if node_num == self.owner_node_num:
        return
    try:
        self._node_db(channel_num).upsert(node_num=node_num, hops_away=hops_away)
    except Exception:
        # Hop info is advisory; losing it must not break packet handling.
        pass
def _get_meshdb_message_hops_map(self, node_num: Optional[int] = None) -> Dict[int, int]:
    """Latest hop distance per sender, scanned from every meshdb message table.

    Keeps, per sender, the hops value from the newest message (by timestamp),
    optionally restricted to a single *node_num*. Failures on any channel are
    skipped so a corrupt table never hides the others.
    """
    # sender -> (timestamp, hops); only the newest timestamp wins.
    latest_hops: Dict[int, tuple[int, int]] = {}
    # node_num is compared as TEXT in the message tables.
    node_value = str(int(node_num)) if node_num is not None else None
    for channel_num in self._iter_channel_nums():
        node_db = self._node_db(channel_num)
        try:
            with node_db.connect() as con:
                con.row_factory = sqlite3.Row
                # Scan every "*_messages" table present in this channel database.
                for table_name in self._message_table_names(con):
                    query = f"""
                    SELECT node_num, hop_start, hop_limit, timestamp
                    FROM "{table_name}"
                    WHERE hop_start IS NOT NULL
                    AND hop_limit IS NOT NULL
                    """
                    params: List[object] = []
                    if node_value is not None:
                        query += " AND node_num = ?"
                        params.append(node_value)
                    for row in con.execute(query, params):
                        sender_num = self._coerce_int(row["node_num"])
                        if sender_num is None or sender_num == self.owner_node_num:
                            continue
                        hops_away = self._hops_from_metadata(row["hop_start"], row["hop_limit"])
                        if hops_away is None:
                            continue
                        timestamp = self._coerce_int(row["timestamp"]) or 0
                        self._update_latest_hops(latest_hops, sender_num, timestamp, hops_away)
        except Exception:
            # Best-effort per channel; skip unreadable databases.
            continue
    return {sender_num: hops_away for sender_num, (_, hops_away) in latest_hops.items()}
def _get_firefly_message_hops_map(self, node_num: Optional[int] = None) -> Dict[int, int]:
    """Latest hop distance per sender from the main Firefly messages table.

    Rows are scoped to this profile (by owner_profile_id) OR to any of its
    channel hashes; optionally restricted to one *node_num*. Returns {} when
    the database is missing, no scope can be built, or the query fails.
    """
    if not os.path.exists(self.firefly_db_path):
        return {}
    scope_clauses: List[str] = []
    params: List[object] = []
    if self.profile_id:
        scope_clauses.append("owner_profile_id = ?")
        params.append(self.profile_id)
    if self.channel_nums:
        placeholders = ", ".join("?" for _ in self.channel_nums)
        scope_clauses.append(f"channel IN ({placeholders})")
        params.extend(self.channel_nums)
    if not scope_clauses:
        # Nothing to scope by: refuse to scan the whole table.
        return {}
    query = f"""
    SELECT sender_num, hop_start, hop_limit, timestamp
    FROM messages
    WHERE sender_num IS NOT NULL
    AND hop_start IS NOT NULL
    AND hop_limit IS NOT NULL
    AND ({' OR '.join(scope_clauses)})
    """
    if node_num is not None:
        query += " AND sender_num = ?"
        params.append(int(node_num))
    # sender -> (timestamp, hops); timestamps here are strings, compared
    # lexically by _update_latest_hops.
    latest_hops: Dict[int, tuple[str, int]] = {}
    try:
        with sqlite3.connect(self.firefly_db_path) as con:
            con.row_factory = sqlite3.Row
            cursor = con.execute(query, params)
            for row in cursor.fetchall():
                sender_num = self._coerce_int(row["sender_num"])
                if sender_num is None or sender_num == self.owner_node_num:
                    continue
                hops_away = self._hops_from_metadata(row["hop_start"], row["hop_limit"])
                if hops_away is None:
                    continue
                timestamp = str(row["timestamp"] or "")
                self._update_latest_hops(latest_hops, sender_num, timestamp, hops_away)
    except Exception:
        # Best-effort: a broken main database yields no fallback hops.
        return {}
    return {sender_num: hops_away for sender_num, (_, hops_away) in latest_hops.items()}
def _message_table_names(self, con: sqlite3.Connection) -> List[str]:
    """Names of every per-channel "*_messages" table in *con*'s database."""
    cursor = con.execute(
        """
        SELECT name
        FROM sqlite_master
        WHERE type = 'table'
        AND name LIKE ?
        ORDER BY name
        """,
        ("%_messages",),
    )
    return [str(record[0]) for record in cursor.fetchall()]
def _hops_from_metadata(self, hop_start, hop_limit) -> Optional[int]:
    """Hops travelled = hop_start - hop_limit, clamped at zero; None if unknown."""
    start = self._coerce_int(hop_start)
    limit = self._coerce_int(hop_limit)
    if start is None or limit is None:
        return None
    return max(start - limit, 0)
def _coerce_int(self, value) -> Optional[int]:
try: