Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
262 changes: 262 additions & 0 deletions .github/workflows/debug-keys.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,262 @@
# Manually-triggered analysis workflow: downloads two RouterOS ISO releases,
# unpacks the system NPK / squashfs from each, and diffs the key binaries
# (loader, keyman, ...) to locate key material that changed between versions.
name: Extract Binaries for Analysis

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'RouterOS version to compare against v6.49.17'
        required: true
        default: '6.49.18'
        type: string

jobs:
  extract:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.x'

      - name: Install tools
        run: |
          sudo apt-get update
          # squashfs-tools -> unsquashfs; p7zip-full -> ISO extraction;
          # radare2 -> radiff2 binary diffing; pefile for NPK tooling.
          sudo apt-get install -y squashfs-tools p7zip-full radare2 --no-install-recommends
          pip install pefile

      - name: Download ISOs
        # Pass the workflow input via env instead of expanding ${{ }} inside
        # the script body -- avoids shell injection through the version input.
        env:
          VERSION: ${{ github.event.inputs.version }}
        run: |
          echo "Downloading v6.49.17..."
          wget -nv -O mikrotik17.iso "https://download.mikrotik.com/routeros/6.49.17/mikrotik-6.49.17.iso"
          echo "Downloading v$VERSION..."
          wget -nv -O mikrotik18.iso "https://download.mikrotik.com/routeros/$VERSION/mikrotik-$VERSION.iso"

      - name: Extract NPK and Squashfs
        run: |
          # Unpack both ISOs and take the first system/routeros NPK of each.
          # \( ... \) makes the -name alternation grouping explicit.
          7z x mikrotik17.iso -oiso17 -r > /dev/null
          find iso17 \( -name "system-*.npk" -o -name "routeros-*.npk" \) | head -n 1 | xargs -I {} cp {} system17.npk

          7z x mikrotik18.iso -oiso18 -r > /dev/null
          find iso18 \( -name "system-*.npk" -o -name "routeros-*.npk" \) | head -n 1 | xargs -I {} cp {} system18.npk

          # Extract the SQUASHFS partition from each NPK with the repo's
          # npk.py helper (checked out at the repo root above).  Quoted
          # heredoc: no shell expansion inside the Python source.
          python3 << 'PYEOF'
          import sys; sys.path.insert(0, '.')
          from npk import NovaPackage, NpkPartID
          for v, f in [('17', 'system17.npk'), ('18', 'system18.npk')]:
              pkg = NovaPackage.load(f)
              for part in pkg:
                  if part.id == NpkPartID.SQUASHFS:
                      open(f'sfs{v}.sfs', 'wb').write(part.data)
                      print(f'v{v} squashfs: {len(part.data)} bytes')
          PYEOF

          # Unpack both root filesystems.
          unsquashfs -d sq17 sfs17.sfs
          unsquashfs -d sq18 sfs18.sfs

      - name: Prepare binaries for download
        env:
          VERSION: ${{ github.event.inputs.version }}
        run: |
          mkdir -p artifacts/v6.49.17 "artifacts/v$VERSION" artifacts/analysis

          # Copy the binaries of interest from both trees; a binary missing
          # from one build is skipped silently on purpose.
          for bin in loader keyman fileman sys2 installer; do
            if [ -f "sq17/nova/bin/$bin" ]; then
              cp "sq17/nova/bin/$bin" "artifacts/v6.49.17/$bin"
            fi
            if [ -f "sq18/nova/bin/$bin" ]; then
              cp "sq18/nova/bin/$bin" "artifacts/v$VERSION/$bin"
            fi
          done

          echo "=== v6.49.17 binaries ==="
          ls -la artifacts/v6.49.17/
          echo ""
          echo "=== v$VERSION binaries ==="
          ls -la "artifacts/v$VERSION/"

      - name: Deep analysis
        env:
          VERSION: ${{ github.event.inputs.version }}
        run: |
          echo "============================================" > artifacts/analysis/report.txt
          echo " Binary Analysis Report" >> artifacts/analysis/report.txt
          echo " v6.49.17 vs v$VERSION" >> artifacts/analysis/report.txt
          echo "============================================" >> artifacts/analysis/report.txt

          for bin in loader keyman; do
            f17="artifacts/v6.49.17/$bin"
            f18="artifacts/v$VERSION/$bin"

            if [ ! -f "$f17" ] || [ ! -f "$f18" ]; then
              echo "$bin: Missing in one or both versions" >> artifacts/analysis/report.txt
              continue
            fi

            echo "" >> artifacts/analysis/report.txt
            echo "============================================" >> artifacts/analysis/report.txt
            echo " $bin" >> artifacts/analysis/report.txt
            echo "============================================" >> artifacts/analysis/report.txt

            # File info (sizes and md5 fingerprints)
            echo "" >> artifacts/analysis/report.txt
            echo "--- File Info ---" >> artifacts/analysis/report.txt
            echo "v17: $(wc -c < "$f17") bytes" >> artifacts/analysis/report.txt
            echo "v18: $(wc -c < "$f18") bytes" >> artifacts/analysis/report.txt
            echo "v17 md5: $(md5sum "$f17" | cut -d' ' -f1)" >> artifacts/analysis/report.txt
            echo "v18 md5: $(md5sum "$f18" | cut -d' ' -f1)" >> artifacts/analysis/report.txt

            # Strings comparison: lines only in v17 ("<") vs only in v18 (">")
            echo "" >> artifacts/analysis/report.txt
            echo "--- Strings unique to v17 ---" >> artifacts/analysis/report.txt
            strings "$f17" | sort -u > "/tmp/${bin}_str17.txt"
            strings "$f18" | sort -u > "/tmp/${bin}_str18.txt"
            diff "/tmp/${bin}_str17.txt" "/tmp/${bin}_str18.txt" | grep "^<" | head -30 >> artifacts/analysis/report.txt

            echo "" >> artifacts/analysis/report.txt
            echo "--- Strings unique to v$VERSION ---" >> artifacts/analysis/report.txt
            diff "/tmp/${bin}_str17.txt" "/tmp/${bin}_str18.txt" | grep "^>" | head -30 >> artifacts/analysis/report.txt

            # Save full string lists as artifacts
            cp "/tmp/${bin}_str17.txt" "artifacts/analysis/${bin}_strings_v17.txt"
            cp "/tmp/${bin}_str18.txt" "artifacts/analysis/${bin}_strings_v18.txt"

            # Binary diff summary using radare2
            echo "" >> artifacts/analysis/report.txt
            echo "--- Binary Diff (radiff2) ---" >> artifacts/analysis/report.txt
            radiff2 -s "$f17" "$f18" 2>/dev/null | head -50 >> artifacts/analysis/report.txt || echo "radiff2 failed" >> artifacts/analysis/report.txt
          done

          # Python deep analysis: find the old key in v17, then look for a
          # candidate replacement at the matching location in v18.  Quoted
          # heredoc delimiter: VERSION is read through os.environ, not
          # expanded by the shell.
          python3 << 'PYEOF'
          import os

          # Old 32-byte key as present in the v6.49.17 binaries.
          LK = bytes.fromhex("8E1067E4305FCDC0CFBF95C10F96E5DFE8C49AEF486BD1A4E2E96C27F01E3E32")

          version = os.environ.get("VERSION", "6.49.18")

          with open("artifacts/analysis/key_analysis.txt", "w") as report:
              for bname in ['loader', 'keyman']:
                  f17 = f"artifacts/v6.49.17/{bname}"
                  f18 = f"artifacts/v{version}/{bname}"

                  if not os.path.exists(f17) or not os.path.exists(f18):
                      continue

                  d17 = open(f17, 'rb').read()
                  d18 = open(f18, 'rb').read()

                  report.write(f"\n{'='*60}\n")
                  report.write(f" {bname} - Key Location Analysis\n")
                  report.write(f"{'='*60}\n\n")

                  if LK in d17:
                      idx = d17.index(LK)
                      report.write(f"OLD KEY found in v17 at offset {hex(idx)}\n")
                      report.write(f"Hex dump around key in v17:\n")

                      # Show 64 bytes before and after the key
                      start = max(0, idx - 64)
                      end = min(len(d17), idx + 32 + 64)
                      for off in range(start, end, 16):
                          chunk = d17[off:off+16]
                          hex_str = ' '.join(f'{b:02x}' for b in chunk)
                          ascii_str = ''.join(chr(b) if 32 <= b < 127 else '.' for b in chunk)
                          marker = " <<< KEY START" if off == (idx // 16) * 16 else ""
                          report.write(f" v17 {hex(off)}: {hex_str} |{ascii_str}|{marker}\n")

                      report.write(f"\nSame region in v18:\n")
                      # Try to find the same region in v18 with shrinking
                      # context sizes.  found is initialised so the
                      # post-loop check cannot hit an unbound name when
                      # every context is skipped (e.g. key near file start).
                      found = -1
                      for ctx_len in [64, 48, 32, 16, 8, 4]:
                          ctx = d17[max(0, idx-ctx_len):idx]
                          if len(ctx) < ctx_len:
                              continue

                          pos = 0
                          while True:
                              found = d18.find(ctx, pos)
                              if found == -1:
                                  break
                              key_pos = found + len(ctx)
                              potential = d18[key_pos:key_pos+32]

                              report.write(f"\n Context match ({ctx_len} bytes before): v18 offset {hex(found)}\n")
                              report.write(f" Potential replacement (32 bytes): {potential.hex().upper()}\n")

                              # Hex dump around this location in v18
                              start18 = max(0, key_pos - 32)
                              end18 = min(len(d18), key_pos + 64)
                              for off in range(start18, end18, 16):
                                  chunk = d18[off:off+16]
                                  hex_str = ' '.join(f'{b:02x}' for b in chunk)
                                  ascii_str = ''.join(chr(b) if 32 <= b < 127 else '.' for b in chunk)
                                  marker = " <<< POTENTIAL KEY" if off == (key_pos // 16) * 16 else ""
                                  report.write(f" v18 {hex(off)}: {hex_str} |{ascii_str}|{marker}\n")

                              pos = found + 1
                              break  # Only first match per context size

                          if found != -1:
                              break
                      else:
                          # for-else: no context size produced a match
                          report.write(" No context match found in v18\n")

                      # Also scan v18 for 32-byte sequences that look like
                      # key material (high byte diversity, differs from v17).
                      report.write(f"\n Scanning v18 for potential EC public keys (32-byte high-entropy)...\n")
                      candidates = []
                      for i in range(0, len(d18) - 32, 4):
                          chunk = d18[i:i+32]
                          # Skip all-zero and all-ff runs
                          if chunk == b'\x00'*32 or chunk == b'\xff'*32:
                              continue
                          # High byte diversity = likely key material
                          unique_bytes = len(set(chunk))
                          if unique_bytes >= 20:
                              # Skip data unchanged from v17 at the same offset
                              if i < len(d17) and d17[i:i+32] == chunk:
                                  continue
                              candidates.append((i, chunk))

                      report.write(f" Found {len(candidates)} high-entropy 32-byte candidates unique to v18\n")
                      for i, (off, chunk) in enumerate(candidates[:20]):
                          report.write(f" #{i+1} offset {hex(off)}: {chunk.hex().upper()}\n")

                  else:
                      report.write(f"OLD KEY not found in v17!\n")

          print("Key analysis complete")
          PYEOF

      - name: Print analysis report
        run: |
          echo "=== MAIN REPORT ==="
          cat artifacts/analysis/report.txt
          echo ""
          echo "=== KEY ANALYSIS ==="
          cat artifacts/analysis/key_analysis.txt

      - name: Upload v6.49.17 binaries
        uses: actions/upload-artifact@v4
        with:
          name: binaries-v6.49.17
          path: artifacts/v6.49.17/

      - name: Upload v${{ github.event.inputs.version }} binaries
        uses: actions/upload-artifact@v4
        with:
          name: binaries-v${{ github.event.inputs.version }}
          path: artifacts/v${{ github.event.inputs.version }}/

      - name: Upload analysis report
        uses: actions/upload-artifact@v4
        with:
          name: analysis-report
          path: artifacts/analysis/
67 changes: 52 additions & 15 deletions .github/workflows/patch6.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,25 @@ name: Patch v6

on:
workflow_dispatch:
inputs:
channel:
description: 'Channel'
required: true
default: 'stable'
type: choice
options:
- stable
- long-term
version:
description: 'Versi spesifik (contoh: 6.49.17), kosongkan untuk latest'
required: false
default: ''
type: string
release:
description: 'Buat GitHub Release'
required: false
default: true
type: boolean

permissions:
contents: write
Expand Down Expand Up @@ -36,21 +55,28 @@ jobs:
id: get_latest
run: |
echo $(uname -a)
NEWEST=$(curl -s https://upgrade.mikrotik.com/routeros/NEWEST6.stable)
LATEST_VERSION=$(echo "$NEWEST" | cut -d' ' -f1)
BUILD_TIME=$(echo "$NEWEST" | cut -d' ' -f2)
echo Latest Version: $LATEST_VERSION
echo Build Time: $BUILD_TIME
_LATEST_VERSION=$(cat latest6.txt | cut -d ' ' -f1)
if [ "$_LATEST_VERSION" == "$LATEST_VERSION" ]; then
echo "No new version found"
echo "has_new_version=false" >> $GITHUB_OUTPUT
exit 0
CHANNEL="${{ github.event.inputs.channel }}"
INPUT_VERSION="${{ github.event.inputs.version }}"

if [ -n "$INPUT_VERSION" ]; then
# Versi spesifik dipilih
LATEST_VERSION="$INPUT_VERSION"
BUILD_TIME=$(date +'%s')
NEWEST="$LATEST_VERSION $BUILD_TIME"
echo "Using specified version: $LATEST_VERSION"
else
# Ambil versi terbaru dari channel yang dipilih
NEWEST=$(curl -s https://upgrade.mikrotik.com/routeros/NEWEST6.${CHANNEL})
LATEST_VERSION=$(echo "$NEWEST" | cut -d' ' -f1)
BUILD_TIME=$(echo "$NEWEST" | cut -d' ' -f2)
echo "Latest $CHANNEL version: $LATEST_VERSION"
echo "Build Time: $BUILD_TIME"
fi
echo "NEWEST=${NEWEST}" >> $GITHUB_ENV

echo "has_new_version=true" >> $GITHUB_OUTPUT
curl -s -o CHANGELOG https://upgrade.mikrotik.com/routeros/$LATEST_VERSION/CHANGELOG
cat CHANGELOG
echo "NEWEST=${NEWEST}" >> $GITHUB_ENV
echo "LATEST_VERSION=${LATEST_VERSION}" >> $GITHUB_ENV

- name: Cache mikrotik-${{ env.LATEST_VERSION }}.iso
Expand Down Expand Up @@ -203,17 +229,16 @@ jobs:
sudo rm chr-$LATEST_VERSION-patched.img

- name: Update latest version
if: steps.get_latest.outputs.has_new_version == 'true'
if: steps.get_latest.outputs.has_new_version == 'true' && github.event.inputs.release == 'true'
run: |
echo $NEWEST > latest6.txt
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add latest6.txt
git commit -m "Update latest version"
git push
git diff --cached --quiet && echo "No changes to commit" || (git commit -m "Update latest version to $LATEST_VERSION" && git push)

- name: Create Release tag ${{ env.LATEST_VERSION }}
if: steps.get_latest.outputs.has_new_version == 'true'
if: steps.get_latest.outputs.has_new_version == 'true' && github.event.inputs.release == 'true'
uses: softprops/action-gh-release@v2
with:
name: "RouterOS ${{ env.LATEST_VERSION }}"
Expand All @@ -226,3 +251,15 @@ jobs:
chr-${{ env.LATEST_VERSION }}-patched.*.zip
routeros-x86-${{ env.LATEST_VERSION }}-patched.npk
all_packages-x86-${{ env.LATEST_VERSION }}-patched.zip

- name: Upload as Artifact (tanpa Release)
if: steps.get_latest.outputs.has_new_version == 'true' && github.event.inputs.release != 'true'
uses: actions/upload-artifact@v4
with:
name: mikrotik-${{ env.LATEST_VERSION }}-patched
path: |
mikrotik-${{ env.LATEST_VERSION }}-patched.iso
install-image-${{ env.LATEST_VERSION }}-patched.*.zip
chr-${{ env.LATEST_VERSION }}-patched.*.zip
routeros-x86-${{ env.LATEST_VERSION }}-patched.npk
all_packages-x86-${{ env.LATEST_VERSION }}-patched.zip
2 changes: 1 addition & 1 deletion .github/workflows/patch7.yml
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ jobs:
- name: Get refind
if: steps.get_latest.outputs.has_new_version == 'true' && steps.cache_refind.outputs.cache-hit != 'true' && matrix.arch == 'x86'
run: |
sudo curl -s -o refind-bin-0.14.2.zip https://nchc.dl.sourceforge.net/project/refind/0.14.2/refind-bin-0.14.2.zip
sudo curl -s -o refind-bin-0.14.2.zip https://onboardcloud.dl.sourceforge.net/project/refind/0.14.2/refind-bin-0.14.2.zip

- name: Cache install-image-${{ env.LATEST_VERSION }}${{ env.ARCH }}.zip
if: steps.get_latest.outputs.has_new_version == 'true' && matrix.arch == 'x86'
Expand Down
Loading