Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 11 additions & 10 deletions src/bin.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,9 @@ class bin:
magnitudes = [10e-24, 10e-21, 10e-18, 10e-15, 10e-12, 10e-9, 10e-6, 10e-3, 1, 10e3, 10e6, 10e9, 10e12, 10e15]
file = ''

def __init__(self, file):
def __init__(self, file, logger):
self.file = file
self.logger = logger

def data_to_unit(self, data):
result = []
Expand Down Expand Up @@ -46,15 +47,15 @@ def convert(self):
data = file_data[2048:]
f.close()

print('channel_states: ', channel_states)
print('channel_volt_division: ', channel_volt_division)
print('channel_offset: ', channel_offset)
#print('digital_states: ', digital_states)
print('horizontal_list: ', horizontal_list)
print('wave_length: ', wave_length)
print('sample_rate: ', sample_rate)
#print('digital_wave_length: ', digital_wave_length)
#print('digital_sample_rate: ', digital_sample_rate)
self.logger.log('channel_states: ' + str(channel_states))
self.logger.log('channel_volt_division: ' + str(channel_volt_division))
self.logger.log('channel_offset: ' + str(channel_offset))
#self.logger.log('digital_states: ' + str(digital_states))
self.logger.log('horizontal_list: ' + str(horizontal_list))
self.logger.log('wave_length: ' + str(wave_length))
self.logger.log('sample_rate: ' + str(sample_rate))
#print('digital_wave_length: ' + str(digital_wave_length))
#print('digital_sample_rate: ' + str(digital_sample_rate))

block_length = (1000000 if len(data) >= 14e6 or wave_length >= 1E6 else wave_length)
block_number = int(wave_length // block_length)
Expand Down
80 changes: 51 additions & 29 deletions src/decode.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
# Parameter: decode.py <input_file_or_folder> <output_folder>
# Example: decode.py ../test/samples ./

from multiprocessing import Process, Manager
import time
import os as os
from bin import bin
Expand All @@ -13,9 +14,11 @@
import json
import statistics
import argparse
from logger import Logger

start_time = 0
last_time = 0
logger = Logger()


def calculate_error(current_pulse_length, single_pulse_length, pulse_count):
Expand Down Expand Up @@ -60,9 +63,9 @@ def write_output(path, data):
return name


def time_round():
def time_round(logger):
global last_time
print("Elapsed time:", round((time.perf_counter() - last_time)*1000), "ms")
logger.log("Elapsed time: " + str(round((time.perf_counter() - last_time)*1000)) + "ms")
last_time = time.perf_counter()


Expand All @@ -75,47 +78,47 @@ def time_start():

def time_overall():
global start_time
print("Overall elapsed time:", round((time.perf_counter() - start_time) * 1000), "ms")
print("Overall elapsed time: " + str(round((time.perf_counter() - start_time) * 1000)) + "ms")


def calculate(file):
print("# # # # # # # # # # # # # # # # # # # # # #")
print("Reading and converting binary data", file, "...")
raw_data = bin(file).convert()[0] #doesnt work for multiple channels, have to test if channel 2, 3 and 4 works. only testet first i think
def calculate(file, logger):
logger.log("# # # # # # # # # # # # # # # # # # # # # #")
logger.log("Reading and converting binary data " + file + "...")
raw_data = bin(file, logger).convert()[0] #doesnt work for multiple channels, have to test if channel 2, 3 and 4 works. only testet first i think
x, y = reformat_raw_data(raw_data)
global ui_raw_data
ui_raw_data = [x, y]
time_round()
time_round(logger)

print("Calculating threshold...")
logger.log("Calculating threshold...")
threshold = max(y) / 2
print("Threshold:", threshold, "Volt")
time_round()
logger.log("Threshold: " + str(threshold) + "Volt")
time_round(logger)

print("Correct time offset...")
logger.log("Correct time offset...")
y = [(True if i > threshold else False) for i in y]
time_round()
time_round(logger)

print("Removing doubled data points...")
logger.log("Removing doubled data points...")
x, y = remove_redundant_data_points(x, y)
time_round()
time_round(logger)

print("Shifting time value so it starts at 0...")
logger.log("Shifting time value so it starts at 0...")
x = [x[i] - min(x) for i in range(len(x))]
time_round()
time_round(logger)
global ui_cleaned_data
ui_cleaned_data = [x, y]

print("Getting shortest pulse length...")
logger.log("Getting shortest pulse length...")
single_pulse_length = min([abs(x[i] - x[i + 2]) for i in range(len(y) - 2)])
print("Shortest pulse length:", format(single_pulse_length, '.12f'))
time_round()
logger.log("Shortest pulse length: " + format(single_pulse_length, '.12f'))
time_round(logger)

print("Decoding data...")
logger.log("Decoding data...")
decoded_data, error = decode_normalized_data(x, y, single_pulse_length)
decoded_string_data = ''.join(str(val) for val in decoded_data)
print("Decoded data:", decoded_string_data)
time_round()
logger.log("Decoded data: " + decoded_string_data)
time_round(logger)

return {
"date": datetime.now().strftime("%d.%m.%Y %H:%M:%S"),
Expand Down Expand Up @@ -146,6 +149,10 @@ def getArguments():
return parser.parse_args()


def calculateAsnyc(file, return_dict, logger):
    # Multiprocessing worker: run calculate() for one input file and store the
    # result in the shared dict, keyed by the file's base name.
    # NOTE(review): "Asnyc" looks like a typo for "Async" — renaming it would
    # also require updating the Process(target=calculateAsnyc, ...) call site.
    key = os.path.basename(file)
    return_dict[key] = calculate(file, logger)


def main():
time_start()
args = getArguments()
Expand All @@ -156,16 +163,31 @@ def main():

#Get all files
file_paths = ([os.path.join(args.input, f) for f in os.listdir(args.input) if f.endswith('.bin')] if os.path.isdir(args.input) else [args.input])

result = {}
for file in file_paths:
result[os.path.basename(file)] = calculate(file)

print("# # # # # # # # # # # # # # # # # # # # # #")
print("Writing output file...")
    # Turn off logging and run multiple processes when there is more than 1 file
if len(file_paths) > 1:
logger.off()
threads = []
tempResult = Manager().dict()
for file in file_paths:
t = Process(target=calculateAsnyc, args=(file, tempResult, logger))
t.start()
threads.append(t)
for thread in threads:
thread.join()
result.update(tempResult)
else:
for file in file_paths:
result[os.path.basename(file)] = calculate(file, logger)

logger.log("# # # # # # # # # # # # # # # # # # # # # #")
logger.log("Writing output file...")
path = write_output(args.output if args.output else "./", result)
print("Written file:", path)
logger.log("Written file: " + path)
time_overall()
print("# # # # # # # # # # # # # # # # # # # # # #")
logger.log("# # # # # # # # # # # # # # # # # # # # # #")
if len(file_paths) is 1 and args.graph:
ui.showTwoGraphs(ui_raw_data, ui_cleaned_data)

Expand Down
14 changes: 14 additions & 0 deletions src/logger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
class Logger:
    """Minimal console logger that can be muted at runtime.

    Messages are written with print(); on() and off() toggle whether
    log() emits anything at all.
    """

    def __init__(self):
        # Logging is enabled by default.
        self.loggerActive = True

    def log(self, value):
        """Print *value* to stdout unless logging has been switched off."""
        if not self.loggerActive:
            return
        print(value)

    def on(self):
        """Re-enable log() output."""
        self.loggerActive = True

    def off(self):
        """Silence log() output (e.g. while several worker processes run)."""
        self.loggerActive = False
2 changes: 1 addition & 1 deletion test/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
mainPath = args.script if args.script else mainPath
inputPath = os.path.join(basePath, 'test\samples')
outputPath = os.path.join(basePath, 'test\output')
startCommand = ' '.join([mainPath, "-i=" + inputPath, "-o=" + outputPath])
startCommand = ' '.join([mainPath, '-i="' + inputPath + '"', '-o="' + outputPath + '"'])

#webfetchMainPath = os.path.join(basePath, 'src\webfetch.py')
#webfetchOutputPath = os.path.join(basePath, 'test\webfetch_output')
Expand Down