doc/sql/nightmare.sql (8 changes: 7 additions & 1 deletion)
@@ -70,6 +70,7 @@ CREATE TABLE `crashes` (
   `total_samples` int(11) NOT NULL,
   `additional` mediumtext,
   `crash_hash` varchar(48),
+  `status` int(1) DEFAULT 0,
   PRIMARY KEY (`crash_id`)
 ) ENGINE=InnoDB AUTO_INCREMENT=826 DEFAULT CHARSET=latin1;
 /*!40101 SET character_set_client = @saved_cs_client */;
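
Note on the new column: the engine code later in this diff always inserts crashes with status=0 and nothing in the PR reads the field back, so the value semantics are an assumption here (0 = new, non-zero = triaged seems the intended reading). A minimal triage sketch under that assumption:

  -- Assumption: 0 = new, any non-zero value = already triaged.
  SELECT crash_id, crash_hash, exploitability
  FROM crashes
  WHERE status = 0
  ORDER BY crash_id DESC;

  UPDATE crashes SET status = 1 WHERE crash_id = 42;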
@@ -252,7 +253,12 @@ CREATE TABLE `triggers` (

 LOCK TABLES `config` WRITE;
 /*!40000 ALTER TABLE `config` DISABLE KEYS */;
-INSERT INTO `config` VALUES (7,'SAMPLES_PATH','/home/joxean/Documentos/research/nightmare/results',NULL,NULL),(8,'TEMPLATES_PATH','/home/joxean/Documentos/research/nightmare/samples',NULL,NULL),(9,'NIGHTMARE_PATH','/home/joxean/Documentos/research/nightmare',NULL,NULL),(10,'QUEUE_HOST','localhost',NULL,NULL),(11,'QUEUE_PORT','11300',NULL,NULL),(12,'TEMPORARY_PATH','/tmp/',NULL,NULL);
+INSERT INTO `config` VALUES
+(1,'NIGHTMARE_PATH','/nightmare/',NULL,NULL),
+(2,'WORKING_PATH','/nightmare/data/',NULL,NULL),
+(3,'TEMPORARY_PATH','/tmp/',NULL,NULL),
+(4,'QUEUE_HOST','localhost',NULL,NULL),
+(5,'QUEUE_PORT','11300',NULL,NULL);
 /*!40000 ALTER TABLE `config` ENABLE KEYS */;
 UNLOCK TABLES;
 /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
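
The seed rows also change meaning: the per-user absolute paths (SAMPLES_PATH and TEMPLATES_PATH under /home/joxean/...) are collapsed into a single container-friendly WORKING_PATH. Judging from the nfp_engine.py changes below, each project now keeps its files in per-project subfolders of WORKING_PATH; the exact tree is an inference from that code, roughly:

  /nightmare/data/                  # WORKING_PATH
    <project subfolder>/
      input/      # drop new seed files here; add_templates() picks them up
      templates/  # seeds renamed to the sha1 of their contents
      samples/    # stored crash test cases, named by sha1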
fuzzers/generic_fuzzer.py (21 changes: 15 additions & 6 deletions; mode 100755 → 100644)
@@ -11,6 +11,7 @@
 import json
 import base64
 import tempfile
+import zlib
 import ConfigParser

 from multiprocessing import Process, cpu_count
@@ -75,7 +76,6 @@ def read_configuration(self):
     try:
       self.tube_name = parser.get(self.section, 'tube')
     except:
-      raise
       raise Exception("No tube specified in the configuration file for section %s" % self.section)

     try:
@@ -198,7 +198,7 @@ def launch_sample(self, buf):
       os.system(self.pre_command)

     crash = None
-    for i in range(0,3):
+    for i in range(0, 3):
       try:
         crash = self.launch_debugger(self.timeout, self.command, filename)
         break
@@ -222,14 +222,23 @@ def fuzz(self):
       value = self.q.stats_tube(self.tube_name)["current-jobs-ready"]
       debug("Total of %d job(s) in queue" % value)
       job = self.q.reserve()
-      buf, temp_file = json.loads(job.body)
-      buf = base64.b64decode(buf)
+      d = json.loads(job.body)
+      sample = zlib.decompress(base64.b64decode(d['sample']))
+      temp_file = d['temp_file']
+      template_hash = d['template_hash']

       debug("Launching sample %s..." % os.path.basename(temp_file))
-      if self.launch_sample(buf):
+      if self.launch_sample(sample):
         log("We have a crash, moving to %s queue..." % self.crash_tube)
-        d = {temp_file:self.crash_info}
+        crash = self.crash_info
+
+        d = {
+          "temp_file": temp_file,
+          "crash_info": crash,
+          "template_hash": template_hash,
+          "data": None
+        }

         self.crash_q.put(json.dumps(d))
         self.crash_info = None

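The samples-queue payload changes from a two-element JSON list to a JSON object, with the sample bytes zlib-compressed before the base64 step. A standalone round-trip sketch of the new format (Python 2, matching the project; both halves mirror the code in this diff, but the helper names are mine):

  import json, zlib, base64

  # Producer side (create_sample in nfp_engine.py):
  # compress, then base64-encode, then wrap in JSON.
  def encode_sample(buf, temp_file, template_hash):
    return json.dumps({
      'sample': base64.b64encode(zlib.compress(buf)),
      'temp_file': temp_file,
      'template_hash': template_hash
    })

  # Consumer side (fuzz above): undo the steps in reverse order.
  def decode_sample(body):
    d = json.loads(body)
    sample = zlib.decompress(base64.b64decode(d['sample']))
    return sample, d['temp_file'], d['template_hash']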
runtime/nfp_engine.py (135 changes: 85 additions & 50 deletions; mode 100755 → 100644)
@@ -52,8 +52,8 @@ def read_config(self):
log("Configuration value %s is %s" % (row.name, row.value))

# Create the corresponding directory if it doesn't exists
if not os.path.exists(self.config["SAMPLES_PATH"]):
os.makedirs(self.config["SAMPLES_PATH"])
if not os.path.exists(self.config["WORKING_PATH"]):
os.makedirs(self.config["WORKING_PATH"])

# In Linux, it's recommended to use /dev/shm for speed improvements
if not "TEMPORARY_PATH" in self.config:
@@ -92,10 +92,9 @@ def get_project_engines(self):
     return res

   def read_random_file(self, folder):
-    basepath = os.path.join(self.config["TEMPLATES_PATH"], folder)
-    files = os.listdir(basepath)
+    files = os.listdir(folder)
     filename = random.choice(files)
-    return os.path.join(basepath, filename)
+    return os.path.join(folder, filename)

   def get_command(self, cmd, filename, subfolder):
     cmd = cmd.replace("%INPUT%", '"%s"' % filename)
@@ -108,15 +107,16 @@ def get_command(self, cmd, filename, subfolder):
     return cmd, temp_file

   def create_sample(self, pe):
-    subfolder = pe.subfolder
+    template_folder = os.path.join(self.config["WORKING_PATH"], pe.subfolder, "templates")
     tube_prefix = pe.tube_prefix
     command = pe.command
     project_id = pe.project_id
     mutation_engine_id = pe.mutation_engine_id

-    filename = self.read_random_file(subfolder)
+    filename = self.read_random_file(template_folder)
+    template_hash = os.path.basename(filename)
     debug("Random template file %s" % filename)
-    cmd, temp_file = self.get_command(command, filename, subfolder)
+    cmd, temp_file = self.get_command(command, filename, template_folder)
     log("Generating mutated file %s" % temp_file)
     debug("*** Command: %s" % cmd)
     os.system(cmd)
@@ -126,8 +126,14 @@
       log("Putting it in queue and updating statistics...")
       buf = file(temp_file, "rb").read()
       q = get_queue(watch=False, name="%s-samples" % tube_prefix)
-      json_buf = json.dumps([base64.b64encode(buf), temp_file])
-      q.put(json_buf)
+
+      data = {
+        'sample': base64.b64encode(zlib.compress(buf)),
+        'temp_file': temp_file,
+        'template_hash': template_hash
+      }
+
+      q.put(json.dumps(data))
       self.update_statistics(project_id, mutation_engine_id)
       self.update_iteration(project_id)
     except:
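
Since read_random_file() now picks from the templates folder and add_templates() (below) names every template after the sha1 of its contents, the basename recorded as template_hash is also the template's content hash, which is what lets a crash be traced back to its seed. A small invariant check, not part of the PR:

  import os
  from hashlib import sha1

  def template_hash_ok(path):
    # Templates are stored under the sha1 of their contents, so the
    # basename should always match a freshly computed hash.
    with open(path, "rb") as f:
      return os.path.basename(path) == sha1(f.read()).hexdigest()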
@@ -181,15 +187,13 @@ def update_statistics(self, project_id, mutation_engine_id):
     where = "statistic_id = $id"
     total = self.db.update("statistics", total=row.total+1, iteration=row.iteration+1, where=where, vars=vars)

-  def queue_is_full(self, prefix, maximum):
-    tube_name = "%s-samples" % prefix
+  def queue_is_full(self, tube_name, maximum):
     q = get_queue(watch=True, name=tube_name)
     value = q.stats_tube(tube_name)["current-jobs-ready"]
     debug("Total of %d job(s) in queue" % value)
     return value > maximum-1

-  def get_pending_elements(self, prefix, maximum):
-    tube_name = "%s-samples" % prefix
+  def get_pending_elements(self, tube_name, maximum):
     q = get_queue(watch=True, name=tube_name)
     value = q.stats_tube(tube_name)["current-jobs-ready"]
     debug("Total of %d job(s) in queue" % value)
@@ -213,16 +217,16 @@ def remove_obsolete_files(self):
         log("Error removing temporary file: %s" % str(sys.exc_info()[1]))
       job.delete()

-  def calculate_crash_hash(self, data):
+  def calculate_crash_hash(self, crash_info):
     crash_hash = []
-    if "additional" in data:
-      if "stack trace" in data["additional"]:
-        st = data["additional"]["stack trace"]
+    if "additional" in crash_info:
+      if "stack trace" in crash_info["additional"]:
+        st = crash_info["additional"]["stack trace"]
         last = max(map(int, st.keys()))

         # First element in the crash hash contains the last 3 nibbles
         # of the $PC.
-        tmp = hex(data["pc"])
+        tmp = hex(crash_info["pc"])
         crash_hash = [tmp[len(tmp)-3:]]

         # Next elements will be the last 3 nibbles of each address in
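The rename from data to crash_info does not change the hashing scheme: the first element is the last three nibbles of $PC, and the truncated remainder of the function appends the last three nibbles of each stack-trace address. A worked sketch of the visible part, with made-up values:

  # Concrete (invented) crash record:
  crash_info = {
    "pc": 0x080483fa,
    "additional": {"stack trace": {"0": 0xb7e4c123, "1": 0xb7e4d456}},
  }

  tmp = hex(crash_info["pc"])      # '0x80483fa'
  crash_hash = [tmp[len(tmp)-3:]]  # ['3fa'], the last 3 nibbles of $PC
  # The elided tail would append '123' and '456' for the two stack
  # frames, giving a coarse, slide-tolerant bucket for deduplication
  # (the low 12 bits of an address survive page-aligned ASLR slides).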
@@ -261,17 +265,21 @@ def should_store_crash(self, project_id, crash_hash):
         return False
     return True

-  def insert_crash(self, project_id, temp_file, data):
-    has_file = "has_file" in data
-    crash_path = os.path.join(self.config["SAMPLES_PATH"], "crashes")
-    if not os.path.exists(temp_file) and not has_file:
+  def insert_crash(self, project_id, subfolder, d):
+    samples_path = os.path.join(self.config["WORKING_PATH"], subfolder, "samples")
+    temp_file = d['temp_file']
+    crash_info = d['crash_info']
+    template_hash = d['template_hash']
+    data = d['data']
+
+    if data is None and not os.path.exists(temp_file):
       log("Test case file %s does not exist!" % temp_file)
       return False
-    elif has_file:
+    elif data is not None:
       # There is no file path but, rather, a whole zlib-compressed file
       # encoded in base64, so create a temporary file and write to it
       # the decoded base64 and decompressed zlib stream of data.
-      buf = temp_file
+      buf = data
       temp_file = tempfile.mktemp(dir=self.config["TEMPORARY_PATH"])

       try:
@@ … @@
         os.remove(temp_file)
         raise

-    buf = open(temp_file, "rb").read()
-    file_hash = sha1(buf).hexdigest()
-    new_path = os.path.join(crash_path, file_hash)
+    with open(temp_file, "rb") as f:
+      buf = f.read()

-    sample_id = self.db.insert("samples", sample_hash=file_hash)
+    file_hash = sha1(buf).hexdigest()
+    new_path = os.path.join(samples_path, file_hash)
+    sample_id = self.db.insert("samples", sample_hash=file_hash, template_hash=template_hash)

     what = "count(*) cnt"
     vars = {"id":project_id}
@@ -294,35 +303,39 @@
     row = res[0]
     total = row.cnt

-    crash_hash = self.calculate_crash_hash(data)
+    crash_hash = self.calculate_crash_hash(crash_info)
     store_crash = self.should_store_crash(project_id, crash_hash)

     if store_crash:
       log("Saving test file %s" % new_path)
-      shutil.move(temp_file, new_path)
+      shutil.copy(temp_file, new_path)

       if os.path.exists(temp_file + ".diff"):
-        shutil.move(temp_file + ".diff", new_path + ".diff")
+        shutil.copy(temp_file + ".diff", new_path + ".diff")

     with self.db.transaction():
-      log("Inserting crash $PC 0x%08x Signal %s Exploitability %s Hash %s" % (data["pc"], data["signal"], data["exploitable"], crash_hash))
-      if data["disasm"] is not None:
-        disasm = "%08x %s" % (data["disasm"][0], data["disasm"][1])
+      log("Inserting crash $PC 0x%08x Signal %s Exploitability %s Hash %s" %
+          (crash_info["pc"], crash_info["signal"], crash_info["exploitable"], crash_hash))
+      if crash_info["disasm"] is not None:
+        disasm = "%08x %s" % (crash_info["disasm"][0], crash_info["disasm"][1])
       else:
         disasm = "None"

-      additional_info = json.dumps(data["additional"])
+      additional_info = json.dumps(crash_info["additional"])
       if store_crash:
         self.db.insert("crashes", project_id=project_id, sample_id=sample_id,
-                       program_counter=data["pc"], crash_signal=data["signal"],
-                       exploitability=data["exploitable"],
-                       disassembly=disasm, total_samples=total,
-                       additional = str(additional_info),
-                       crash_hash = crash_hash)
+                       program_counter=crash_info["pc"], crash_signal=crash_info["signal"],
+                       exploitability=crash_info["exploitable"],
+                       disassembly=disasm, total_samples=total,
+                       additional=str(additional_info),
+                       crash_hash=crash_hash, status=0)
         log("Crash stored")
       else:
         log("Ignoring and removing already existing crash with hash %s" % crash_hash)
-    os.remove(temp_file)
+    if os.path.isfile(temp_file):
+      os.remove(temp_file)
+    if os.path.isfile(temp_file + ".diff"):
+      os.remove(temp_file + ".diff")

     self.reset_iteration(project_id)
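
insert_crash() now takes the whole crash message as one dict, and the new data field lets a fuzzer ship the test case itself (zlib then base64, per the comment above) instead of a path, so crashes can be reported without a shared filesystem. The generic fuzzer in this PR always sends data: None; a remote fuzzer would presumably fill it in along these lines (a sketch, not code from this diff; the empty temp_file is my assumption):

  import json, zlib, base64

  def make_remote_crash_msg(sample_bytes, crash_info, template_hash):
    # No shared filesystem: send file contents, not a path.
    return json.dumps({
      "temp_file": "",   # unused when data is present
      "crash_info": crash_info,
      "template_hash": template_hash,
      "data": base64.b64encode(zlib.compress(sample_bytes))
    })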

@@ -331,24 +344,45 @@ def reset_iteration(self, project_id):
     where = "project_id = $project_id and mutation_engine_id = -1"
     self.db.update("statistics", iteration=0, where=where, vars=vars)

+  def add_templates(self):
+    what = "project_id, name, subfolder"
+    res = self.db.select("projects", what=what, where="enabled = 1")
+
+    for row in res:
+      project_folder = os.path.join(self.config["WORKING_PATH"], row['subfolder'])
+      input_folder = os.path.join(project_folder, "input")
+
+      for i in os.listdir(input_folder):
+        i_file = os.path.join(input_folder, i)
+        with open(i_file, "rb") as f:
+          buf = f.read()
+        file_hash = sha1(buf).hexdigest()
+        template = os.path.join(project_folder, "templates", file_hash)
+
+        if not os.path.isfile(template):
+          log("Adding sample %s to project %s" % (file_hash, row['name']))
+          os.rename(i_file, template)
+        else:
+          os.remove(i_file)
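
With add_templates() in the loop, seeding a project is just dropping files into its input/ folder; on the next generator pass each file is renamed to the sha1 of its contents under templates/, and byte-identical duplicates are discarded. For example (the paths are illustrative, not from this PR):

  import shutil

  # Copy a new seed into the project's input folder; the generator
  # hashes and moves it into templates/ on its next iteration.
  shutil.copy("poc.pdf", "/nightmare/data/pdf_project/input/")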

   def find_crashes(self):
-    what = "project_id, tube_prefix"
+    what = "project_id, subfolder, tube_prefix"
     res = self.db.select("projects", what=what, where="enabled = 1")

     for row in res:
       tube_name = "%s-crash" % row.tube_prefix
       q = get_queue(watch=True, name=tube_name)
       while q.stats_tube(tube_name)["current-jobs-ready"] > 0:
         job = q.reserve()
-        crash_info = json.loads(job.body)
-        temp_file = crash_info.keys()[0]
-        crash_data = crash_info.values()[0]
-        self.insert_crash(row.project_id, temp_file, crash_data)
+        d = json.loads(job.body)
+        self.insert_crash(row.project_id, row.subfolder, d)
         job.delete()

   def generate(self):
     log("Starting generator...")
     while 1:
+      debug("Add templates...")
+      self.add_templates()
       debug("Finding crashes...")
       self.find_crashes()
       debug("Checking files to remove...")
@@ … @@

       for pe in project_engines:
         tube_prefix = pe.tube_prefix
+        tube_name = "%s-samples" % tube_prefix
         maximum = pe.maximum_samples
-        if not self.queue_is_full(tube_prefix, maximum):
-          for i in range(self.get_pending_elements(tube_prefix, maximum)):
-            if self.queue_is_full(tube_prefix, maximum):
+        if not self.queue_is_full(tube_name, maximum):
+          for i in range(self.get_pending_elements(tube_name, maximum)):
+            if self.queue_is_full(tube_name, maximum):
               break

             line = "Creating sample for %s from folder %s for tube %s mutator %s"