Merged
25 commits
81f902c  added system version as standard field and dynamic generation in yara… (dorpvom, Nov 14, 2018)
d072032  fixed views accordingly (dorpvom, Nov 14, 2018)
f841224  removed caching + refactoring (jstucke, Nov 14, 2018)
82ab384  Merge branch '156-replace-analysis-caching' of https://github.com/fki… (jstucke, Nov 14, 2018)
21ac4bb  up to date analysis skipping WIP (jstucke, Nov 14, 2018)
bf3ec00  added smart scheduling (jstucke, Nov 28, 2018)
9245742  Merge branch 'master' into 156-replace-analysis-caching (jstucke, Nov 28, 2018)
11332eb  added desanitization + refactoring (jstucke, Nov 28, 2018)
00df108  test DB-Mock bugfix (jstucke, Dec 5, 2018)
8d38b94  removed now redundant plugin analysis history artefacts (jstucke, Dec 5, 2018)
4b34947  added test for unknown plugin (jstucke, Dec 5, 2018)
edbc3ca  Merge branch 'master' into 156-replace-analysis-caching (jstucke, Dec 5, 2018)
7dac323  spy bugfix (jstucke, Dec 12, 2018)
69e9f93  added dependency scheduling, removed pluginBase rescheduling (jstucke, Dec 13, 2018)
20ec598  update task bugfix + blacklist bugfix + added tests (jstucke, Dec 14, 2018)
821101f  sped up tests (jstucke, Dec 14, 2018)
800979f  Merge branch 'master' into 156-replace-analysis-caching (jstucke, Jan 2, 2019)
7ec6ae7  missing version bugfix (jstucke, Jan 3, 2019)
77f085c  Merge branch 'master' of https://github.com/fkie-cad/FACT_core into 1… (dorpvom, Jan 11, 2019)
007660a  requested review change (jstucke, Jan 11, 2019)
dc17564  type fix (jstucke, Jan 11, 2019)
99bb01d  live review fixes (dorpvom, Jan 16, 2019)
2429224  fixed tests (jstucke, Jan 16, 2019)
1ef99b6  added new test + refactored mock (jstucke, Jan 16, 2019)
ce38e73  added missing test (jstucke, Jan 16, 2019)
55 changes: 21 additions & 34 deletions src/analysis/PluginBase.py
@@ -3,10 +3,10 @@
from queue import Empty
from time import time

from helperFunctions.dependency import get_unmatched_dependencies, schedule_dependencies
from helperFunctions.parsing import bcolors
from helperFunctions.process import ExceptionSafeProcess, terminate_process_and_childs
from helperFunctions.tag import TagColor
from objects.file import FileObject
from plugins.base import BasePlugin


@@ -16,12 +16,12 @@ class AnalysisBasePlugin(BasePlugin): # pylint: disable=too-many-instance-attri
recursive flag: If True (default) recursively analyze included files
'''
VERSION = 'not set'
SYSTEM_VERSION = None

timeout = None

def __init__(self, plugin_administrator, config=None, recursive=True, no_multithread=False, timeout=300, offline_testing=False, plugin_path=None): # pylint: disable=too-many-arguments
super().__init__(plugin_administrator, config=config, plugin_path=plugin_path)
self.history = set()
self.check_config(no_multithread)
self.recursive = recursive
self.in_queue = Queue()
@@ -35,35 +35,22 @@ def __init__(self, plugin_administrator, config=None, recursive=True, no_multith
if not offline_testing:
self.start_worker()

def add_job(self, fw_object):
if self._job_is_already_done(fw_object):
logging.debug('{} analysis already done -> skip: {}\n Analysis Dependencies: {}'.format(
self.NAME, fw_object.get_uid(), fw_object.analysis_dependency))
elif self._recursive_condition_is_set(fw_object):
if self._dependencies_are_fulfilled(fw_object):
self.history.add(fw_object.get_uid())
self.in_queue.put(fw_object)
return
self._reschedule_job(fw_object)
def add_job(self, fw_object: FileObject):
if self._dependencies_are_unfulfilled(fw_object):
logging.error('{}: dependencies of plugin {} not fulfilled'.format(fw_object.get_uid(), self.NAME))
elif self._analysis_depth_not_reached_yet(fw_object):
self.in_queue.put(fw_object)
return
self.out_queue.put(fw_object)

def _reschedule_job(self, fw_object):
unmatched_dependencies = get_unmatched_dependencies([fw_object], self.DEPENDENCIES)
logging.debug('{} rescheduled due to unmatched dependencies:\n {}'.format(fw_object.get_virtual_file_paths(), unmatched_dependencies))
fw_object.scheduled_analysis = schedule_dependencies(fw_object.scheduled_analysis, unmatched_dependencies, self.NAME)
fw_object.analysis_dependency = fw_object.analysis_dependency.union(set(unmatched_dependencies))
logging.debug('new schedule for {}:\n {}\nAnalysis Dependencies: {}'.format(
fw_object.get_virtual_file_paths(), fw_object.scheduled_analysis, fw_object.analysis_dependency))
def _dependencies_are_unfulfilled(self, fw_object: FileObject):
# FIXME plugins can be in processed_analysis and could still be skipped, etc. -> need a way to verify that
# FIXME the analysis ran successfully
return any(dep not in fw_object.processed_analysis for dep in self.DEPENDENCIES)

def _job_is_already_done(self, fw_object):
return (fw_object.get_uid() in self.history) and (self.NAME not in fw_object.analysis_dependency)

def _recursive_condition_is_set(self, fo):
def _analysis_depth_not_reached_yet(self, fo):
return self.recursive or fo.depth == 0

def _dependencies_are_fulfilled(self, fo):
return get_unmatched_dependencies([fo], self.DEPENDENCIES) == []

def process_object(self, file_object): # pylint: disable=no-self-use
'''
This function must be implemented by the plugin
@@ -96,6 +83,8 @@ def shutdown(self):
self.in_queue.close()
self.out_queue.close()

# ---- internal functions ----

def add_analysis_tag(self, file_object, tag_name, value, color=TagColor.LIGHT_BLUE, propagate=False):
new_tag = {
tag_name: {
@@ -110,18 +99,16 @@ def add_analysis_tag(self, file_object, tag_name, value, color=TagColor.LIGHT_BL
else:
file_object.processed_analysis[self.NAME]['tags'].update(new_tag)

# ---- internal functions ----

def init_dict(self):
results = {}
results['analysis_date'] = time()
results['plugin_version'] = self.VERSION
return results
result_update = {'analysis_date': time(), 'plugin_version': self.VERSION}
if self.SYSTEM_VERSION:
result_update.update({'system_version': self.SYSTEM_VERSION})
return result_update

def check_config(self, no_multihread):
def check_config(self, no_multithread):
if self.NAME not in self.config:
self.config.add_section(self.NAME)
if 'threads' not in self.config[self.NAME] or no_multihread:
if 'threads' not in self.config[self.NAME] or no_multithread:
self.config.set(self.NAME, 'threads', '1')

def start_worker(self):
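For context, the rewritten `add_job` drops the old history and rescheduling mechanism: a file object is queued for analysis only if every declared dependency already appears in its `processed_analysis` and the recursion condition holds; otherwise it is logged and handed straight to the out queue, since dependency ordering is now done by the scheduler (commit 69e9f93). Below is a minimal standalone sketch of that decision flow; `MockFileObject`, `MiniPlugin`, and the plugin and dependency names are simplified stand-ins, not the real FACT classes.

```python
import logging
from queue import Queue


class MockFileObject:
    '''Simplified stand-in for objects.file.FileObject (illustration only).'''

    def __init__(self, uid, processed_analysis=None, depth=0):
        self.uid = uid
        self.processed_analysis = processed_analysis if processed_analysis is not None else {}
        self.depth = depth

    def get_uid(self):
        return self.uid


class MiniPlugin:
    '''Mirrors the decision flow of the new AnalysisBasePlugin.add_job.'''

    NAME = 'demo_plugin'
    DEPENDENCIES = ['file_type']

    def __init__(self, recursive=True):
        self.recursive = recursive
        self.in_queue, self.out_queue = Queue(), Queue()

    def add_job(self, fw_object):
        if any(dep not in fw_object.processed_analysis for dep in self.DEPENDENCIES):
            logging.error('{}: dependencies of plugin {} not fulfilled'.format(fw_object.get_uid(), self.NAME))
        elif self.recursive or fw_object.depth == 0:
            self.in_queue.put(fw_object)  # analysis will run on this object
            return
        self.out_queue.put(fw_object)  # skipped: passed on to the next plugin unchanged


plugin = MiniPlugin(recursive=False)
plugin.add_job(MockFileObject('a', {'file_type': {}}, depth=0))  # -> in_queue (analyzed)
plugin.add_job(MockFileObject('b', {'file_type': {}}, depth=2))  # -> out_queue (recursion disabled)
plugin.add_job(MockFileObject('c', {}, depth=0))                 # -> out_queue, error logged
```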
17 changes: 13 additions & 4 deletions src/analysis/YaraPluginBase.py
@@ -3,6 +3,7 @@
import re
import os
import subprocess
from pathlib import Path

from analysis.PluginBase import AnalysisBasePlugin
from helperFunctions.fileSystem import get_src_dir
@@ -12,9 +13,9 @@ class YaraBasePlugin(AnalysisBasePlugin):
'''
This should be the base for all YARA based analysis plugins
'''
NAME = "Yara_Base_Plugin"
DESCRIPTION = "this is a Yara plugin"
VERSION = "0.0"
NAME = 'Yara_Base_Plugin'
DESCRIPTION = 'this is a Yara plugin'
VERSION = '0.0'

def __init__(self, plugin_administrator, config=None, recursive=True, plugin_path=None):
'''
@@ -23,8 +24,16 @@ def __init__(self, plugin_administrator, config=None, recursive=True, plugin_pat
'''
self.config = config
self._get_signature_file(plugin_path)
self.SYSTEM_VERSION = self.get_yara_system_version()
super().__init__(plugin_administrator, config=config, recursive=recursive, plugin_path=plugin_path)

def get_yara_system_version(self):
with subprocess.Popen(['yara', '--version'], stdout=subprocess.PIPE) as process:
yara_version = process.stdout.readline().decode().strip()

access_time = int(Path(self.signature_path).stat().st_mtime)
return '{}_{}'.format(yara_version, access_time)

def process_object(self, file_object):
if self.signature_path is not None:
with subprocess.Popen('yara --print-meta --print-strings {} {}'.format(self.signature_path, file_object.file_path), shell=True, stdout=subprocess.PIPE) as process:
@@ -101,7 +110,7 @@ def _parse_meta_data(meta_data_string):
for item in meta_data_string.split(','):
if '=' in item:
key, value = item.split('=', maxsplit=1)
value = json.loads(value) if value in ['true', 'false'] else value.strip('\"')
value = json.loads(value) if value in ['true', 'false'] else value.strip('"')
meta_data[key] = value
else:
logging.warning('Malformed meta string \'{}\''.format(meta_data_string))
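The new `SYSTEM_VERSION` of YARA-based plugins combines the installed yara version with the modification time of the plugin's compiled signature file, so stored results are invalidated when either changes. A small standalone sketch of how that string is composed; it assumes a `yara` binary on the PATH and an existing signature file, and the example path is hypothetical.

```python
import subprocess
from pathlib import Path


def yara_system_version(signature_path):
    '''Compose '<yara version>_<signature mtime>' like YaraBasePlugin.get_yara_system_version.'''
    yara_version = subprocess.run(
        ['yara', '--version'], stdout=subprocess.PIPE, check=True
    ).stdout.decode().strip()
    signature_mtime = int(Path(signature_path).stat().st_mtime)
    return '{}_{}'.format(yara_version, signature_mtime)


# usage (hypothetical signature path):
# print(yara_system_version('signatures/example_rules.yc'))  # e.g. '3.8.1_1547078400'
```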
7 changes: 0 additions & 7 deletions src/helperFunctions/dependency.py
@@ -1,10 +1,3 @@
def schedule_dependencies(schedule_list, dependency_list, myself):
for item in dependency_list:
if item not in schedule_list:
schedule_list.append(item)
return [myself] + schedule_list


def get_unmatched_dependencies(fo_list, dependency_list):
missing_dependencies = []
for dependency in dependency_list:
8 changes: 8 additions & 0 deletions src/helperFunctions/merge_generators.py
@@ -1,5 +1,9 @@
from itertools import zip_longest
from copy import deepcopy
from random import sample, seed


seed()


def merge_generators(*generators):
@@ -70,3 +74,7 @@ def avg(l):
if len(l) == 0:
return 0
return sum(l) / len(l)


def shuffled(sequence):
return sample(sequence, len(sequence))
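The new `shuffled` helper (added together with a module-level `seed()` call) returns a new, randomly ordered list rather than shuffling its argument in place. A short usage sketch; the plugin names are illustrative only.

```python
from random import sample, seed

seed()


def shuffled(sequence):
    '''Return a new list with the elements of sequence in random order.'''
    return sample(sequence, len(sequence))


plugins = ['file_type', 'software_components', 'crypto_material']  # illustrative plugin names
print(shuffled(plugins))  # e.g. ['software_components', 'crypto_material', 'file_type']
print(plugins)            # the original list keeps its order
```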
1 change: 0 additions & 1 deletion src/objects/file.py
@@ -22,7 +22,6 @@ def __init__(self, binary=None, file_name=None, file_path=None, scheduled_analys
self.depth = 0
self.processed_analysis = {}
self.scheduled_analysis = scheduled_analysis
self.analysis_dependency = set()
self.comments = []
self.parent_firmware_uids = set()
self.temporary_data = {}
@@ -3,7 +3,7 @@
{% block analysis_result_details %}

{% for key in firmware.processed_analysis[selected_analysis] %}
{% if key not in ['summary', 'plugin_version', 'analysis_date', 'tags', 'skipped'] %}
{% if key not in ['summary', 'plugin_version', 'system_version', 'analysis_date', 'tags', 'skipped'] %}
<tr>
<td class="active"> Description:</td>
<td class="active"> {{ key }} </td>
@@ -3,7 +3,7 @@
{% block analysis_result_details %}

{% for key in firmware.processed_analysis[selected_analysis] %}
{% if key not in ['summary', 'plugin_version', 'analysis_date', 'skipped', 'tags'] %}
{% if key not in ['summary', 'plugin_version', 'analysis_date', 'skipped', 'system_version', 'tags'] %}
<tr>
<td class="{{ firmware.processed_analysis[selected_analysis][key]['score'] | vulnerability_class }}">{{ key }}</td>
<td> {{ firmware.processed_analysis[selected_analysis][key]['description'] }}
@@ -10,7 +10,7 @@
<col width="600">
</colgroup>
{% for key in firmware.processed_analysis[selected_analysis] %}
{% if key not in ['summary', 'plugin_version', 'analysis_date', 'tags', 'skipped'] %}
{% if key not in ['summary', 'system_version', 'plugin_version', 'analysis_date', 'tags', 'skipped'] %}
<tr>
<td class="active" rowspan=5> {{loop.index - 1}} </td>
<td class="active"> Software Name:</td>
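The three result-detail templates above now also filter out the `system_version` key, since `init_dict` writes it into every result whose plugin defines a `SYSTEM_VERSION`. A hypothetical sketch of what such a `processed_analysis` entry might look like after this change; all field values are illustrative.

```python
# Hypothetical shape of one processed_analysis entry; only the non-meta keys
# (here 'OpenSSL') are rendered as result rows by the templates.
processed_analysis_entry = {
    'analysis_date': 1547078400.0,
    'plugin_version': '0.3.2',             # the plugin's VERSION
    'system_version': '3.8.1_1547078400',  # new: yara version plus signature mtime
    'summary': ['OpenSSL'],
    'OpenSSL': {'meta': {'description': 'illustrative match data'}},
}
```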