diff --git a/src/helperFunctions/ccp_tresholds.py b/src/helperFunctions/ccp_tresholds.py
deleted file mode 100644
index 8f27acbc1..000000000
--- a/src/helperFunctions/ccp_tresholds.py
+++ /dev/null
@@ -1,99 +0,0 @@
-from collections import namedtuple
-
-ProcessValues = namedtuple('ProcessValues', ['slow', 'shigh', 'clow', 'chigh', 'glow', 'ghigh'])
-AnalysisResult = namedtuple('AnalysisResult', ['shannon', 'chi', 'gtest'])
-
-ENCRYPTED = {
- '1024': [
- ProcessValues(slow=0.9708, shigh=0.9710, clow=-1, chigh=0.9590, glow=-1, ghigh=0.0357),
- ProcessValues(slow=0.9711, shigh=0.9836, clow=-1, chigh=0.9629, glow=-1, ghigh=0.0357),
- ProcessValues(slow=0.9715, shigh=0.9836, clow=0.9629, chigh=0.9707, glow=-1, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9725, clow=0.9707, chigh=0.9746, glow=-1, ghigh=0.03186),
- ProcessValues(slow=0.9727, shigh=0.9836, clow=0.9707, chigh=0.9746, glow=-1, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9836, clow=0.9747, chigh=3000, glow=-1, ghigh=0.0317)
- ],
- '4096': [
- ProcessValues(slow=-1, shigh=1000, clow=-1, chigh=1000, glow=-1, ghigh=0.031334)
- ],
- '12288': [
- ProcessValues(slow=-1, shigh=100, clow=-1, chigh=500, glow=-1, ghigh=0.32314),
- ]
-}
-
-COMPRESSED = {
- '1024': [
- ProcessValues(slow=-1, shigh=0.9707, clow=-1, chigh=0.9590, glow=-1, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9710, clow=0.9590, chigh=0.9629, glow=-1, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9714, clow=0.9629, chigh=0.9707, glow=-1, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9725, clow=0.9629, chigh=0.9746, glow=0.031861, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9836, clow=0.9747, chigh=3000, glow=0.0317, ghigh=0.0357),
- ProcessValues(slow=-1, shigh=0.9836, clow=-1, chigh=3000, glow=0.00001, ghigh=0.0357),
- ProcessValues(slow=0.834875, shigh=300, clow=-1, chigh=0.68557, glow=0.0357, ghigh=0.2606),
- ProcessValues(slow=-1, shigh=0.75117, clow=0.68557, chigh=0.818381, glow=0.0357, ghigh=3000),
- ProcessValues(slow=0.75117, shigh=0.863614, clow=0.68557, chigh=3000, glow=0.0357, ghigh=3000),
- ],
- '4096': [
- ProcessValues(slow=-1, shigh=1000, clow=-1, chigh=1000, glow=0.031335, ghigh=0.032894),
- ProcessValues(slow=0.81297, shigh=1000, clow=-1, chigh=1000, glow=0.032894, ghigh=100),
- ProcessValues(slow=0.78172, shigh=0.81297, clow=0.95655, chigh=1000, glow=0.032894, ghigh=100),
- ProcessValues(slow=-1, shigh=0.78172, clow=0.95655, chigh=1000, glow=0.032894, ghigh=1.323416),
- ],
- '12288': [
- ProcessValues(slow=-1, shigh=100, clow=-1, chigh=500, glow=0.032315, ghigh=0.0328),
- ProcessValues(slow=0.895889, shigh=100, clow=-1, chigh=500, glow=0.0329, ghigh=500),
- ProcessValues(slow=0.824318, shigh=0.895888, clow=-1, chigh=0.995932, glow=0.0329, ghigh=500),
- ProcessValues(slow=0.816443, shigh=0.895888, clow=0.995933, chigh=500, glow=0.0329, ghigh=500),
- ]
-}
-
-PLAIN = {
- '1024': [
- ProcessValues(slow=0.9837, shigh=300, clow=-1, chigh=3000, glow=-1, ghigh=0),
- ProcessValues(slow=-1, shigh=0.834876, clow=-1, chigh=0.68557, glow=0.0358, ghigh=0.2606),
- ProcessValues(slow=-1, shigh=300, clow=-1, chigh=0.68557, glow=0.2606, ghigh=3000),
- ProcessValues(slow=-1, shigh=0.75117, clow=0.818381, chigh=3000, glow=0.0358, ghigh=3000),
- ProcessValues(slow=0.863614, shigh=300, clow=0.68557, chigh=3000, glow=0.0358, ghigh=3000), ],
- '4096': [
- ProcessValues(slow=-1, shigh=0.78172, clow=0.95655, chigh=1000, glow=1.323416, ghigh=100),
- ProcessValues(slow=-1, shigh=0.81297, clow=-1, chigh=0.95655, glow=0.032894, ghigh=100)
- ],
- '12288': [
- ProcessValues(slow=-1, shigh=0.816442, clow=-1, chigh=500, glow=0.0329, ghigh=500),
- ProcessValues(slow=-1, shigh=0.824317, clow=-1, chigh=0.995932, glow=0.0329, ghigh=500)
- ]
-}
-
-
-class BlockClass:
- encrypted = 'blue'
- compressed = 'red'
- plain = 'green'
- unknown = 'yellow'
-
-
-def new_categorization_from_features(block_features, blocksize):
- result = AnalysisResult(shannon=block_features['shannon'], chi=block_features['chi'],
- gtest=block_features['g-test'])
-
- if blocksize == 1024:
- if _is_in_class(ENCRYPTED[str(blocksize)], result):
- return BlockClass.encrypted
- if _is_in_class(COMPRESSED[str(blocksize)], result):
- return BlockClass.compressed
- if _is_in_class(PLAIN[str(blocksize)], result):
- return BlockClass.plain
- return BlockClass.unknown
- else:
- if _is_in_class(COMPRESSED[str(blocksize)], result):
- return BlockClass.compressed
- if _is_in_class(ENCRYPTED[str(blocksize)], result):
- return BlockClass.encrypted
- if _is_in_class(PLAIN[str(blocksize)], result):
- return BlockClass.plain
- return BlockClass.unknown
-
-
-def _is_in_class(class_array, result):
- return any(
- v.slow < result.shannon <= v.shigh and v.clow < result.chi <= v.chigh and v.glow < result.gtest <= v.ghigh
- for v in class_array)
diff --git a/src/helperFunctions/tag.py b/src/helperFunctions/tag.py
index efa73b951..fd7b7db13 100644
--- a/src/helperFunctions/tag.py
+++ b/src/helperFunctions/tag.py
@@ -1,8 +1,55 @@
class TagColor:
-
GRAY = 'default'
BLUE = 'primary'
GREEN = 'success'
LIGHT_BLUE = 'info'
ORANGE = 'warning'
RED = 'danger'
+
+
+def check_tags(file_object, analysis_name):
+ tags, root_uid = None, None
+ if analysis_name in file_object.processed_analysis and 'tags' in file_object.processed_analysis[analysis_name]:
+ try:
+ root_uid = file_object.processed_analysis[analysis_name]['tags'].pop('root_uid')
+ except (KeyError, AttributeError):
+ return dict(notags=True)
+ tags = file_object.processed_analysis[analysis_name]['tags']
+ return dict(notags=False, tags=tags, plugin=analysis_name, uid=root_uid) if root_uid else dict(notags=True)
+
+
+def add_tags_to_object(file_object, analysis_name):
+ if analysis_name in file_object.processed_analysis and 'tags' in file_object.processed_analysis[analysis_name]:
+ tags = file_object.processed_analysis[analysis_name]['tags']
+ file_object.analysis_tags[analysis_name] = tags
+ return file_object
+
+
+def update_tags(old_tags, plugin_name, tag_name, tag):
+ tag_is_stable, message = check_tag_integrity(tag)
+
+ if not tag_is_stable:
+ raise ValueError(message)
+
+ if plugin_name not in old_tags:
+ old_tags[plugin_name] = {tag_name: tag}
+
+ old_tags[plugin_name][tag_name] = tag
+
+ return old_tags
+
+
+def check_tag_integrity(tag):
+ if any(key not in tag for key in ['value', 'color', 'propagate']):
+ return False, 'missing key'
+
+ if tag['color'] not in [TagColor.GREEN, TagColor.GRAY, TagColor.BLUE, TagColor.LIGHT_BLUE, TagColor.ORANGE, TagColor.RED]:
+ return False, 'bad tag color'
+
+ if not isinstance(tag['value'], str):
+ return False, 'tag value has to be a string'
+
+ if tag['propagate'] not in [True, False]:
+ return False, 'tag propagate key has to be a boolean'
+
+ return True, 'empty'
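
The helpers above define the tag contract used throughout this patch. A minimal usage sketch; the FileObject stand-in and the concrete values are illustrative assumptions, not part of the patch:

    from helperFunctions.tag import TagColor, check_tag_integrity, check_tags, update_tags

    # a well-formed tag: value must be a string, color one of the TagColor constants,
    # propagate a boolean
    tag = {'value': 'Private Key Found', 'color': TagColor.ORANGE, 'propagate': True}
    is_valid, message = check_tag_integrity(tag)
    assert is_valid, message

    # merge the tag into a per-plugin tag store, as the back end does on propagation
    stored = update_tags(old_tags={}, plugin_name='crypto_material',
                         tag_name='private_key_inside', tag=tag)
    assert stored == {'crypto_material': {'private_key_inside': tag}}

    class FakeFileObject:  # stand-in for objects.file.FileObject, used only for illustration
        processed_analysis = {'crypto_material': {'tags': {'private_key_inside': tag, 'root_uid': 'root_fw_uid'}}}
        analysis_tags = {}

    # check_tags() pops 'root_uid' and returns a propagation request for the tag queue
    request = check_tags(FakeFileObject(), 'crypto_material')
    assert request == {'notags': False, 'tags': {'private_key_inside': tag},
                       'plugin': 'crypto_material', 'uid': 'root_fw_uid'}
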
diff --git a/src/objects/file.py b/src/objects/file.py
index b10a00d97..b101cccb6 100644
--- a/src/objects/file.py
+++ b/src/objects/file.py
@@ -26,6 +26,7 @@ def __init__(self, binary=None, file_name=None, file_path=None, scheduled_analys
self.comments = []
self.parent_firmware_uids = set()
self.temporary_data = {}
+ self.analysis_tags = {}
if binary is not None:
self.set_binary(binary)
else:
diff --git a/src/plugins/analysis/crypto_material/code/crypto_material.py b/src/plugins/analysis/crypto_material/code/crypto_material.py
index d931442f9..dc147d0c9 100644
--- a/src/plugins/analysis/crypto_material/code/crypto_material.py
+++ b/src/plugins/analysis/crypto_material/code/crypto_material.py
@@ -1,5 +1,6 @@
from analysis.YaraPluginBase import YaraBasePlugin
from helperFunctions.parsing import read_asn1_key, read_pkcs_cert, read_ssl_cert
+from helperFunctions.tag import TagColor
class AnalysisPlugin(YaraBasePlugin):
@@ -8,7 +9,7 @@ class AnalysisPlugin(YaraBasePlugin):
'''
NAME = 'crypto_material'
DESCRIPTION = 'detects crypto material like SSH keys and SSL certificates'
- VERSION = '0.5'
+ VERSION = '0.5.1'
FILE = __file__
STARTEND = ['PgpPublicKeyBlock', 'PgpPrivateKeyBlock', 'PgpPublicKeyBlock_GnuPG', 'genericPublicKey',
'SshRsaPrivateKeyBlock', 'SSLPrivateKey']
@@ -37,6 +38,7 @@ def process_object(self, file_object):
self.store_current_match_in_result(file_object=file_object, match=match, result=analysis_result, parsing_function=self.get_ssl_cert)
file_object.processed_analysis[self.NAME] = analysis_result
+ self._add_private_key_tag(file_object, analysis_result)
return file_object
def store_current_match_in_result(self, file_object, match, result, parsing_function):
@@ -98,3 +100,14 @@ def get_offset_pairs(strings=[]):
end_index = strings[index + 2][0] + len(strings[index + 2][2])
pairs.append((strings[index][0], end_index))
return pairs
+
+ def _add_private_key_tag(self, file_object, result):
+ if any('private' in key.lower() for key in result):
+ file_object.processed_analysis[self.NAME]['tags'] = {
+ 'private_key_inside': {
+ 'value': 'Private Key Found',
+ 'color': TagColor.ORANGE,
+ 'propagate': True,
+ },
+ 'root_uid': file_object.get_root_uid()
+ }
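
For reference, the analysis result the plugin now emits when a private-key rule matched looks roughly like this; the uid and the YARA match entry are placeholders, not taken from the patch:

    # shape of file_object.processed_analysis['crypto_material'] after _add_private_key_tag()
    analysis_result = {
        'SshRsaPrivateKeyBlock': {},       # regular YARA match entry, details omitted
        'summary': ['SshRsaPrivateKeyBlock'],
        'tags': {
            'private_key_inside': {
                'value': 'Private Key Found',
                'color': 'warning',        # TagColor.ORANGE
                'propagate': True,
            },
            'root_uid': 'aabb..._1234',    # uid of the containing firmware (placeholder)
        },
    }
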
diff --git a/src/plugins/analysis/crypto_material/test/test_plugin_crypto_material.py b/src/plugins/analysis/crypto_material/test/test_plugin_crypto_material.py
index 9df9dd102..765de0496 100644
--- a/src/plugins/analysis/crypto_material/test/test_plugin_crypto_material.py
+++ b/src/plugins/analysis/crypto_material/test/test_plugin_crypto_material.py
@@ -22,16 +22,16 @@ def test_ssh_public(self):
self._rule_match('id_rsa.pub', 'SshRsaPublicKeyBlock')
def test_ssh_private(self):
- self._rule_match('id_rsa', 'SshRsaPrivateKeyBlock')
+ self._rule_match('id_rsa', 'SshRsaPrivateKeyBlock', expected_number_of_rules=2)
def test_PKCS8(self):
- self._rule_match('pkcs', 'Pkcs8PrivateKey')
+ self._rule_match('pkcs', 'Pkcs8PrivateKey', expected_number_of_rules=2)
def test_PKCS12(self):
self._rule_match('pkcs12', 'Pkcs12Certificate')
def test_SSL_key(self):
- self._rule_match('ssl.key', 'SSLPrivateKey')
+ self._rule_match('ssl.key', 'SSLPrivateKey', expected_number_of_rules=2)
def test_SSL_cert(self):
self._rule_match('ssl.crt', 'SSLCertificate')
diff --git a/src/plugins/analysis/crypto_material/view/crypto_material.html b/src/plugins/analysis/crypto_material/view/crypto_material.html
index 8e8cbf26f..4e93f527a 100644
--- a/src/plugins/analysis/crypto_material/view/crypto_material.html
+++ b/src/plugins/analysis/crypto_material/view/crypto_material.html
@@ -3,7 +3,7 @@
{% block analysis_result_details %}
{% for key in firmware.processed_analysis[selected_analysis] %}
- {% if key not in ['summary', 'plugin_version', 'analysis_date'] %}
+ {% if key not in ['summary', 'plugin_version', 'analysis_date', 'tags'] %}
| Description: |
{{ key }} |
diff --git a/src/scheduler/Analysis.py b/src/scheduler/Analysis.py
index 9578ee174..42b241efb 100644
--- a/src/scheduler/Analysis.py
+++ b/src/scheduler/Analysis.py
@@ -8,6 +8,7 @@
from helperFunctions.parsing import bcolors
from helperFunctions.plugin import import_plugins
from helperFunctions.process import ExceptionSafeProcess, terminate_process_and_childs
+from helperFunctions.tag import check_tags, add_tags_to_object
from storage.db_interface_backend import BackEndDbInterface
@@ -26,6 +27,7 @@ def __init__(self, config=None, post_analysis=None, db_interface=None):
self.load_plugins()
self.stop_condition = Value('i', 0)
self.process_queue = Queue()
+ self.tag_queue = Queue()
self.db_backend_service = db_interface if db_interface else BackEndDbInterface(config=config)
self.post_analysis = self.db_backend_service.add_object if post_analysis is None else post_analysis
self.start_scheduling_process()
@@ -46,6 +48,7 @@ def shutdown(self):
e.submit(self.analysis_plugins[plugin].shutdown)
if getattr(self.db_backend_service, 'shutdown', False):
self.db_backend_service.shutdown()
+ self.tag_queue.close()
self.process_queue.close()
logging.info('Analysis System offline')
@@ -157,6 +160,7 @@ def result_collector(self):
for plugin in self.analysis_plugins:
try:
fw = self.analysis_plugins[plugin].out_queue.get_nowait()
+ fw = self._handle_analysis_tags(fw, plugin)
except Empty:
pass
else:
@@ -165,6 +169,10 @@ def result_collector(self):
if nop:
sleep(int(self.config['ExpertSettings']['block_delay']))
+ def _handle_analysis_tags(self, fw, plugin):
+ self.tag_queue.put(check_tags(fw, plugin))
+ return add_tags_to_object(fw, plugin)
+
def check_further_process_or_complete(self, fw_object):
if not fw_object.scheduled_analysis:
logging.info('Analysis Completed:\n{}'.format(fw_object))
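
The _handle_analysis_tags() hook above fixes the contract between the scheduler and the new TaggingDaemon: every collected result puts exactly one entry on tag_queue. A sketch of the two possible payloads, with illustrative values:

    # plugin produced no tags (or no root_uid): nothing for the daemon to do
    no_tags_entry = {'notags': True}

    # plugin produced tags: the daemon will propagate them to the firmware root object
    propagation_entry = {
        'notags': False,
        'plugin': 'crypto_material',
        'uid': 'root_fw_uid',   # root_uid taken from the analysis result
        'tags': {
            'private_key_inside': {'value': 'Private Key Found', 'color': 'warning', 'propagate': True},
        },
    }
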
diff --git a/src/scheduler/analysis_tag.py b/src/scheduler/analysis_tag.py
new file mode 100644
index 000000000..2cf51c51f
--- /dev/null
+++ b/src/scheduler/analysis_tag.py
@@ -0,0 +1,49 @@
+import logging
+from queue import Empty
+from multiprocessing import Value
+from helperFunctions.process import ExceptionSafeProcess
+
+
+class TaggingDaemon:
+ def __init__(self, analysis_scheduler=None, db_interface=None):
+ self.parent = analysis_scheduler
+ self.config = self.parent.config
+ self.db_interface = db_interface if db_interface else self.parent.db_backend_service
+ self.stop_condition = Value('i', 0)
+
+ self.start_tagging_process()
+ logging.info('Tagging daemon online')
+
+ def shutdown(self):
+ self.stop_condition.value = 1
+ self.tagging_process.join()
+ logging.info('Tagging daemon offline')
+
+ def start_tagging_process(self):
+ self.tagging_process = ExceptionSafeProcess(target=self._analysis_tag_scheduler_main)
+ self.tagging_process.start()
+
+ def _analysis_tag_scheduler_main(self):
+ while self.stop_condition.value == 0:
+ self._fetch_next_tag()
+
+ def _fetch_next_tag(self):
+ try:
+ tags = self.parent.tag_queue.get(timeout=int(self.config['ExpertSettings']['block_delay']))
+ except Empty:
+ return
+
+ if not tags['notags']:
+ if self.db_interface.existence_quick_check(tags['uid']):
+ self._process_tags(tags)
+ else:
+ self.parent.tag_queue.put(tags)
+
+ def _process_tags(self, tags):
+ uid = tags['uid']
+ plugin_name = tags['plugin']
+ for tag_name, tag in tags['tags'].items():
+ if tag['propagate']:
+                # TODO: tags should be deleted as well, but how?
+ self.db_interface.update_analysis_tags(uid=uid, plugin_name=plugin_name, tag_name=tag_name, tag=tag)
+ logging.debug('Tag {} set for plugin {} and uid {}'.format(tag_name, plugin_name, uid))
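
A minimal sketch of how the daemon is wired up next to an AnalysisScheduler and shut down again, mirroring the start_fact_backend.py change further below. Using the test config is an assumption for illustration, and a running back-end database is required, as in the integration tests:

    from helperFunctions.config import get_config_for_testing
    from scheduler.Analysis import AnalysisScheduler
    from scheduler.analysis_tag import TaggingDaemon

    config = get_config_for_testing()                        # assumption: test config instead of a real FACT config
    analysis_service = AnalysisScheduler(config=config)      # owns tag_queue and the DB back end
    tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)

    # ... firmware gets unpacked and analyzed ...

    tagging_service.shutdown()    # stop the tag consumer before the scheduler closes its queues
    analysis_service.shutdown()
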
diff --git a/src/start_fact_backend.py b/src/start_fact_backend.py
index d1b5b8565..1962f931e 100755
--- a/src/start_fact_backend.py
+++ b/src/start_fact_backend.py
@@ -27,6 +27,7 @@
from scheduler.Analysis import AnalysisScheduler
from scheduler.Compare import CompareScheduler
from scheduler.Unpacking import UnpackingScheduler
+from scheduler.analysis_tag import TaggingDaemon
from statistic.work_load import WorkLoadStatistic
PROGRAM_NAME = 'FACT Backend'
@@ -46,6 +47,7 @@ def shutdown(signum, frame):
if __name__ == '__main__':
args, config = program_setup(PROGRAM_NAME, PROGRAM_DESCRIPTION)
analysis_service = AnalysisScheduler(config=config)
+ tagging_service = TaggingDaemon(analysis_scheduler=analysis_service)
unpacking_service = UnpackingScheduler(config=config, post_unpack=analysis_service.add_task, analysis_workload=analysis_service.get_scheduled_workload)
compare_service = CompareScheduler(config=config)
intercom = InterComBackEndBinding(config=config, analysis_service=analysis_service, compare_service=compare_service, unpacking_service=unpacking_service)
@@ -65,6 +67,7 @@ def shutdown(signum, frame):
intercom.shutdown()
compare_service.shutdown()
unpacking_service.shutdown()
+ tagging_service.shutdown()
analysis_service.shutdown()
if not args.testing:
complete_shutdown()
diff --git a/src/storage/db_interface_backend.py b/src/storage/db_interface_backend.py
index 29b88a622..5402d99b9 100644
--- a/src/storage/db_interface_backend.py
+++ b/src/storage/db_interface_backend.py
@@ -2,7 +2,10 @@
import sys
from time import time
+from pymongo.errors import PyMongoError
+
from helperFunctions.dataConversion import convert_str_to_time
+from helperFunctions.tag import update_tags
from objects.file import FileObject
from objects.firmware import Firmware
from storage.db_interface_common import MongoInterfaceCommon
@@ -47,6 +50,7 @@ def update_object(self, new_object=None, old_object=None):
'vendor': new_object.vendor,
'release_date': convert_str_to_time(new_object.release_date),
'tags': new_object.tags,
+ 'analysis_tags': new_object.analysis_tags,
'comments': new_object.comments}})
except Exception as e:
logging.error('Could not update firmware: {} - {}'.format(sys.exc_info()[0].__name__, e))
@@ -57,6 +61,7 @@ def update_object(self, new_object=None, old_object=None):
'$set': {'processed_analysis': old_pa,
'files_included': old_fi,
'virtual_file_path': old_vfp,
+ 'analysis_tags': new_object.analysis_tags,
'comments': new_object.comments,
'parent_firmware_uids': list(parent_firmware_uids)}})
except Exception as e:
@@ -99,6 +104,7 @@ def build_firmware_dict(self, firmware):
'vendor': firmware.vendor,
'release_date': convert_str_to_time(firmware.release_date),
'submission_date': time(),
+ 'analysis_tags': firmware.analysis_tags,
'tags': firmware.tags
}
if hasattr(firmware, 'comments'): # for backwards compatibility
@@ -137,6 +143,7 @@ def build_file_object_dict(self, file_object):
'processed_analysis': analysis,
'files_included': list(file_object.files_included),
'size': file_object.size,
+ 'analysis_tags': file_object.analysis_tags,
'parent_firmware_uids': list(file_object.parent_firmware_uids)
}
for attribute in ['comments']: # for backwards compatibility
@@ -153,3 +160,22 @@ def _convert_to_file_object(self, entry, analysis_filter=None):
file_object = super()._convert_to_file_object(entry, analysis_filter=None)
file_object.set_file_path(entry['file_path'])
return file_object
+
+ def update_analysis_tags(self, uid, plugin_name, tag_name, tag):
+ firmware_object = self.get_object(uid=uid, analysis_filter=[])
+ try:
+ tags = update_tags(firmware_object.analysis_tags, plugin_name, tag_name, tag)
+ except ValueError as value_error:
+ logging.error('Plugin {} tried setting a bad tag {}: {}'.format(plugin_name, tag_name, str(value_error)))
+ return None
+ except AttributeError:
+ logging.error('Firmware not in database yet: {}'.format(uid))
+ return None
+
+        if isinstance(firmware_object, Firmware):
+ try:
+ self.firmwares.update_one({'_id': uid}, {'$set': {'analysis_tags': tags}})
+ except (TypeError, ValueError, PyMongoError) as exception:
+ logging.error('Could not update firmware: {} - {}'.format(type(exception), str(exception)))
+ else:
+            logging.warning('Propagating tag only allowed for firmware. Given: {}'.format(uid))
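
The new update_analysis_tags() call is what the TaggingDaemon ultimately invokes. A usage sketch matching the integration tests added below; the uid is a placeholder and a running test MongoDB is assumed:

    from helperFunctions.config import get_config_for_testing
    from storage.db_interface_backend import BackEndDbInterface

    backend = BackEndDbInterface(config=get_config_for_testing())   # assumes a running test MongoDB

    tag = {'value': 'Private Key Found', 'color': 'warning', 'propagate': True}
    backend.update_analysis_tags(uid='firmware_uid', plugin_name='crypto_material',
                                 tag_name='private_key_inside', tag=tag)
    # the stored firmware document now contains:
    # {'analysis_tags': {'crypto_material': {'private_key_inside': tag}}}
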
diff --git a/src/storage/db_interface_common.py b/src/storage/db_interface_common.py
index fdb4b209f..64b32021d 100644
--- a/src/storage/db_interface_common.py
+++ b/src/storage/db_interface_common.py
@@ -110,10 +110,9 @@ def _convert_to_firmware(self, entry, analysis_filter=None):
firmware.processed_analysis = self.retrieve_analysis(entry['processed_analysis'], analysis_filter=analysis_filter)
firmware.files_included = set(entry['files_included'])
firmware.virtual_file_path = entry['virtual_file_path']
- if 'tags' in entry:
- firmware.tags = entry['tags']
- else:
- firmware.tags = dict()
+ firmware.tags = entry['tags'] if 'tags' in entry else dict()
+ firmware.analysis_tags = entry['analysis_tags'] if 'analysis_tags' in entry else dict()
+
if 'comments' in entry: # for backwards compatibility
firmware.comments = entry['comments']
return firmware
@@ -128,6 +127,8 @@ def _convert_to_file_object(self, entry, analysis_filter=None):
file_object.processed_analysis = self.retrieve_analysis(entry['processed_analysis'], analysis_filter=analysis_filter)
file_object.files_included = set(entry['files_included'])
file_object.parent_firmware_uids = set(entry['parent_firmware_uids'])
+ file_object.analysis_tags = entry['analysis_tags'] if 'analysis_tags' in entry else dict()
+
for attribute in ['comments']: # for backwards compatibility
if attribute in entry:
setattr(file_object, attribute, entry[attribute])
diff --git a/src/test/data/container/with_key.7z b/src/test/data/container/with_key.7z
new file mode 100644
index 000000000..c2bebf632
Binary files /dev/null and b/src/test/data/container/with_key.7z differ
diff --git a/src/test/integration/scheduler/test_cycle_with_tags.py b/src/test/integration/scheduler/test_cycle_with_tags.py
new file mode 100644
index 000000000..70fcf7a27
--- /dev/null
+++ b/src/test/integration/scheduler/test_cycle_with_tags.py
@@ -0,0 +1,71 @@
+import gc
+import unittest
+from multiprocessing import Event, Value
+from tempfile import TemporaryDirectory
+from time import sleep
+from unittest.mock import patch
+
+from helperFunctions.fileSystem import get_test_data_dir
+from objects.firmware import Firmware
+from scheduler.Analysis import AnalysisScheduler
+from scheduler.Unpacking import UnpackingScheduler
+from scheduler.analysis_tag import TaggingDaemon
+from storage.MongoMgr import MongoMgr
+from storage.db_interface_backend import BackEndDbInterface
+from test.common_helper import get_database_names
+from test.integration.common import initialize_config, MockFSOrganizer
+from test.unit.helperFunctions_setup_test_data import clean_test_database
+
+
+class TestTagPropagation(unittest.TestCase):
+
+ def setUp(self):
+ self._tmp_dir = TemporaryDirectory()
+ self._config = initialize_config(self._tmp_dir)
+ self.analysis_finished_event = Event()
+ self.uid_of_key_file = '530bf2f1203b789bfe054d3118ebd29a04013c587efd22235b3b9677cee21c0e_2048'
+
+ self._mongo_server = MongoMgr(config=self._config, auth=False)
+ self.backend_interface = BackEndDbInterface(config=self._config)
+
+ self._analysis_scheduler = AnalysisScheduler(config=self._config, post_analysis=self.count_analysis_finished_event)
+ self._tagging_scheduler = TaggingDaemon(analysis_scheduler=self._analysis_scheduler)
+ self._unpack_scheduler = UnpackingScheduler(config=self._config, post_unpack=self._analysis_scheduler.add_task)
+
+ def count_analysis_finished_event(self, fw_object):
+ self.backend_interface.add_object(fw_object)
+ if fw_object.uid == self.uid_of_key_file:
+ self.analysis_finished_event.set()
+
+ def _wait_for_empty_tag_queue(self):
+ while not self._analysis_scheduler.tag_queue.empty():
+ sleep(0.1)
+
+ def tearDown(self):
+ self._unpack_scheduler.shutdown()
+ self._tagging_scheduler.shutdown()
+ self._analysis_scheduler.shutdown()
+
+ clean_test_database(self._config, get_database_names(self._config))
+ self._mongo_server.shutdown()
+
+ self._tmp_dir.cleanup()
+ gc.collect()
+
+ def test_run_analysis_with_tag(self):
+ test_fw = Firmware(file_path='{}/container/with_key.7z'.format(get_test_data_dir()))
+ test_fw.release_date = '2017-01-01'
+ test_fw.scheduled_analysis = ['crypto_material']
+
+ self._unpack_scheduler.add_task(test_fw)
+
+ assert self.analysis_finished_event.wait(timeout=20)
+
+ processed_fo = self.backend_interface.get_object(self.uid_of_key_file, analysis_filter=['crypto_material'])
+ assert processed_fo.processed_analysis['crypto_material']['tags'], 'no tags set in analysis'
+
+ self._wait_for_empty_tag_queue()
+
+ processed_fw = self.backend_interface.get_object(test_fw.uid, analysis_filter=['crypto_material'])
+ assert processed_fw.analysis_tags, 'tags not propagated properly'
+ assert processed_fw.analysis_tags['crypto_material']['private_key_inside']
diff --git a/src/test/integration/storage/test_db_interface_backend.py b/src/test/integration/storage/test_db_interface_backend.py
index 9df514486..6bf8512a8 100644
--- a/src/test/integration/storage/test_db_interface_backend.py
+++ b/src/test/integration/storage/test_db_interface_backend.py
@@ -125,3 +125,34 @@ def test_add_and_get_object_including_comment(self):
self.assertEqual(author, retrieved_comment['author'])
self.assertEqual(comment, retrieved_comment['comment'])
self.assertEqual(date, retrieved_comment['time'])
+
+ def test_update_analysis_tag_no_firmware(self):
+ self.db_interface_backend.add_file_object(self.test_fo)
+ tag = {'value': 'yay', 'color': 'default', 'propagate': True}
+
+ self.db_interface_backend.update_analysis_tags(self.test_fo.uid, plugin_name='dummy', tag_name='some_tag', tag=tag)
+ processed_fo = self.db_interface_backend.get_object(self.test_fo.uid)
+
+ assert not processed_fo.analysis_tags
+
+ def test_update_analysis_tag_uid_not_found(self):
+ self.db_interface_backend.update_analysis_tags(self.test_fo.uid, plugin_name='dummy', tag_name='some_tag', tag='should not matter')
+ assert not self.db_interface_backend.get_object(self.test_fo.uid)
+
+ def test_update_analysis_tag_bad_tag(self):
+ self.db_interface_backend.add_firmware(self.test_firmware)
+
+ self.db_interface_backend.update_analysis_tags(self.test_firmware.uid, plugin_name='dummy', tag_name='some_tag', tag='bad_tag')
+ processed_firmware = self.db_interface_backend.get_object(self.test_firmware.uid)
+
+ assert not processed_firmware.analysis_tags
+
+ def test_update_analysis_tag_success(self):
+ self.db_interface_backend.add_firmware(self.test_firmware)
+ tag = {'value': 'yay', 'color': 'default', 'propagate': True}
+
+ self.db_interface_backend.update_analysis_tags(self.test_firmware.uid, plugin_name='dummy', tag_name='some_tag', tag=tag)
+ processed_firmware = self.db_interface_backend.get_object(self.test_firmware.uid)
+
+ assert processed_firmware.analysis_tags
+ assert processed_firmware.analysis_tags['dummy']['some_tag'] == tag
diff --git a/src/test/unit/helperFunctions/test_tag.py b/src/test/unit/helperFunctions/test_tag.py
new file mode 100644
index 000000000..4d4dc9c02
--- /dev/null
+++ b/src/test/unit/helperFunctions/test_tag.py
@@ -0,0 +1,91 @@
+from copy import deepcopy
+from unittest.mock import patch
+import pytest
+
+from helperFunctions.tag import TagColor, check_tags, add_tags_to_object, update_tags, check_tag_integrity
+from test.common_helper import TEST_TEXT_FILE
+
+
+@pytest.fixture(scope='function')
+def test_object():
+ return deepcopy(TEST_TEXT_FILE)
+
+
+@pytest.mark.parametrize('tag', [
+ dict(),
+ {'value': None, 'color': 'danger', 'propagate': True},
+ {'value': 12, 'color': 'danger', 'propagate': True},
+ {'value': 'good', 'color': None, 'propagate': True},
+ {'value': 'good', 'color': 12, 'propagate': True},
+ {'value': 'good', 'color': 'bad color', 'propagate': True},
+ {'value': 'good', 'color': 'danger', 'propagate': None},
+ {'value': 'good', 'color': 'danger', 'propagate': 12},
+])
+def test_check_tag_integrity_bad(tag):
+ status, message = check_tag_integrity(tag)
+ assert status is False
+
+
+def test_check_tag_integrity_good():
+ tag = {'value': 'good', 'color': 'danger', 'propagate': False}
+ status, message = check_tag_integrity(tag)
+ assert status is True
+
+
+def test_add_tags_to_object_unknown_analysis(test_object):
+ file_object = add_tags_to_object(test_object, 'any_analysis')
+ assert not file_object.analysis_tags
+
+
+def test_add_tags_to_object_success(test_object):
+ test_object.processed_analysis['some_analysis'] = {'tags': {'tag': 'any_tag'}}
+ file_object = add_tags_to_object(test_object, 'some_analysis')
+ assert 'some_analysis' in file_object.analysis_tags
+ assert file_object.analysis_tags['some_analysis'] == {'tag': 'any_tag'}
+
+
+def test_check_tags_no_analysis():
+ result = check_tags(TEST_TEXT_FILE, 'non_existing_analysis')
+ assert result['notags']
+
+
+def test_check_tags_no_tags():
+ result = check_tags(TEST_TEXT_FILE, 'dummy')
+ assert result['notags']
+
+
+@patch.object(TEST_TEXT_FILE, 'processed_analysis', {'mock_plugin': {'tags': {'some_stuff': 'anything'}}})
+def test_check_tags_missing_root_uid():
+ result = check_tags(TEST_TEXT_FILE, 'mock_plugin')
+ assert result['notags']
+
+
+@patch.object(TEST_TEXT_FILE, 'processed_analysis', {'mock_plugin': {'tags': None}})
+def test_check_tags_bad_type():
+ result = check_tags(TEST_TEXT_FILE, 'mock_plugin')
+ assert result['notags']
+
+
+@patch.object(TEST_TEXT_FILE, 'processed_analysis', {'mock_plugin': {'tags': {'some_stuff': 'anything', 'root_uid': 'abc_123'}}})
+def test_check_tags_found():
+ result = check_tags(TEST_TEXT_FILE, 'mock_plugin')
+ assert not result['notags']
+ assert result['tags'] == {'some_stuff': 'anything'}
+
+
+def test_update_tags_propagate_exception():
+ bad_tag = {'value': 'good', 'color': 'bad color', 'propagate': True}
+ with pytest.raises(ValueError):
+ update_tags(dict(), 'some_plugin', 'any_tag', bad_tag)
+
+
+def test_update_tags_new_plugin():
+ tag = {'value': 'good', 'color': 'danger', 'propagate': False}
+ result = update_tags(old_tags=dict(), plugin_name='some_plugin', tag_name='any_tag', tag=tag)
+ assert result['some_plugin']['any_tag'] == tag
+
+
+def test_update_tags_overwrite_tag():
+ tag = {'value': 'good', 'color': 'danger', 'propagate': False}
+ result = update_tags(old_tags=dict(some_plugin=dict(any_tag=dict())), plugin_name='some_plugin', tag_name='any_tag', tag=tag)
+ assert result['some_plugin']['any_tag'] == tag
diff --git a/src/test/unit/scheduler/test_analysis_tag.py b/src/test/unit/scheduler/test_analysis_tag.py
new file mode 100644
index 000000000..60af79d01
--- /dev/null
+++ b/src/test/unit/scheduler/test_analysis_tag.py
@@ -0,0 +1,91 @@
+from queue import Queue
+from time import sleep
+
+import pytest
+
+from helperFunctions.config import get_config_for_testing
+from scheduler.analysis_tag import TaggingDaemon
+from test.common_helper import DatabaseMock
+
+
+class MockProcess:
+ def __init__(self, **kwargs):
+ pass
+
+ def start(self):
+ pass
+
+ def join(self):
+ pass
+
+
+class MockAnalysisScheduler:
+ def __init__(self):
+ self.tag_queue = Queue()
+ self.config = get_config_for_testing()
+ self.db_backend_service = DatabaseMock(None)
+
+
+@pytest.fixture(scope='function')
+def analysis_service():
+ return MockAnalysisScheduler()
+
+
+@pytest.fixture(scope='function')
+def scheduler(analysis_service):
+ return TaggingDaemon(analysis_scheduler=analysis_service)
+
+
+@pytest.fixture(scope='function')
+def detached_scheduler(monkeypatch, analysis_service):
+ monkeypatch.setattr('scheduler.analysis_tag.ExceptionSafeProcess', MockProcess)
+ return TaggingDaemon(analysis_scheduler=analysis_service)
+
+
+def test_start_process(scheduler):
+ assert scheduler.tagging_process.is_alive()
+ scheduler.stop_condition.value = 1
+ sleep(int(scheduler.config['ExpertSettings']['block_delay']) * 2)
+ assert not scheduler.tagging_process.is_alive()
+
+
+def test_shutdown(detached_scheduler):
+ detached_scheduler.shutdown()
+ assert detached_scheduler.stop_condition.value == 1
+
+
+def test_fetch_tag(detached_scheduler):
+ detached_scheduler.parent.tag_queue.put({'notags': True})
+ assert not detached_scheduler.parent.tag_queue.empty()
+ detached_scheduler._fetch_next_tag()
+ assert detached_scheduler.parent.tag_queue.empty()
+
+
+def test_process_tags(detached_scheduler):
+ mock_queue = Queue()
+ setattr(detached_scheduler, '_process_tags', lambda tags: mock_queue.put(tags))
+ tags = {'notags': False, 'uid': 'error'}
+ detached_scheduler.parent.tag_queue.put(tags)
+ detached_scheduler._fetch_next_tag()
+ assert mock_queue.get(block=False) == tags
+
+
+def test_tag_is_put_back_if_uid_does_not_exist(detached_scheduler):
+ detached_scheduler.parent.tag_queue.put({'notags': False, 'uid': 'does_not_exist'})
+ assert not detached_scheduler.parent.tag_queue.empty()
+ detached_scheduler._fetch_next_tag()
+ assert not detached_scheduler.parent.tag_queue.empty()
+
+
+def test_update_tags(detached_scheduler):
+ mock_queue = Queue()
+ setattr(detached_scheduler.db_interface, 'update_analysis_tags', lambda uid, plugin_name, tag_name, tag: mock_queue.put((uid, plugin_name, tag_name, tag)))
+ tags = {'notags': False, 'uid': 'error', 'plugin': 'mock', 'tags': {'tag1': {'propagate': True}, 'tag2': {'propagate': False}}}
+ detached_scheduler.parent.tag_queue.put(tags)
+ detached_scheduler._fetch_next_tag()
+ assert mock_queue.get(block=False) == ('error', 'mock', 'tag1', {'propagate': True})
+ assert mock_queue.empty()
+
+
+def test_empty_queue_times_out(detached_scheduler):
+ assert not detached_scheduler._fetch_next_tag()
diff --git a/src/test/unit/web_interface/test_filter.py b/src/test/unit/web_interface/test_filter.py
index d67bac4c6..9c4a4dfd8 100644
--- a/src/test/unit/web_interface/test_filter.py
+++ b/src/test/unit/web_interface/test_filter.py
@@ -1,12 +1,13 @@
import unittest
-import pytest
from time import gmtime
-from web_interface.filter import replace_underscore_filter, byte_number_filter, get_all_uids_in_string, nice_list, uids_to_link, \
- list_to_line_break_string, nice_unix_time, nice_number_filter, sort_chart_list_by_value, \
+import pytest
+
+from web_interface.filter import replace_underscore_filter, byte_number_filter, get_all_uids_in_string, nice_list, \
+ uids_to_link, list_to_line_break_string, nice_unix_time, nice_number_filter, sort_chart_list_by_value, \
sort_chart_list_by_name, text_highlighter, generic_nice_representation, list_to_line_break_string_no_sort, \
encode_base64_filter, render_tags, fix_cwe, set_limit_for_data_to_chart, data_to_chart_with_value_percentage_pairs, \
- data_to_chart_limited, vulnerability_class
+ data_to_chart_limited, render_analysis_tags, vulnerability_class
class TestWebInterfaceFilter(unittest.TestCase):
@@ -158,6 +159,23 @@ def test_render_tags(tag_dict, output):
assert render_tags(tag_dict) == output
+def test_empty_analysis_tags():
+ assert render_analysis_tags(dict()) == ''
+
+
+def test_render_analysis_tags_success():
+ tags = {'such plugin': {'tag': {'color': 'very color', 'value': 'wow'}}}
+ output = render_analysis_tags(tags)
+ assert 'label-very color' in output
+ assert '>wow<' in output
+
+
+def test_render_analysis_tags_bad_type():
+ tags = {'such plugin': {42: {'color': 'very color', 'value': 'wow'}}}
+ with pytest.raises(AttributeError):
+ render_analysis_tags(tags)
+
+
@pytest.mark.parametrize('score_and_class', [('low', 'active'), ('medium', 'warning'), ('high', 'danger')])
def test_vulnerability_class_success(score_and_class):
assert vulnerability_class(score_and_class[0]) == score_and_class[1]
diff --git a/src/web_interface/components/jinja_filter.py b/src/web_interface/components/jinja_filter.py
index 31453b2ae..e79b36521 100644
--- a/src/web_interface/components/jinja_filter.py
+++ b/src/web_interface/components/jinja_filter.py
@@ -11,12 +11,13 @@
from helperFunctions.web_interface import ConnectTo
from intercom.front_end_binding import InterComFrontEndBinding
from storage.db_interface_frontend import FrontEndDbInterface
-from web_interface.filter import byte_number_filter, encode_base64_filter, bytes_to_str_filter, \
- replace_underscore_filter, nice_list, data_to_chart_limited, data_to_chart, uids_to_link, get_all_uids_in_string, \
- list_to_line_break_string, sort_comments, nice_unix_time, infection_color, nice_number_filter, \
- sort_chart_list_by_name, sort_chart_list_by_value, text_highlighter, get_canvas_height, \
- comment_out_regex_meta_chars, generic_nice_representation, list_to_line_break_string_no_sort, render_tags, \
- fix_cwe, data_to_chart_with_value_percentage_pairs, vulnerability_class
+from web_interface.filter import byte_number_filter, encode_base64_filter, \
+ bytes_to_str_filter, replace_underscore_filter, nice_list, data_to_chart_limited, data_to_chart, \
+ uids_to_link, get_all_uids_in_string, list_to_line_break_string, sort_comments, \
+ nice_unix_time, infection_color, nice_number_filter, sort_chart_list_by_name, sort_chart_list_by_value, \
+ text_highlighter, get_canvas_height, comment_out_regex_meta_chars, \
+ generic_nice_representation, list_to_line_break_string_no_sort, render_tags, fix_cwe, \
+ data_to_chart_with_value_percentage_pairs, render_analysis_tags, vulnerability_class
class FilterClass:
@@ -118,6 +119,7 @@ def _setup_filters(self):
self._app.jinja_env.filters['regex_meta'] = comment_out_regex_meta_chars
self._app.jinja_env.filters['nice_time'] = time_format
self._app.jinja_env.filters['render_tags'] = render_tags
+ self._app.jinja_env.filters['render_analysis_tags'] = render_analysis_tags
self._app.jinja_env.filters['fix_cwe'] = fix_cwe
self._app.jinja_env.filters['vulnerability_class'] = vulnerability_class
self._app.jinja_env.filters['auth_enabled'] = self.check_auth
diff --git a/src/web_interface/filter.py b/src/web_interface/filter.py
index 9860c1d44..dc1ee8fc9 100644
--- a/src/web_interface/filter.py
+++ b/src/web_interface/filter.py
@@ -246,13 +246,13 @@ def data_to_chart_with_value_percentage_pairs(data, limit=10, color_list=None):
label_list, value_list = set_limit_for_data_to_chart(label_list, limit, value_list)
color_list = set_color_list_for_data_to_chart(color_list, value_list)
result = {
- "labels": label_list,
- "datasets": [{
- "data": value_list,
- "percentage": percentage_list,
- "backgroundColor": color_list,
- "borderColor": color_list,
- "borderWidth": 1
+ 'labels': label_list,
+ 'datasets': [{
+ 'data': value_list,
+ 'percentage': percentage_list,
+ 'backgroundColor': color_list,
+ 'borderColor': color_list,
+ 'borderWidth': 1
}]
}
return result
@@ -267,7 +267,7 @@ def set_color_list_for_data_to_chart(color_list, value_list):
def set_limit_for_data_to_chart(label_list, limit, value_list):
if limit and len(label_list) > limit:
label_list = label_list[:limit]
- label_list.append("rest")
+ label_list.append('rest')
rest_sum = sum(value_list[limit:])
value_list = value_list[:limit]
value_list.append(rest_sum)
@@ -304,12 +304,23 @@ def render_tags(tag_dict, additional_class='', size=10):
return output
+def render_analysis_tags(tags, size=10):
+ output = ''
+ if tags:
+ for plugin_name in tags:
+ for key, tag in tags[plugin_name].items():
+                output += '<span class="label label-pill label-{}" style="font-size: {}px;" data-toggle="tooltip" title="{}: {}">{}</span>\n'.format(
+ tag['color'], size, replace_underscore_filter(plugin_name), replace_underscore_filter(key), tag['value']
+ )
+ return output
+
+
def fix_cwe(s):
- if ("CWE" in s):
- return s.split("]")[0].split("E")[-1]
+ if 'CWE' in s:
+ return s.split(']')[0].split('E')[-1]
else:
- logging.warning("Expected a CWE string.")
- return ""
+ logging.warning('Expected a CWE string.')
+ return ''
def vulnerability_class(score):
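
Sketch of the new filter's output for one propagated tag; the markup follows the span template above and the values are illustrative:

    from web_interface.filter import render_analysis_tags

    tags = {'crypto_material': {'private_key_inside': {'color': 'warning', 'value': 'Private Key Found'}}}
    print(render_analysis_tags(tags, size=12))
    # <span class="label label-pill label-warning" style="font-size: 12px;"
    #  data-toggle="tooltip" title="crypto material: private key inside">Private Key Found</span>
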
diff --git a/src/web_interface/templates/analysis_plugins/generic.html b/src/web_interface/templates/analysis_plugins/generic.html
index dcedb09e1..893303c85 100644
--- a/src/web_interface/templates/analysis_plugins/generic.html
+++ b/src/web_interface/templates/analysis_plugins/generic.html
@@ -3,7 +3,7 @@
{% block analysis_result_details %}
{% for key in firmware.processed_analysis[selected_analysis]|sort %}
- {% if key not in ['summary', 'plugin_version', 'analysis_date'] %}
+ {% if key not in ['summary', 'plugin_version', 'analysis_date', 'tags'] %}
| {{key}} |
{{firmware.processed_analysis[selected_analysis][key] | nice_generic | safe}}
|
diff --git a/src/web_interface/templates/show_analysis.html b/src/web_interface/templates/show_analysis.html
index 8af61499c..ae9eb77b2 100644
--- a/src/web_interface/templates/show_analysis.html
+++ b/src/web_interface/templates/show_analysis.html
@@ -14,7 +14,8 @@
("file size", firmware.size|number_format(verbose=True), "", ""),
("MD5", firmware.md5, "word-break: break-all;", ""),
("SHA256", firmware.sha256, "word-break: break-all;", ""),
- ("Tags", firmware.tags|render_tags(size=12)|safe, "", "")
+ ("Tags", firmware.tags|render_tags(size=12)|safe, "", ""),
+ ("Analysis Tags", firmware.analysis_tags | render_analysis_tags(size=12) | safe, "", "")
] -%}
{% block head %}