Added Investigate analyzer

Matt Foley 2018-07-20 11:37:33 -07:00
parent 3c94a6292f
commit 25b8904551
8 changed files with 247 additions and 0 deletions


@@ -0,0 +1,23 @@
{
  "name": "Investigate_Categorization",
  "version": "1.0",
  "author": "Cisco Umbrella Research @opendns",
  "url": "https://github.com/TheHive-Project/Cortex-Analyzers/Investigate",
  "license": "AGPL-V3",
  "description": "Retrieve Investigate categorization and security features for a domain.",
  "dataTypeList": ["domain", "fqdn"],
  "command": "Investigate/investigate_analyzer.py",
  "baseConfig": "Investigate",
  "config": {
    "service": "categorization"
  },
  "configurationItems": [
    {
      "name": "key",
      "description": "Define the Investigate API Key",
      "type": "string",
      "multi": false,
      "required": true
    }
  ]
}


@@ -0,0 +1,23 @@
{
  "name": "Investigate_Sample",
  "version": "1.0",
  "author": "Cisco Umbrella Research @opendns",
  "url": "https://github.com/TheHive-Project/Cortex-Analyzers/Investigate",
  "license": "AGPL-V3",
  "description": "Retrieve sample data from Investigate for a hash. (Sample data provided by ThreatGrid)",
  "dataTypeList": ["hash"],
  "command": "Investigate/investigate_analyzer.py",
  "baseConfig": "Investigate",
  "config": {
    "service": "sample"
  },
  "configurationItems": [
    {
      "name": "key",
      "description": "Define the Investigate API Key",
      "type": "string",
      "multi": false,
      "required": true
    }
  ]
}


@@ -0,0 +1,108 @@
#!/usr/bin/env python3
# encoding: utf-8
from cortexutils.analyzer import Analyzer
from investigate import Investigate
from requests.exceptions import HTTPError


class InvestigateAnalyzer(Analyzer):
    STATUS_MAP = {-1: 'Blocked', 0: 'Unknown', 1: 'Whitelisted'}

    def __init__(self):
        Analyzer.__init__(self)
        self.service = self.get_param(
            'config.service', None, 'Service parameter is missing')

    def investigate(self, data):
        api = Investigate(self.get_param('config.key'))
        response = None

        if self.service == 'categorization' and self.data_type in ['domain', 'fqdn']:
            response = api.categorization(data, labels=True)[data]
            response['name'] = data
        elif self.service == 'sample' and self.data_type == 'hash':
            response = api.sample(data)

        return response

    def summary(self, raw):
        taxonomies = []
        namespace = 'Investigate'

        # Summary for the domain categorization report
        if self.service == 'categorization':
            # Taxonomy for the domain's current blocklist status
            predicate = 'Status'
            status = raw.get('status', 0)
            if status == -1:
                level = 'malicious'
            elif status == 1:
                level = 'safe'
            else:
                level = 'suspicious'
            taxonomies.append(self.build_taxonomy(
                level, namespace, predicate, self.STATUS_MAP.get(status, 'Unknown')))

            # Taxonomy for the security categories associated with the domain
            level = 'info'
            predicate = 'Security Categories'
            security_categories = raw.get('security_categories', [])
            display_str = ', '.join(security_categories) if security_categories else 'None'
            taxonomies.append(self.build_taxonomy(level, namespace, predicate, display_str))

            # Taxonomy for the content categories associated with the domain
            predicate = 'Content Categories'
            content_categories = raw.get('content_categories', [])
            display_str = ', '.join(content_categories) if content_categories else 'None'
            taxonomies.append(self.build_taxonomy(level, namespace, predicate, display_str))

        # Summary for a file hash lookup in the sample database
        if self.service == 'sample':
            predicate = 'ThreatScore'
            if 'error' in raw:
                level = 'info'
                message = 'Hash not found'
            else:
                if raw['threatScore'] < 50:
                    level = 'safe'
                elif 50 <= raw['threatScore'] < 80:
                    level = 'suspicious'
                else:
                    level = 'malicious'
                message = str(raw['threatScore'])
            taxonomies.append(self.build_taxonomy(level, namespace, predicate, message))

        return {'taxonomies': taxonomies}

    def run(self):
        data = self.get_data()

        try:
            r = self.investigate(data)
            if r is None:
                self.error('Unknown Investigate service or invalid data type')
            else:
                self.report(r)
        except HTTPError:
            self.error('An HTTP error occurred. Check the API key.')
        except Exception as e:
            self.unexpectedError(e)


if __name__ == '__main__':
    InvestigateAnalyzer().run()
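
A quick way to smoke-test the analyzer outside Cortex is to feed it a job object on stdin. This is only a sketch, assuming cortexutils falls back to reading JSON from stdin when no job directory argument is given; the test domain, the relative script path, and the API key placeholder are illustrative and not part of this commit.

#!/usr/bin/env python3
# Local smoke test for the categorization service (sketch, not part of this commit).
# Assumes cortexutils reads the job as JSON on stdin when no job directory is passed,
# and that this is run from the analyzers/ directory; adjust the path and key as needed.
import json
import subprocess

job = {
    'data': 'example.com',                 # hypothetical test domain
    'dataType': 'domain',
    'config': {
        'service': 'categorization',
        'key': 'YOUR_INVESTIGATE_API_KEY'  # placeholder, not a real key
    }
}

proc = subprocess.Popen(
    ['python3', 'Investigate/investigate_analyzer.py'],
    stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True)
out, _ = proc.communicate(json.dumps(job))
print(out)  # JSON report: success flag, summary taxonomies, full Investigate response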


@@ -0,0 +1,3 @@
investigate
cortexutils
requests


@@ -0,0 +1,42 @@
<!-- Success -->
<div class="panel panel-primary" ng-if="success">
  <div class="panel-heading">
    Investigate Categorization for <strong>{{artifact.data}}</strong>
  </div>
  <div class="panel-body">
    <dl class="dl-horizontal">
      <dt>Categorization</dt>
      <dd>
        <span class="label" ng-class="{'-1': 'label-danger', '0': 'label-warning', '1': 'label-success'}[content.status]">
          {{ {'-1': 'Blocked', '0': 'Unknown', '1': 'Whitelisted'}[content.status] }}
        </span>
      </dd>
      <dt>Security Categories</dt>
      <dd>
        <div ng-repeat="c in content.security_categories">
          {{c}}
        </div>
      </dd>
      <dt>Content Categories</dt>
      <dd>
        <div ng-repeat="c in content.content_categories">
          {{c}}
        </div>
      </dd>
    </dl>
  </div>
</div>

<!-- General error -->
<div class="panel panel-danger" ng-if="!success">
  <div class="panel-heading">
    <strong>{{(artifact.data || artifact.attachment.name) | fang}}</strong>
  </div>
  <div class="panel-body">
    <dl class="dl-horizontal" ng-if="content.errorMessage">
      <dt><i class="fa fa-warning"></i> Investigate Categorization: </dt>
      <dd class="wrap">{{content.errorMessage}}</dd>
    </dl>
  </div>
</div>


@@ -0,0 +1,3 @@
<span class="label" ng-repeat="t in content.taxonomies" ng-class="{'info': 'label-info', 'safe': 'label-success', 'suspicious': 'label-warning', 'malicious':'label-danger'}[t.level]">
{{t.namespace}}:{{t.predicate}}="{{t.value}}"
</span>
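
For reference, the labels above are rendered from the taxonomies returned by summary(). A hypothetical summary for a blocked domain, assuming the standard cortexutils build_taxonomy() field names, would look like this:

# Hypothetical summary() result for a blocked domain (illustration only).
# Field names assume the usual cortexutils build_taxonomy() output.
example_summary = {
    'taxonomies': [
        {'level': 'malicious', 'namespace': 'Investigate', 'predicate': 'Status', 'value': 'Blocked'},
        {'level': 'info', 'namespace': 'Investigate', 'predicate': 'Security Categories', 'value': 'Malware'},
        {'level': 'info', 'namespace': 'Investigate', 'predicate': 'Content Categories', 'value': 'None'}
    ]
}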


@@ -0,0 +1,42 @@
<!-- Success -->
<div class="panel panel-primary" ng-if="success">
  <div class="panel-heading">
    Investigate Sample Lookup for <strong>{{artifact.data}}</strong>
  </div>
  <div class="panel-body">
    <dl class="dl-horizontal" ng-if="content.error">
      <dt>Hash not found</dt>
    </dl>
    <dl class="dl-horizontal" ng-if="!content.error">
      <dt>ThreatScore</dt>
      <dd>
        <span class="label" ng-class="{'label-danger': content.threatScore >= 80,
                                       'label-warning': content.threatScore >= 50 && content.threatScore < 80,
                                       'label-success': content.threatScore < 50}">
          {{content.threatScore}}
        </span>
      </dd>
      <dt>Magic Type</dt>
      <dd>
        <div>
          {{content.magicType}}
        </div>
      </dd>
    </dl>
  </div>
</div>

<!-- General error -->
<div class="panel panel-danger" ng-if="!success">
  <div class="panel-heading">
    <strong>{{artifact.data | fang}}</strong>
  </div>
  <div class="panel-body">
    <dl class="dl-horizontal" ng-if="content.errorMessage">
      <dt><i class="fa fa-warning"></i> Investigate Sample: </dt>
      <dd class="wrap">{{content.errorMessage}}</dd>
    </dl>
  </div>
</div>


@@ -0,0 +1,3 @@
<span class="label" ng-repeat="t in content.taxonomies" ng-class="{'info': 'label-info', 'safe': 'label-success', 'suspicious': 'label-warning', 'malicious':'label-danger'}[t.level]">
{{t.namespace}}:{{t.predicate}}="{{t.value}}"
</span>