# LimaCharlie backend for sigmac created by LimaCharlie.io
# Copyright 2019 Refraction Point, Inc

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import yaml
from collections import namedtuple

from .base import BaseBackend
from sigma.parser.modifiers.base import SigmaTypeModifier
from sigma.parser.modifiers.type import SigmaRegularExpressionModifier

# A few helper functions for cases where field mapping cannot be done
# as easily one by one, or can be done more efficiently.
def _windowsEventLogFieldName(fieldName):
    if 'EventID' == fieldName:
        return 'Event/System/EventID'
    return 'Event/EventData/%s' % (fieldName,)
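# For example, _windowsEventLogFieldName('EventID') returns 'Event/System/EventID',
# while _windowsEventLogFieldName('CommandLine') returns 'Event/EventData/CommandLine'.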

# We support many different log sources so we keep a different mapping depending
# on the log source and category.
# The mapping key is product/category/service.
# The mapping value is a tuple like:
# - topLevelParams: top-level parameters to apply to the detection.
# - preConditions: a D&R rule node filtering relevant events.
# - fieldMappings: a dict with a mapping, or a callable, to convert the field name.
# - isAllStringValues: a bool indicating whether all values should be converted to strings.
# - isKeywordsSupported: a bool indicating whether full-text keyword searches are supported.
SigmaLCConfig = namedtuple('SigmaLCConfig', [
    'topLevelParams',
    'preConditions',
    'fieldMappings',
    'isAllStringValues',
    'isKeywordsSupported',
])
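# For example, a Sigma rule with logsource product "windows" and category
# "process_creation" (the service component is currently ignored) resolves
# to the "windows/process_creation/" entry below.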
_allFieldMappings = {
    "windows/process_creation/": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is windows",
        },
        fieldMappings = {
            "CommandLine": "event/COMMAND_LINE",
            "Image": "event/FILE_PATH",
            "ParentImage": "event/PARENT/FILE_PATH",
            "ParentCommandLine": "event/PARENT/COMMAND_LINE",
            "User": "event/USER_NAME",
            # This field is redundant in LC; it seems to always be used with Image,
            # so we will ignore it.
            "OriginalFileName": None,
            # Custom field names coming from somewhere unknown.
            "NewProcessName": "event/FILE_PATH",
            "ProcessCommandLine": "event/COMMAND_LINE",
            # Another one-off command line.
            "Command": "event/COMMAND_LINE",
        },
        isAllStringValues = False,
        isKeywordsSupported = False
    ),
    "windows//": SigmaLCConfig(
        topLevelParams = {
            "target": "log",
            "log type": "wel",
        },
        preConditions = None,
        fieldMappings = _windowsEventLogFieldName,
        isAllStringValues = True,
        isKeywordsSupported = False
    ),
    "windows_defender//": SigmaLCConfig(
        topLevelParams = {
            "target": "log",
            "log type": "wel",
        },
        preConditions = None,
        fieldMappings = _windowsEventLogFieldName,
        isAllStringValues = True,
        isKeywordsSupported = False
    ),
    "dns//": SigmaLCConfig(
        topLevelParams = {
            "event": "DNS_REQUEST",
        },
        preConditions = None,
        fieldMappings = {
            "query": "event/DOMAIN_NAME",
        },
        isAllStringValues = False,
        isKeywordsSupported = False
    ),
    "linux//": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is linux",
        },
        fieldMappings = {
            "keywords": "event/COMMAND_LINE",
            "exe": "event/FILE_PATH",
            "type": None,
        },
        isAllStringValues = False,
        isKeywordsSupported = True),
    "unix//": SigmaLCConfig(
        topLevelParams = {
            "events": [
                "NEW_PROCESS",
                "EXISTING_PROCESS",
            ]
        },
        preConditions = {
            "op": "is linux",
        },
        fieldMappings = {
            "keywords": "event/COMMAND_LINE",
            "exe": "event/FILE_PATH",
            "type": None,
        },
        isAllStringValues = False,
        isKeywordsSupported = True),
    "netflow//": SigmaLCConfig(
        topLevelParams = {
            "event": "NETWORK_CONNECTIONS",
        },
        preConditions = None,
        fieldMappings = {
            "destination.port": "event/NETWORK_ACTIVITY/DESTINATION/PORT",
            "source.port": "event/NETWORK_ACTIVITY/SOURCE/PORT",
        },
        isAllStringValues = False,
        isKeywordsSupported = True)
}


class LimaCharlieBackend(BaseBackend):
    """Converts Sigma rule into LimaCharlie D&R rules. Contributed by LimaCharlie. https://limacharlie.io"""
    identifier = "limacharlie"
    active = True
    config_required = False
    default_config = ["limacharlie"]
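    # Typical invocation (assumed, for illustration only):
    #   sigmac -t limacharlie some_rule.yml
    # which emits a single YAML document containing "detect" and "respond" components.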

    def generate(self, sigmaparser):
        # Take the log source information and figure out which set of mappings to use.
        ruleConfig = sigmaparser.parsedyaml
        ls_rule = ruleConfig['logsource']
        try:
            category = ls_rule['category']
        except KeyError:
            category = ""
        try:
            product = ls_rule['product']
        except KeyError:
            product = ""
        # try:
        #     service = ls_rule['service']
        # except KeyError:
        #     service = ""

        # Don't use the service for now: most Windows Event Logs
        # use a different service with no category, and since we
        # treat all Windows Event Logs together we can ignore
        # the service.
        service = ""

        # See if we have a definition for the source combination.
        mappingKey = "%s/%s/%s" % (product, category, service)
        topFilter, preCond, mappings, isAllStringValues, isKeywordsSupported = _allFieldMappings.get(mappingKey, (None, None, None, None, None))
        if mappings is None:
            raise NotImplementedError("Log source %s/%s/%s not supported by backend." % (product, category, service))

        # Field name conversions.
        self._fieldMappingInEffect = mappings

        # LC event type pre-selector for the type of data.
        self._preCondition = preCond

        # Are all the values treated as strings?
        self._isAllStringValues = isAllStringValues

        # Are we supporting full-text keyword searches?
        self._isKeywordsSupported = isKeywordsSupported

        # Call the original generation code.
        detectComponent = super().generate(sigmaparser)

        # We expect a string (yaml) as output, so if
        # we get anything else we assume it's a core
        # library value and just return it as-is.
        if not isinstance(detectComponent, str):
            return detectComponent

        # It is redundant to deserialize it right after
        # generating the yaml, but we try to use the parent
        # official class code as much as possible for future
        # compatibility.
        detectComponent = yaml.safe_load(detectComponent)

        # Check that we got a proper node and not just a string
        # which we don't really know what to do with.
        if not isinstance(detectComponent, dict):
            raise NotImplementedError("Selection combination not supported.")

        # Apply the top-level filter.
        detectComponent.update(topFilter)

        # Now prepare the Response component.
        respondComponents = [{
            "action": "report",
            "name": ruleConfig["title"],
        }]

        # Add a lot of the metadata available to the report.
        if ruleConfig.get("tags", None) is not None:
            respondComponents[0].setdefault("metadata", {})["tags"] = ruleConfig["tags"]

        if ruleConfig.get("description", None) is not None:
            respondComponents[0].setdefault("metadata", {})["description"] = ruleConfig["description"]

        if ruleConfig.get("references", None) is not None:
            respondComponents[0].setdefault("metadata", {})["references"] = ruleConfig["references"]

        if ruleConfig.get("level", None) is not None:
            respondComponents[0].setdefault("metadata", {})["level"] = ruleConfig["level"]

        if ruleConfig.get("author", None) is not None:
            respondComponents[0].setdefault("metadata", {})["author"] = ruleConfig["author"]
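
        # For illustration, a typical windows/process_creation rule assembles into
        # roughly the following document (actual values depend on the rule):
        #   detect:
        #     events: [NEW_PROCESS, EXISTING_PROCESS]
        #     op: and
        #     rules:
        #       - op: is windows
        #       - <converted Sigma detection>
        #   respond:
        #     - action: report
        #       name: <rule title>
        #       metadata: {...}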

        # Assemble it all as a single, complete D&R rule.
        return yaml.safe_dump({
            "detect": detectComponent,
            "respond": respondComponents,
        })

    def generateQuery(self, parsed):
        # We override the generateQuery function because
        # we generate proper JSON structures internally
        # and only convert to string (yaml) once the
        # whole thing is assembled.
        result = self.generateNode(parsed.parsedSearch)
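        # If the log source defined a pre-condition (e.g. {"op": "is windows"} for
        # windows/process_creation), it is AND-ed with the generated detection below.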
        if self._preCondition is not None:
            result = {
                "op": "and",
                "rules": [
                    self._preCondition,
                    result,
                ]
            }
        return yaml.safe_dump(result)

    def generateANDNode(self, node):
        generated = [ self.generateNode(val) for val in node ]
        filtered = [ g for g in generated if g is not None ]
        if not filtered:
            return None
        if 1 == len(filtered):
            return filtered[0]
        return {
            "op": "and",
            "rules": filtered,
        }

    def generateORNode(self, node):
        generated = [self.generateNode(val) for val in node]
        filtered = [g for g in generated if g is not None]
        if not filtered:
            return None
        if isinstance(filtered[0], str):
            if not self._isKeywordsSupported:
                raise NotImplementedError("Full-text keyword searches not supported.")
            # This seems to be indicative only of "keywords", which are mostly
            # representative of full-text searches. We don't support that, but
            # in some data sources we can alias them to an actual field.
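            # For example, under the "linux//" mapping a keyword like "*wget*" becomes
            # {"op": "contains", "path": "event/COMMAND_LINE", "value": "wget"}.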
            mappedFiltered = []
            for k in filtered:
                op, newVal = self._valuePatternToLcOp(k)
                newOp = {
                    "op": op,
                    "path": self._fieldMappingInEffect["keywords"],
                }
                if op == "matches":
                    newOp["re"] = newVal
                else:
                    newOp["value"] = newVal
                mappedFiltered.append(newOp)
            filtered = mappedFiltered
        if 1 == len(filtered):
            return filtered[0]
        return {
            "op": "or",
            "rules": filtered,
        }

    def generateNOTNode(self, node):
        generated = self.generateNode(node.item)
        if generated is None:
            return None
        if not isinstance(generated, dict):
            raise NotImplementedError("Not operator not available on non-dict nodes.")
        generated['not'] = True
        return generated

    def generateSubexpressionNode(self, node):
        return self.generateNode(node.items)

    def generateListNode(self, node):
        return [self.generateNode(value) for value in node]

    def generateMapItemNode(self, node):
        fieldname, value = node

        # The mapping can be a dictionary of mappings, or a callable
        # returning the correct field name.
        if callable(self._fieldMappingInEffect):
            fieldname = self._fieldMappingInEffect(fieldname)
        else:
            try:
                fieldname = self._fieldMappingInEffect[fieldname]
            except KeyError:
                raise NotImplementedError("Field name %s not supported by backend." % (fieldname,))

        # If the fieldname returned is None, it's a special case where we
        # ignore the node.
        if fieldname is None:
            return None
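
        # For example, ("CommandLine", "*powershell*") under windows/process_creation
        # becomes {"op": "contains", "path": "event/COMMAND_LINE",
        #          "value": "powershell", "case sensitive": False}.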
        if isinstance(value, (int, str)):
            op, newVal = self._valuePatternToLcOp(value)
            return {
                "op": op,
                "path": fieldname,
                "value": newVal,
                "case sensitive": False,
            }
        elif isinstance(value, list):
            subOps = []
            for v in value:
                op, newVal = self._valuePatternToLcOp(v)
                subOps.append({
                    "op": op,
                    "path": fieldname,
                    "value": newVal,
                    "case sensitive": False,
                })
            if 1 == len(subOps):
                return subOps[0]
            return {
                "op": "or",
                "rules": subOps
            }
        elif isinstance(value, SigmaTypeModifier):
            if isinstance(value, SigmaRegularExpressionModifier):
                return {
                    "op": "matches",
                    "path": fieldname,
                    "re": re.compile(value),
                }
            else:
                raise TypeError("Backend does not support TypeModifier: %s" % (str(type(value))))
        elif value is None:
            return {
                "op": "exists",
                "not": True,
                "path": fieldname,
            }
        else:
            raise TypeError("Backend does not support map values of type " + str(type(value)))

    def generateValueNode(self, node):
        return node

    def _valuePatternToLcOp(self, val):
        # Here we convert the string values supported by Sigma that
        # can include wildcards into either proper values (string or int)
        # or into altered values that are functionally equivalent, using
        # a few different LC D&R rule operators.
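        # For example (based on the translation below):
        #   "*evil*"   -> ("contains", "evil")
        #   "evil*"    -> ("starts with", "evil")
        #   "*.evil"   -> ("ends with", ".evil")
        #   "ev*l.exe" -> ("matches", <regex>)
        #   "evil"     -> ("is", "evil")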
        if not isinstance(val, str):
            return ("is", str(val) if self._isAllStringValues else val)
        # The following logic is taken from the WDATP backend to translate
        # the basic wildcard format into a proper regular expression.
        if "*" in val[1:-1]:
            # Contains a wildcard within, so it must be translated: escape the
            # regex special characters handled here ('"', '.', '^', '$' and stray
            # backslashes), then map "*" and "?" to their regex equivalents.
            val = re.sub(r'([".^$]|\\(?![*?]))', r'\\\g<1>', val)
            val = re.sub(r'\*', '.*', val)
            val = re.sub(r'\?', '.', val)
            return ("matches", val)
        # value possibly only starts and/or ends with *, use prefix/postfix match
        # TODO: this is actually not correct since the string could end with
        # a \* expression which would mean it's NOT a wildcard. We'll gloss over
        # it for now to get something out but it should eventually be fixed
        # so that it's accurate in all corner cases.
        if val.endswith("*") and val.startswith("*"):
            return ("contains", val[1:-1])
        elif val.endswith("*"):
            return ("starts with", val[:-1])
        elif val.startswith("*"):
            return ("ends with", val[1:])
        return ("is", val)