2017-02-13 22:14:40 +00:00
# Output backends for sigmac
2017-08-28 22:05:59 +00:00
import sys
2017-02-13 22:14:40 +00:00
import json
2017-02-22 21:47:12 +00:00
import re
import sigma
2017-02-13 22:14:40 +00:00
def getBackendList():
    """Return a list of all active backend classes defined in this module."""
    backends = []
    for _, obj in globals().items():
        # Only plain classes derived from BaseBackend that declare themselves active.
        if type(obj) == type and issubclass(obj, BaseBackend) and obj.active:
            backends.append(obj)
    return backends
def getBackendDict():
    """Map each active backend's identifier to its class."""
    return dict((cls.identifier, cls) for cls in getBackendList())
2017-02-22 21:47:12 +00:00
def getBackend(name):
    """Look up a backend class by identifier; raise LookupError if unknown."""
    backends = getBackendDict()
    try:
        return backends[name]
    except KeyError as e:
        raise LookupError("Backend not found") from e
2017-09-16 21:46:40 +00:00
class BackendOptions(dict):
    """Object contains all options that should be passed to the backend from command line (or other user interfaces)"""

    def __init__(self, options):
        """
        Receives the argparser result from the backend option parameter value list (nargs=*) and builds the dict from it. There are two option types:

        * key=value: self{key} = value
        * key: self{key} = True
        """
        if options is None:     # idiom fix: compare to None with 'is', not '=='
            return
        for option in options:
            # Split only on the first '=' so values may themselves contain '='.
            parsed = option.split("=", 1)
            try:
                self[parsed[0]] = parsed[1]
            except IndexError:  # no '=' present: treat option as a boolean flag
                self[parsed[0]] = True
2017-08-28 22:05:59 +00:00
### Output classes
2017-09-03 22:56:04 +00:00
class SingleOutput:
    """
    Single file output

    By default, this opens the given file for writing; if no file name is
    given, everything is written to standard output.
    """
    def __init__(self, filename=None):
        if isinstance(filename, str):
            self.fd = open(filename, "w")
        else:
            self.fd = sys.stdout

    def print(self, *args, **kwargs):
        """Forward to builtin print(), targeting this output's file object."""
        print(*args, file=self.fd, **kwargs)

    def close(self):
        # Bugfix: only close file objects we opened ourselves. Closing
        # sys.stdout would break any output produced after this backend
        # finishes (and a second SingleOutput() would hold a closed stream).
        if self.fd is not sys.stdout:
            self.fd.close()
2017-09-29 23:03:08 +00:00
### Generic backend base classes and mixins
2017-02-13 22:14:40 +00:00
class BaseBackend:
    """Base class for all backends"""
    identifier = "base"
    active = False
    index_field = None      # field name that is used to address indices
    output_class = None     # one of the above output classes
    file_list = None

    def __init__(self, sigmaconfig, backend_options=None, filename=None):
        """
        Initialize backend. This gets a sigmaconfig object, which is notified about the used backend class by
        passing the object instance to it. Further, output files are initialized by the output class defined in output_class.
        """
        super().__init__()
        # Bugfix: the original checked isinstance(..., (sigma.SigmaConfiguration, None)).
        # None is not a type, so isinstance() raised "TypeError: isinstance() arg 2
        # must be a type" for every input; type(None) is the correct spelling.
        if not isinstance(sigmaconfig, (sigma.SigmaConfiguration, type(None))):
            raise TypeError("SigmaConfiguration object expected")
        self.options = backend_options
        self.sigmaconfig = sigmaconfig
        self.sigmaconfig.set_backend(self)
        self.output = self.output_class(filename)

    def generate(self, sigmaparser):
        """Method is called for each sigma rule and receives the parsed rule (SigmaParser)"""
        for parsed in sigmaparser.condparsed:
            result = self.generateNode(parsed.parsedSearch)
            if parsed.parsedAgg:
                result += self.generateAggregation(parsed.parsedAgg)
            self.output.print(result)

    def generateNode(self, node):
        """Dispatch a parse tree node to the matching generate* method."""
        if type(node) == sigma.ConditionAND:
            return self.generateANDNode(node)
        elif type(node) == sigma.ConditionOR:
            return self.generateORNode(node)
        elif type(node) == sigma.ConditionNOT:
            return self.generateNOTNode(node)
        elif type(node) == sigma.NodeSubexpression:
            return self.generateSubexpressionNode(node)
        elif type(node) == tuple:
            return self.generateMapItemNode(node)
        elif type(node) in (str, int):
            return self.generateValueNode(node)
        elif type(node) == list:
            return self.generateListNode(node)
        else:
            raise TypeError("Node type %s was not expected in Sigma parse tree" % (str(type(node))))

    # The following methods must be overridden by backends that support the
    # respective node type; the base class only raises.
    def generateANDNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateORNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateNOTNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateSubexpressionNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateListNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateMapItemNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateValueNode(self, node):
        raise NotImplementedError("Node type not implemented for this backend")

    def generateAggregation(self, agg):
        raise NotImplementedError("Aggregations not implemented for this backend")

    def finalize(self):
        """
        Is called after the last file was processed with generate(). The right place if this backend is not intended to
        look isolated at each rule, but generates an output which incorporates multiple rules, e.g. dashboards.
        """
        pass
2017-10-18 17:03:38 +00:00
class QuoteCharMixin:
    """
    This class adds the cleanValue method that quotes and filters characters
    according to the configuration in the attributes provided by the mixin.
    """
    reEscape = None             # compiled regex matching characters that must be quoted
    escapeSubst = "\\\\\g<1>"   # substitution applied to reEscape matches (prepends a backslash)
    reClear = None              # compiled regex matching characters removed entirely

    def cleanValue(self, val):
        """Escape and strip characters in val as configured by the class attributes."""
        escape_pattern = self.reEscape
        if escape_pattern:
            val = escape_pattern.sub(self.escapeSubst, val)
        clear_pattern = self.reClear
        if clear_pattern:
            val = clear_pattern.sub("", val)
        return val
class SingleTextQueryBackend(BaseBackend, QuoteCharMixin):
    """Base class for backends that generate one text-based expression from a Sigma rule"""
    identifier = "base-textquery"
    active = False
    output_class = SingleOutput

    # The class variables below define how a query is assembled from a parse
    # tree; most subclasses only need to fill in these templates.
    andToken = None                  # token joining AND-linked expressions
    orToken = None                   # token joining OR-linked expressions
    notToken = None                  # token prepended to negated expressions
    subExpression = None             # subexpression syntax (usually parentheses), %s is the inner expression
    listExpression = None            # list syntax, %s are the items joined with listSeparator
    listSeparator = None             # separator between list items
    valueExpression = None           # value syntax, %s is the value
    mapExpression = None             # field/value condition syntax: first %s is key, second is value
    mapListsSpecialHandling = False  # if True, map items with list values go through generateMapItemListNode
    mapListValueExpression = None    # field/value syntax used when the map value is a list

    def generateANDNode(self, node):
        return self.andToken.join(self.generateNode(child) for child in node)

    def generateORNode(self, node):
        return self.orToken.join(self.generateNode(child) for child in node)

    def generateNOTNode(self, node):
        return self.notToken + self.generateNode(node.item)

    def generateSubexpressionNode(self, node):
        return self.subExpression % self.generateNode(node.items)

    def generateListNode(self, node):
        if not {type(item) for item in node} <= {str, int}:
            raise TypeError("List values must be strings or numbers")
        return self.listExpression % (self.listSeparator.join(self.generateNode(item) for item in node))

    def generateMapItemNode(self, node):
        fieldname, value = node
        if type(value) in (str, int):
            # Scalar values are always rendered with the plain map expression.
            return self.mapExpression % (fieldname, self.generateNode(value))
        elif type(value) == list:
            if self.mapListsSpecialHandling:
                return self.generateMapItemListNode(fieldname, value)
            return self.mapExpression % (fieldname, self.generateNode(value))
        else:
            raise TypeError("Backend does not support map values of type " + str(type(value)))

    def generateMapItemListNode(self, key, value):
        return self.mapListValueExpression % (key, self.generateNode(value))

    def generateValueNode(self, node):
        return self.valueExpression % (self.cleanValue(str(node)))
2017-09-29 23:03:08 +00:00
class MultiRuleOutputMixin:
    """Mixin with common functionality for multi-rule outputs."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.rulenames = set()

    def getRuleName(self, sigmaparser):
        """
        Generate a rule name from the title of the Sigma rule with following properties:

        * Spaces are replaced with -
        * Unique name by addition of a counter if generated name already in usage

        Generated names are tracked by the Mixin.
        """
        name = sigmaparser.parsedyaml["title"].replace(" ", "-")
        if name in self.rulenames:
            # Name collision: append the smallest free counter suffix.
            suffix = 2
            while "%s-%d" % (name, suffix) in self.rulenames:
                suffix += 1
            name = "%s-%d" % (name, suffix)
        self.rulenames.add(name)
        return name
2017-06-02 21:43:45 +00:00
### Backends for specific SIEMs
class ElasticsearchQuerystringBackend(SingleTextQueryBackend):
    """Converts Sigma rule into Elasticsearch query string. Only searches, no aggregations."""
    identifier = "es-qs"
    active = True

    # Query-string syntax metacharacters must be escaped; '<' and '>' cannot
    # be escaped in this syntax and are stripped entirely.
    reEscape = re.compile("([+\\-=!(){}\\[\\]^\"~:\\\\/]|&&|\\|\\|)")
    reClear = re.compile("[<>]")
    andToken = " AND "
    orToken = " OR "
    notToken = "NOT "
    subExpression = "(%s)"
    listExpression = "(%s)"
    listSeparator = " "
    valueExpression = "\"%s\""
    mapExpression = "%s:%s"
    mapListsSpecialHandling = False
2017-03-01 20:47:51 +00:00
2017-09-29 23:03:08 +00:00
class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
    """Converts Sigma rule into Kibana JSON Configuration files (searches only)."""
    identifier = "kibana"
    active = True
    output_class = SingleOutput

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Collects one saved-search object per rule; dumped as one JSON array in finalize().
        self.kibanaconf = list()

    def generate(self, sigmaparser):
        rulename = self.getRuleName(sigmaparser)
        description = sigmaparser.parsedyaml.setdefault("description", "")

        # Resolve the rule's optional 'fields' list into display columns.
        columns = list()
        try:
            for field in sigmaparser.parsedyaml["fields"]:
                mapped = sigmaparser.config.get_fieldmapping(field).resolve_fieldname(field)
                if type(mapped) == str:
                    columns.append(mapped)
                elif type(mapped) == list:
                    columns.extend(mapped)
                else:
                    raise TypeError("Field mapping must return string or list")
        except KeyError:    # rule has no 'fields' attribute
            pass

        indices = sigmaparser.get_logsource().index
        for parsed in sigmaparser.condparsed:
            result = self.generateNode(parsed.parsedSearch)
            for index in indices:
                final_rulename = rulename
                if len(indices) > 1:    # rule would have to be replicated because of ambiguous index patterns
                    raise NotSupportedError("Multiple target indices are not supported by Kibana")
                else:
                    title = sigmaparser.parsedyaml["title"]
                    # NOTE(review): assumes self.options is a dict-like BackendOptions;
                    # a missing "prefix" key simply leaves the title unchanged.
                    try:
                        title = self.options["prefix"] + title
                    except KeyError:
                        pass
                    self.kibanaconf.append({
                        "_id": final_rulename,
                        "_type": "search",
                        "_source": {
                            "title": title,
                            "description": description,
                            "hits": 0,
                            "columns": columns,
                            "sort": ["@timestamp", "desc"],
                            "version": 1,
                            "kibanaSavedObjectMeta": {
                                # Kibana stores the actual search as an embedded JSON string.
                                "searchSourceJSON": json.dumps({
                                    "index": index,
                                    "filter": [],
                                    "highlight": {
                                        "pre_tags": ["@kibana-highlighted-field@"],
                                        "post_tags": ["@/kibana-highlighted-field@"],
                                        "fields": {"*": {}},
                                        "require_field_match": False,
                                        "fragment_size": 2147483647
                                    },
                                    "query": {
                                        "query_string": {
                                            "query": result,
                                            "analyze_wildcard": True
                                        }
                                    }
                                })
                            }
                        }
                    })

    def finalize(self):
        self.output.print(json.dumps(self.kibanaconf, indent=2))
2017-02-13 22:14:40 +00:00
2017-09-29 23:03:08 +00:00
class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
    """Converts Sigma Rule into X-Pack Watcher JSON for alerting"""
    identifier = "xpack-watcher"
    active = True
    output_class = SingleOutput

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.watcher_alert = dict()
        # Backend options: output format ("curl" or "plain") and target ES host.
        try:
            self.output_type = self.options["output"]
        except KeyError:
            self.output_type = "curl"
        try:
            self.es = self.options["es"]
        except KeyError:
            self.es = "localhost:9200"

    def generate(self, sigmaparser):
        # Collect rule metadata used for the logging action text.
        rulename = self.getRuleName(sigmaparser)
        description = sigmaparser.parsedyaml.setdefault("description", "")
        false_positives = sigmaparser.parsedyaml.setdefault("falsepositives", "")
        level = sigmaparser.parsedyaml.setdefault("level", "")
        logging_result = "Rule description: " + str(description) + ", false positives: " + str(false_positives) + ", level: " + level
        # Time frame of the rule, defaulting to 30 minutes.
        interval = sigmaparser.parsedyaml["detection"].setdefault("timeframe", "30m")

        indices = sigmaparser.get_logsource().index
        for condition in sigmaparser.condparsed:
            result = self.generateNode(condition.parsedSearch)
            # Translate an aggregation comparison (e.g. "count() > 5") into a
            # watcher compare condition; anything else defaults to "hits != 0".
            alert_condition = {"not_eq": 0}
            try:
                cond_op = condition.parsedAgg.cond_op
                if cond_op == ">":
                    alert_condition = {"gt": int(condition.parsedAgg.condition)}
                elif cond_op == ">=":
                    alert_condition = {"gte": int(condition.parsedAgg.condition)}
                elif cond_op == "<":
                    alert_condition = {"lt": int(condition.parsedAgg.condition)}
                elif cond_op == "<=":
                    alert_condition = {"lte": int(condition.parsedAgg.condition)}
            except (KeyError, AttributeError):  # no aggregation attached to this condition
                alert_condition = {"not_eq": 0}

            self.watcher_alert[rulename] = {
                "trigger": {
                    "schedule": {
                        "interval": interval    # how often the watcher should check
                    }
                },
                "input": {
                    "search": {
                        "request": {
                            "body": {
                                "size": 0,
                                "query": {
                                    "query_string": {
                                        "query": result,    # translated Elasticsearch query string
                                        "analyze_wildcard": True
                                    }
                                }
                            },
                            "indices": indices
                        }
                    }
                },
                "condition": {
                    "compare": {    # TODO: Issue #49
                        "ctx.payload.hits.total": alert_condition
                    }
                },
                "actions": {
                    "logging-action": {
                        "logging": {
                            "text": logging_result
                        }
                    }
                }
            }

    def finalize(self):
        for rulename, rule in self.watcher_alert.items():
            body = json.dumps(rule, indent=2)
            if self.output_type == "plain":     # output request line + body
                self.output.print("PUT _xpack/watcher/watch/%s\n%s\n" % (rulename, body))
            elif self.output_type == "curl":    # output curl command line
                self.output.print("curl -s -XPUT --data-binary @- %s/_xpack/watcher/watch/%s <<EOF\n%s\nEOF" % (self.es, rulename, body))
            else:
                raise NotImplementedError("Output type '%s' not supported" % self.output_type)
2017-02-13 22:14:40 +00:00
2017-06-02 21:43:45 +00:00
class LogPointBackend(SingleTextQueryBackend):
    """Converts Sigma rule into LogPoint query"""
    identifier = "logpoint"
    active = True

    reEscape = re.compile('(["\\\\])')
    reClear = None
    andToken = " "
    orToken = " OR "
    notToken = " -"
    subExpression = "(%s)"
    listExpression = "[%s]"
    listSeparator = ", "
    valueExpression = "\"%s\""
    mapExpression = "%s=%s"
    mapListsSpecialHandling = True
    mapListValueExpression = "%s IN %s"

    def generateAggregation(self, agg):
        """Render the aggregation part (| chart ... | search ...) of a LogPoint query."""
        if agg == None:
            return ""
        if agg.aggfunc == sigma.SigmaAggregationParser.AGGFUNC_NEAR:
            raise NotImplementedError("The 'near' aggregation operator is not yet implemented for this backend")
        if agg.groupfield == None:
            return " | chart %s(%s) as val | search val %s %s" % (agg.aggfunc_notrans, agg.aggfield, agg.cond_op, agg.condition)
        return " | chart %s(%s) as val by %s | search val %s %s" % (agg.aggfunc_notrans, agg.aggfield, agg.groupfield, agg.cond_op, agg.condition)
2017-06-02 21:43:45 +00:00
class SplunkBackend(SingleTextQueryBackend):
    """Converts Sigma rule into Splunk Search Processing Language (SPL)."""
    identifier = "splunk"
    active = True
    index_field = "index"

    reEscape = re.compile('(["\\\\])')
    reClear = None
    andToken = " "
    orToken = " OR "
    notToken = "NOT "
    subExpression = "(%s)"
    listExpression = "(%s)"
    listSeparator = " "
    valueExpression = "\"%s\""
    mapExpression = "%s=%s"
    mapListsSpecialHandling = False
    mapListValueExpression = "%s IN %s"

    def generateMapItemListNode(self, key, value):
        # Bugfix: the base class invokes this hook as generateMapItemListNode(key, value);
        # the previous signature (self, node) left 'key' and 'value' undefined and
        # raised NameError as soon as the method was called.
        return "(" + (" OR ".join(['%s=%s' % (key, self.generateValueNode(item)) for item in value])) + ")"

    def generateAggregation(self, agg):
        """Render the aggregation part (| stats ... | search ...) of an SPL query."""
        if agg is None:
            return ""
        if agg.aggfunc == sigma.SigmaAggregationParser.AGGFUNC_NEAR:
            raise NotImplementedError("The 'near' aggregation operator is not yet implemented for this backend")
        if agg.groupfield is None:
            return " | stats %s(%s) as val | search val %s %s" % (agg.aggfunc_notrans, agg.aggfield, agg.cond_op, agg.condition)
        else:
            return " | stats %s(%s) as val by %s | search val %s %s" % (agg.aggfunc_notrans, agg.aggfield, agg.groupfield, agg.cond_op, agg.condition)
2017-10-18 17:03:38 +00:00
class GrepBackend(BaseBackend, QuoteCharMixin):
    """Generates Perl compatible regular expressions and puts 'grep -P' around it"""
    identifier = "grep"
    active = True
    output_class = SingleOutput

    reEscape = re.compile("([\\|()\[\]{}.^$])")

    def generate(self, sigmaparser):
        for condition in sigmaparser.condparsed:
            self.output.print("grep -P '^%s'" % self.generateNode(condition.parsedSearch))

    def cleanValue(self, val):
        # Escape regex metacharacters first, then turn Sigma '*' wildcards into '.*'.
        return re.sub("\\*", ".*", super().cleanValue(val))

    def generateORNode(self, node):
        alternatives = [".*" + self.generateNode(child) for child in node]
        return "(?:%s)" % "|".join(alternatives)

    def generateANDNode(self, node):
        # AND is expressed as a chain of lookaheads that must all match.
        return "".join("(?=.*%s)" % self.generateNode(child) for child in node)

    def generateNOTNode(self, node):
        return "(?!.*%s)" % self.generateNode(node.item)

    def generateSubexpressionNode(self, node):
        return "(?:.*%s)" % self.generateNode(node.items)

    def generateListNode(self, node):
        if not {type(item) for item in node} <= {str, int}:
            raise TypeError("List values must be strings or numbers")
        return self.generateORNode(node)

    def generateMapItemNode(self, node):
        # grep cannot match on field names, only on values.
        _, value = node
        return self.generateNode(value)

    def generateValueNode(self, node):
        return self.cleanValue(str(node))
2017-05-26 21:42:49 +00:00
### Backends for developement purposes
2017-03-06 21:47:30 +00:00
class FieldnameListBackend(BaseBackend):
    """List all fieldnames from given Sigma rules for creation of a field mapping configuration."""
    identifier = "fieldlist"
    active = True
    output_class = SingleOutput

    def generate(self, sigmaparser):
        for condition in sigmaparser.condparsed:
            fieldnames = set(flatten(self.generateNode(condition.parsedSearch)))
            self.output.print("\n".join(sorted(fieldnames)))

    def generateANDNode(self, node):
        return [self.generateNode(child) for child in node]

    def generateORNode(self, node):
        return self.generateANDNode(node)

    def generateNOTNode(self, node):
        return self.generateNode(node.item)

    def generateSubexpressionNode(self, node):
        return self.generateNode(node.items)

    def generateListNode(self, node):
        if not {type(item) for item in node} <= {str, int}:
            raise TypeError("List values must be strings or numbers")
        return [self.generateNode(item) for item in node]

    def generateMapItemNode(self, node):
        key, value = node
        if type(value) not in (str, int, list):
            raise TypeError("Map values must be strings, numbers or lists, not " + str(type(value)))
        # Only the field name matters for this backend; values are discarded.
        return [key]

    def generateValueNode(self, node):
        return []
# Helpers
def flatten(l):
    """Yield the non-list leaves of an arbitrarily nested list structure, in order."""
    stack = [iter(l)]
    while stack:
        try:
            item = next(stack[-1])
        except StopIteration:
            stack.pop()
            continue
        if type(item) == list:
            stack.append(iter(item))
        else:
            yield item
2017-10-22 22:45:01 +00:00
# Exceptions
class BackendError(Exception):
    """Base exception for backend-specific errors."""


class NotSupportedError(BackendError):
    """Exception is raised if some output is required that is not supported by the target language."""