Support for Mitre ATT&CK framework
qjerome committed Jun 4, 2019
1 parent aab11d1 commit bc3b158
Showing 6 changed files with 140 additions and 20 deletions.
14 changes: 14 additions & 0 deletions engine/engine.go
@@ -120,6 +120,7 @@ type Engine struct {
nameFilters datastructs.SyncedSet
trace bool
dumpRaw bool
showAttck bool
// Used to mark the traces and not duplicate those
markedTraces datastructs.SyncedSet
containers *rules.ContainerDB
@@ -165,6 +166,11 @@ func (e *Engine) SetFilters(names, tags []string) {
}
}

// SetShowAttck sets the engine flag to display ATT&CK information in matching events
func (e *Engine) SetShowAttck(value bool) {
e.showAttck = value
}

//Count returns the number of rules successfully loaded
func (e *Engine) Count() int {
return len(e.rules)
@@ -442,11 +448,13 @@ func (e *Engine) AddTraceRules(ruleList ...*rules.CompiledRule) {
func (e *Engine) Match(event *evtx.GoEvtxMap) (names []string, criticality int) {
traces := make([]*rules.CompiledRule, 0)
names = make([]string, 0)
attcks := make([]rules.Attack, 0)

e.RLock()
for _, r := range e.rules {
if r.Match(event) {
names = append(names, r.Name)
attcks = append(attcks, r.Attck...)
criticality += r.Criticality
// If we decide to trace the other events matching the rules
if e.trace {
@@ -483,6 +491,12 @@ func (e *Engine) Match(event *evtx.GoEvtxMap) (names []string, criticality int)
genInfo := map[string]interface{}{
"Signature": names,
"Criticality": criticality}

// Update ATT&CK information if needed
if e.showAttck && len(attcks) > 0 {
genInfo["ATTACK"] = attcks
}

event.Set(&geneInfoPath, genInfo)
return
}
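
Taken together, the engine changes mean a caller only needs to set the new flag before matching to get ATT&CK data copied into the GeneInfo section of matching events. Below is a minimal sketch, not part of the commit, written as if it sat in package engine next to the tests (NewEngine, NewSeekBuffer, LoadReader and Match are the calls those tests exercise; the rule string and event are supplied by the caller):

package engine

import (
	"fmt"

	"github.com/0xrawsec/golang-evtx/evtx"
)

// showAttckExample is a hypothetical helper illustrating the new flag.
func showAttckExample(rule string, event *evtx.GoEvtxMap) {
	e := NewEngine(false)
	// Opt in to ATT&CK enrichment of matching events
	e.SetShowAttck(true)
	if err := e.LoadReader(NewSeekBuffer([]byte(rule))); err != nil {
		panic(err)
	}
	if names, crit := e.Match(event); len(names) > 0 {
		// The event now carries Signature, Criticality and, because
		// showAttck is set, an ATTACK array in its GeneInfo section.
		fmt.Printf("matched %v (criticality=%d)\n", names, crit)
	}
}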
47 changes: 46 additions & 1 deletion engine/engine_test.go
@@ -2,7 +2,6 @@ package engine

import (
"encoding/json"
"engine"
"fmt"
"io"
"os"
@@ -130,6 +129,14 @@ func init() {
}
}

func prettyJSON(i interface{}) string {
b, err := json.MarshalIndent(i, "", " ")
if err != nil {
panic(err)
}
return string(b)
}

func openEvtx(path string) *evtx.File {
f, err := evtx.New(path)
if err != nil {
@@ -179,6 +186,44 @@ func TestMatch(t *testing.T) {
}
}

func TestMatchAttck(t *testing.T) {
rule := `{
"Name": "ShouldMatch",
"Meta": {
"Channels": ["Microsoft-Windows-Sysmon/Operational"],
"ATTACK": [
{
"ID": "T666",
"Tactic": "Blow everything up",
"Reference": "https://attack.mitre.org/"
},
{
"ID": "S4242",
"Description": "Super nasty software",
"Reference": "https://attack.mitre.org/"
}
]
},
"Matches": [
"$a: Hashes ~= 'SHA1=65894B0162897F2A6BB8D2EB13684BF2B451FDEE,'"
],
"Condition": "$a"
}`

e := NewEngine(false)
e.SetShowAttck(true)
if err := e.LoadReader(NewSeekBuffer([]byte(rule))); err != nil {
t.Logf("Loading failed: %s", err)
t.FailNow()
}
t.Logf("Successfuly loaded %d rules", e.Count())
if m, _ := e.Match(&event); len(m) == 0 {
t.Fail()
} else {
t.Log(prettyJSON(event))
}
}

func TestMatchByTag(t *testing.T) {
rules := `{
"Name": "ShouldMatch",
1 change: 0 additions & 1 deletion engine/validation_test.go
@@ -1,7 +1,6 @@
package engine

import (
"engine"
"testing"
)

10 changes: 8 additions & 2 deletions gene/gene.go
@@ -168,7 +168,7 @@ func reduce(e *engine.Engine) {
sigs = append(sigs, s.(string))
}

reducer.Update(computer, sigs)
reducer.Update(e.TimeCreated(), computer, sigs)
}
}()
}
@@ -198,6 +198,7 @@ var (
listTags bool
versionFlag bool
reduceFlag bool
showAttckFlag bool
cpuprofile string
tags []string
names []string
@@ -229,6 +230,7 @@ func main() {
flag.BoolVar(&listTags, "lt", listTags, "List tags of rules loaded into the engine")
flag.BoolVar(&versionFlag, "version", versionFlag, "Show version information and exit")
flag.BoolVar(&reduceFlag, "reduce", reduceFlag, "Aggregate the results of already processed events and outputs condensed information")
flag.BoolVar(&showAttckFlag, "a", showAttckFlag, "Show Mitre ATT&CK information in matching events")
flag.StringVar(&rulesPath, "r", rulesPath, "Rule file or directory")
flag.StringVar(&cpuprofile, "cpuprofile", cpuprofile, "Profile CPU")
flag.StringVar(&whitelist, "wl", whitelist, "File containing values to insert into the whitelist")
@@ -280,7 +282,9 @@ func main() {

// Display rule template and exit if template flag
if template {
b, err := json.Marshal(rules.NewRule())
r := rules.NewRule()
r.Meta.Attack = append(r.Meta.Attack, rules.Attack{})
b, err := json.Marshal(r)
if err != nil {
log.LogErrorAndExit(err, exitFail)
}
@@ -300,6 +304,8 @@
names = []string(namesVar)
// Enable rule dumping on engine side
e.SetDumpRaw(len(dumpsVar) > 0)
// Enable showing Mitre ATT&CK information
e.SetShowAttck(showAttckFlag)

// Validation
if len(tags) > 0 && len(names) > 0 {
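
From the command line, the same behaviour is exposed through the -a switch registered above. A hypothetical invocation (the rules directory and the trailing EVTX argument are placeholders standing for whatever event source gene is normally pointed at):

gene -a -r ./rules <evtx files>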
73 changes: 57 additions & 16 deletions reducer/reducer.go
@@ -2,7 +2,12 @@ package reducer

import (
"fmt"
"math"
"sync"
"time"

"github.com/0xrawsec/golang-utils/datastructs"
"github.com/0xrawsec/golang-utils/log"

"github.com/0xrawsec/gene/engine"
"github.com/0xrawsec/golang-evtx/evtx"
@@ -11,35 +16,53 @@ import (

//////////////////////////// Reducer ////////////////////////////////

// ReducedStats structrur definition
// ReducedStats structure definition
type ReducedStats struct {
Computer string
CntEvents int
CntBySig map[string]int
UniqSigs []string
UniqSigs []string `json:"Signatures"`
SumRuleCrit int
SumEvtCrit int
TotalSigs int
AvgEvtsCrit float64
AvgSigCrit float64
StdDevCrit float64
SigDiv float64
SigDiv float64 `json:"SignatureDiversity"`
CntUniqSigs int
CntUniqByAvgCritBySig int `json:"Metric0"`
AvgEvtCritBySigDiv float64 `json:"Metric1"`
CntUniqByAvgCritBySig int `json:"AugmentedSigCriticality"`
AvgEvtCritBySigDiv float64 `json:"WeightedEventsCriticality"`
StartTime time.Time
MedianTime time.Time
StopTime time.Time
sigCrits []float64
evtCrits []float64
eng *engine.Engine
}

// NewReducedStats structure
func NewReducedStats(e *engine.Engine, computer string) *ReducedStats {
return &ReducedStats{Computer: computer, CntBySig: make(map[string]int), UniqSigs: make([]string, 0), sigCrits: make([]float64, 0), eng: e}
return &ReducedStats{Computer: computer,
CntBySig: make(map[string]int),
UniqSigs: make([]string, 0),
sigCrits: make([]float64, 0),
eng: e}
}

// Update ReducedStats with data
func (rs *ReducedStats) Update(matches []string) {
func (rs *ReducedStats) Update(t time.Time, matches []string) {
evtCrit := 0

// Set StartTime
if t.Before(rs.StartTime) || rs.StartTime.IsZero() {
rs.StartTime = t
}

// Set StopTime
if t.After(rs.StopTime) {
rs.StopTime = t
}

for _, m := range matches {
rs.CntBySig[m]++
rs.TotalSigs++
@@ -61,30 +84,33 @@ func (rs *ReducedStats) Update(matches []string) {
}

rs.CntEvents++
//rs.SumCriticalities += criticality
//rs.criticalities = append(rs.criticalities, float64(criticality))
}

// Finalize the computation of the statistics
func (rs *ReducedStats) Finalize() {
func (rs *ReducedStats) Finalize(cntSigs int) {
rs.AvgEvtsCrit = stats.Truncate(float64(rs.SumEvtCrit)/float64(rs.CntEvents), 2)

rs.AvgSigCrit = stats.Truncate(float64(rs.SumRuleCrit)/float64(rs.TotalSigs), 2)
rs.CntUniqSigs = len(rs.CntBySig)
rs.CntUniqByAvgCritBySig = rs.CntUniqSigs * int(rs.AvgSigCrit)
rs.CntUniqByAvgCritBySig = rs.CntUniqSigs * int(math.Round(rs.AvgSigCrit))

// Compute Standard Dev
rs.StdDevCrit = stats.Truncate(stats.StdDev(rs.sigCrits), 2)
rs.SigDiv = float64(rs.CntUniqSigs) * 100 / float64(rs.eng.Count())
rs.AvgEvtCritBySigDiv = stats.Truncate((rs.AvgEvtsCrit * rs.SigDiv), 2)

// The diversity is relative to the number of signatures observed
// across the dataset
rs.SigDiv = stats.Truncate(float64(rs.CntUniqSigs)*100/float64(cntSigs), 2)
rs.AvgEvtCritBySigDiv = math.Round((rs.AvgEvtsCrit * rs.SigDiv))

for s := range rs.CntBySig {
rs.UniqSigs = append(rs.UniqSigs, s)
}

rs.MedianTime = rs.StartTime.Add((rs.StopTime.Sub(rs.StartTime)) / 2)
}

func (rs ReducedStats) String() string {
rs.Finalize()
//rs.Finalize()
return string(evtx.ToJSON(rs))
}

@@ -101,19 +127,34 @@ func NewReducer(e *engine.Engine) *Reducer {
}

// Update a ReducedStats stored in Reducer with data
func (r *Reducer) Update(computer string, matches []string) {
func (r *Reducer) Update(t time.Time, computer string, matches []string) {
r.Lock()
if _, ok := r.m[computer]; !ok {
r.m[computer] = NewReducedStats(r.e, computer)
}
rs := r.m[computer]
rs.Update(matches)
rs.Update(t, matches)
r.Unlock()
}

// CountUniqSigs counts all the unique signatures seen in the reduced stats
func (r *Reducer) CountUniqSigs() int {
uniqSigs := datastructs.NewSyncedSet()
for comp := range r.m {
for sig := range r.m[comp].CntBySig {
uniqSigs.Add(sig)
}
}
return uniqSigs.Len()
}

// Print prints out all the information stored in the Reducer
func (r *Reducer) Print() {
cnt := r.CountUniqSigs()
log.Infof("cnt:%d", cnt)
//cnt := r.e.Count()
for computer := range r.m {
r.m[computer].Finalize(cnt)
fmt.Println(r.m[computer])
}
}
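
To make the reworked per-host metrics concrete, here is a worked example under assumed numbers. Suppose a host produced CntEvents = 10 events with SumEvtCrit = 50, matched TotalSigs = 16 signature hits summing SumRuleCrit = 40, and triggered CntUniqSigs = 4 distinct signatures while the whole dataset exposed cntSigs = 20 unique signatures. Then AvgEvtsCrit = 50/10 = 5.0, AvgSigCrit = 40/16 = 2.5, AugmentedSigCriticality = 4 * round(2.5) = 12, SignatureDiversity = 4 * 100 / 20 = 20.0 and WeightedEventsCriticality = round(5.0 * 20.0) = 100. MedianTime is simply the midpoint between StartTime and StopTime.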
15 changes: 15 additions & 0 deletions rules/rules.go
@@ -31,6 +31,8 @@ type CompiledRule struct {
Disabled bool // Way to deal with no container issue
Conditions *ConditionElement
containers *ContainerDB
// ATT&CK information
Attck []Attack
}

//NewCompiledRule initializes and returns an EvtxRule object
@@ -40,6 +42,7 @@ func NewCompiledRule() (er CompiledRule) {
er.Computers = datastructs.NewSyncedSet()
er.EventIDs = datastructs.NewSyncedSet()
er.AtomMap = datastructs.NewSyncedMap()
er.Attck = make([]Attack, 0)
return
}

@@ -125,12 +128,21 @@ func (oe *EventOpReader) Read(operand string) (value bool, ok bool) {
// Temporary: we use JSON for easy parsing right now, lets see if we need to
// switch to another format in the future

// Attack structure definition to encode information from the MITRE ATT&CK framework
type Attack struct {
ID string
Tactic string
Description string `json:",omitempty"`
Reference string
}

//MetaSection defines the section holding the metadata of the rule
type MetaSection struct {
EventIDs []int64 // GoEvtxMap.EventID returns int64
Channels []string
Computers []string
Traces []string
Attack []Attack `json:"ATTACK"`
Criticality int
Disable bool
}
@@ -154,6 +166,7 @@ func NewRule() Rule {
Channels: make([]string, 0),
Computers: make([]string, 0),
Traces: make([]string, 0),
Attack: make([]Attack, 0),
Criticality: 0},
Matches: make([]string, 0),
Condition: ""}
@@ -185,6 +198,8 @@ func (jr *Rule) Compile(containers *ContainerDB) (*CompiledRule, error) {

rule.Name = jr.Name
rule.Criticality = globals.Bound(jr.Meta.Criticality)
// Pass ATT&CK information to compiled rule
rule.Attck = jr.Meta.Attack
for _, t := range jr.Tags {
rule.Tags.Add(t)
}
