From 81bbdb87e98f144c263bc1fa19076b0a09d32b62 Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Wed, 7 Jun 2017 10:53:00 -0400 Subject: [PATCH 001/117] Updated data size score --- analysis/beacon/beacon.go | 39 ++++++++++++++++++++++++++++---- analysis/structure/uconn.go | 4 ++++ commands/show-beacons.go | 8 +++---- datatypes/beacon/beacon.go | 2 ++ datatypes/data/data.go | 21 +++++++++-------- datatypes/structure/structure.go | 2 +- 6 files changed, 57 insertions(+), 19 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index f5575f70..7461d5e9 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -46,7 +46,7 @@ type ( uconnID bson.ObjectId // Unique Connection ID ts []int64 // Connection timestamps for this src, dst pair //dur []int64 - //orig_bytes []int64 + orig_bytes []int64 //resp_bytes []int64 } ) @@ -195,6 +195,7 @@ func (t *Beacon) collect() { for connIter.Next(&conn) { newInput.ts = append(newInput.ts, conn.Ts) + newInput.orig_bytes = append(newInput.orig_bytes, conn.OriginBytes) } t.analysisChannel <- newInput } @@ -206,8 +207,9 @@ func (t *Beacon) collect() { // analyze src, dst pairs with their connection data func (t *Beacon) analyze() { for data := range t.analysisChannel { - //sort the timestamps since they may have arrived out of order + //sort the size and timestamps since they may have arrived out of order sort.Sort(util.SortableInt64(data.ts)) + sort.Sort(util.SortableInt64(data.orig_bytes)) //remove subsecond communications //these will appear as beacons if we do not remove them @@ -221,9 +223,10 @@ func (t *Beacon) analyze() { } //store the diff slice length since we use it a lot - //this is one less then the data slice length + //for timestamps this is one less then the data slice length //since we are calculating the times in between readings length := len(data.ts) - 1 + ds_length := len(data.orig_bytes) //find the duration of this connection //perfect beacons should fill the observation 
period @@ -236,10 +239,11 @@ func (t *Beacon) analyze() { diff[i] = data.ts[i+1] - data.ts[i] } - //perfect beacons should have symmetric delta time distributions + //perfect beacons should have symmetric delta time and size distributions //Bowley's measure of skew is used to check symmetry sort.Sort(util.SortableInt64(diff)) bSkew := float64(0) + ds_bSkew := float64(0) //length -1 is used since diff is a zero based slice low := diff[util.Round(.25*float64(length-1))] @@ -248,11 +252,21 @@ func (t *Beacon) analyze() { bNum := low + high - 2*mid bDen := high - low + ds_low := data.orig_bytes[util.Round(.25*float64(ds_length-1))] + ds_mid := data.orig_bytes[util.Round(.5*float64(ds_length-1))] + ds_high := data.orig_bytes[util.Round(.75*float64(ds_length-1))] + ds_bNum := ds_low + ds_high - 2*ds_mid + ds_bDen := ds_high - ds_low + //bSkew should equal zero if the denominator equals zero //bowley skew is unreliable if Q2 = Q1 or Q2 = Q3 if bDen != 0 && mid != low && mid != high { bSkew = float64(bNum) / float64(bDen) } + + if ds_bDen != 0 { + ds_bSkew = float64(ds_bNum) / float64(ds_bDen) + } //perfect beacons should have very low dispersion around the //median of their delta times @@ -262,9 +276,17 @@ func (t *Beacon) analyze() { for i := 0; i < length; i++ { devs[i] = util.Abs(diff[i] - mid) } + + ds_devs := make([]int64, ds_length) + for i := 0; i < ds_length; i++ { + ds_devs[i] = util.Abs(data.orig_bytes[i] - ds_mid) + } sort.Sort(util.SortableInt64(devs)) + sort.Sort(util.SortableInt64(ds_devs)) + madm := devs[util.Round(.5*float64(length-1))] + ds_madm := ds_devs[util.Round(.5*float64(ds_length-1))] //Store the range for human analysis iRange := diff[length-1] - diff[0] @@ -289,16 +311,24 @@ func (t *Beacon) analyze() { //more skewed distributions recieve a lower score //less skewed distributions recieve a higher score alpha := 1.0 - math.Abs(bSkew) + ds_alpha := 1.0 - math.Abs(ds_bSkew) //lower dispersion is better, cutoff dispersion scores at 30 seconds beta 
:= 1.0 - float64(madm)/30.0 if beta < 0 { beta = 0 } + ds_beta := 1.0 - float64(ds_madm) + if ds_beta < 0 { + ds_beta = 0 + } + gamma := duration //in order of ascending importance: skew, duration, dispersion output.TS_score = (alpha + beta + gamma) / 3.0 + //in order of ascending importance: skew, dispersion + output.DS_score = (ds_alpha + ds_beta) / 2.0 t.writeChannel <- &output } @@ -376,6 +406,7 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { { {"$project", bson.D{ {"ts_score", 1}, + {"ds_score", 1}, {"src", "$uconn.src"}, {"dst", "$uconn.dst"}, {"local_src", "$uconn.local_src"}, diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index 27566b8c..d2bba04a 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -88,6 +88,9 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] }}, }}, }}, + {"orig_bytes", bson.D{ + {"$first", "$orig_bytes"}, + }}, {"avg_bytes", bson.D{ {"$avg", bson.D{ {"$add", []interface{}{ @@ -110,6 +113,7 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] {"local_src", 1}, {"local_dst", 1}, {"total_bytes", 1}, + {"orig_bytes", 1}, {"avg_bytes", 1}, {"total_duration", 1}, {"uid", 1}, diff --git a/commands/show-beacons.go b/commands/show-beacons.go index 6ce203d5..b9801c62 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -54,7 +54,7 @@ func showBeacons(c *cli.Context) error { func showBeaconReport(data []beaconData.BeaconAnalysisView) error { table := tablewriter.NewWriter(os.Stdout) - table.SetHeader([]string{"Score", "Source IP", "Destination IP", + table.SetHeader([]string{"Score", "Size Score", "Source IP", "Destination IP", "Connections", "Avg. 
Bytes", "Intvl Range", "Top Intvl", "Top Intvl Count", "Intvl Skew", "Intvl Dispersion", "Intvl Duration"}) f := func(f float64) string { @@ -66,7 +66,7 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { for _, d := range data { table.Append( []string{ - f(d.TS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), + f(d.TS_score), f(d.DS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), i(d.TS_iRange), i(d.TS_iMode), i(d.TS_iModeCount), f(d.TS_iSkew), i(d.TS_iDispersion), f(d.TS_duration)}) } @@ -75,8 +75,8 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { } func showBeaconCsv(data []beaconData.BeaconAnalysisView) error { - tmpl := "{{.TS_score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," - tmpl += "{{.TS_iRange}},{{.TS_iMode}},{{.TS_iModeCount}}," + tmpl := "{{.TS_score}},{{.DS_score}},{{.Src}},{{.Dst}},{{.Connections}}," + tmpl += "{{.AvgBytes}}{{.TS_iRange}},{{.TS_iMode}},{{.TS_iModeCount}}," tmpl += "{{.TS_iSkew}},{{.TS_iDispersion}},{{.TS_duration}}\n" out, err := template.New("beacon").Parse(tmpl) diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index d3d829d3..f66c5ba0 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -16,6 +16,7 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` + DS_score float64 `bson:"ds_score"` TS_intervals []int64 `bson:"ts_intervals"` TS_intervalCounts []int64 `bson:"ts_interval_counts"` } @@ -36,5 +37,6 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` + DS_score float64 `bson:"ds_score"` } ) diff --git a/datatypes/data/data.go b/datatypes/data/data.go index 3bb1d748..7cb6b74f 100644 --- a/datatypes/data/data.go +++ b/datatypes/data/data.go @@ -8,16 +8,17 @@ type ( // not in this Conn structure use parser.Conn instead. 
Conn struct { ID bson.ObjectId `bson:"_id,omitempty"` - Ts int64 `bson:"ts,omitempty"` - UID string `bson:"uid"` - Src string `bson:"id_origin_h,omitempty"` - Spt int `bson:"id_origin_p,omitempty"` - Dst string `bson:"id_resp_h,omitempty"` - Dpt int `bson:"id_resp_p,omitempty"` - Dur float64 `bson:"duration,omitempty"` - Proto string `bson:"proto,omitempty"` - LocalSrc bool `bson:"local_orig,omitempty"` - LocalDst bool `bson:"local_resp,omitempty"` + Ts int64 `bson:"ts,omitempty"` + UID string `bson:"uid"` + Src string `bson:"id_origin_h,omitempty"` + Spt int `bson:"id_origin_p,omitempty"` + Dst string `bson:"id_resp_h,omitempty"` + Dpt int `bson:"id_resp_p,omitempty"` + Dur float64 `bson:"duration,omitempty"` + Proto string `bson:"proto,omitempty"` + LocalSrc bool `bson:"local_orig,omitempty"` + LocalDst bool `bson:"local_resp,omitempty"` + OriginBytes int64 `bson:"orig_bytes,omitempty"` } // DNS provides structure for a subset of the fields in the diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index e189f4ed..4e3ba3d7 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -18,7 +18,7 @@ type ( Dst string `bson:"dst"` LocalSrc bool `bson:"local_src"` LocalDst bool `bson:"local_dst"` - TotalBytes int `bson:"total_bytes"` + OriginBytes int64 `bson:"origin_bytes"` AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } From 0ea6d0536b6373fcb75f0d64cf7cb28ee0a6f2e8 Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Fri, 9 Jun 2017 11:44:02 -0400 Subject: [PATCH 002/117] Fixed errors in uconn.go and structure.go --- analysis/structure/uconn.go | 4 ---- datatypes/structure/structure.go | 1 + 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index d2bba04a..27566b8c 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -88,9 +88,6 @@ func getUniqueConnectionsScript(sysCfg 
*config.SystemConfig) (string, string, [] }}, }}, }}, - {"orig_bytes", bson.D{ - {"$first", "$orig_bytes"}, - }}, {"avg_bytes", bson.D{ {"$avg", bson.D{ {"$add", []interface{}{ @@ -113,7 +110,6 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] {"local_src", 1}, {"local_dst", 1}, {"total_bytes", 1}, - {"orig_bytes", 1}, {"avg_bytes", 1}, {"total_duration", 1}, {"uid", 1}, diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 4e3ba3d7..631e6d8f 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -18,6 +18,7 @@ type ( Dst string `bson:"dst"` LocalSrc bool `bson:"local_src"` LocalDst bool `bson:"local_dst"` + TotalBytes int `bson:"total_bytes"` OriginBytes int64 `bson:"origin_bytes"` AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` From 8ee957a9b4368ae355852e9c53097f08dd9d82a5 Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Wed, 21 Jun 2017 20:54:26 -0400 Subject: [PATCH 003/117] Adds new data size metrics --- analysis/beacon/beacon.go | 57 ++++++++++++++++++++------------ commands/show-beacons.go | 22 +++++++----- datatypes/beacon/beacon.go | 14 ++++++-- datatypes/data/data.go | 24 +++++++------- datatypes/structure/structure.go | 1 - 5 files changed, 73 insertions(+), 45 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 7461d5e9..9bf0d5a0 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -46,7 +46,7 @@ type ( uconnID bson.ObjectId // Unique Connection ID ts []int64 // Connection timestamps for this src, dst pair //dur []int64 - orig_bytes []int64 + orig_ip_bytes []int64 //resp_bytes []int64 } ) @@ -195,7 +195,7 @@ func (t *Beacon) collect() { for connIter.Next(&conn) { newInput.ts = append(newInput.ts, conn.Ts) - newInput.orig_bytes = append(newInput.orig_bytes, conn.OriginBytes) + newInput.orig_ip_bytes = append(newInput.orig_ip_bytes, conn.OriginIPBytes) } 
t.analysisChannel <- newInput } @@ -209,7 +209,7 @@ func (t *Beacon) analyze() { for data := range t.analysisChannel { //sort the size and timestamps since they may have arrived out of order sort.Sort(util.SortableInt64(data.ts)) - sort.Sort(util.SortableInt64(data.orig_bytes)) + sort.Sort(util.SortableInt64(data.orig_ip_bytes)) //remove subsecond communications //these will appear as beacons if we do not remove them @@ -226,7 +226,7 @@ func (t *Beacon) analyze() { //for timestamps this is one less then the data slice length //since we are calculating the times in between readings length := len(data.ts) - 1 - ds_length := len(data.orig_bytes) + ds_length := len(data.orig_ip_bytes) //find the duration of this connection //perfect beacons should fill the observation period @@ -243,7 +243,7 @@ func (t *Beacon) analyze() { //Bowley's measure of skew is used to check symmetry sort.Sort(util.SortableInt64(diff)) bSkew := float64(0) - ds_bSkew := float64(0) + ds_skew := float64(0) //length -1 is used since diff is a zero based slice low := diff[util.Round(.25*float64(length-1))] @@ -252,9 +252,9 @@ func (t *Beacon) analyze() { bNum := low + high - 2*mid bDen := high - low - ds_low := data.orig_bytes[util.Round(.25*float64(ds_length-1))] - ds_mid := data.orig_bytes[util.Round(.5*float64(ds_length-1))] - ds_high := data.orig_bytes[util.Round(.75*float64(ds_length-1))] + ds_low := data.orig_ip_bytes[util.Round(.25*float64(ds_length-1))] + ds_mid := data.orig_ip_bytes[util.Round(.5*float64(ds_length-1))] + ds_high := data.orig_ip_bytes[util.Round(.75*float64(ds_length-1))] ds_bNum := ds_low + ds_high - 2*ds_mid ds_bDen := ds_high - ds_low @@ -264,8 +264,8 @@ func (t *Beacon) analyze() { bSkew = float64(bNum) / float64(bDen) } - if ds_bDen != 0 { - ds_bSkew = float64(ds_bNum) / float64(ds_bDen) + if ds_bDen != 0 && ds_mid != ds_low && ds_mid != ds_high { + ds_skew = float64(ds_bNum) / float64(ds_bDen) } //perfect beacons should have very low dispersion around the @@ -279,7 
+279,7 @@ func (t *Beacon) analyze() { ds_devs := make([]int64, ds_length) for i := 0; i < ds_length; i++ { - ds_devs[i] = util.Abs(data.orig_bytes[i] - ds_mid) + ds_devs[i] = util.Abs(data.orig_ip_bytes[i] - ds_mid) } sort.Sort(util.SortableInt64(devs)) @@ -290,11 +290,13 @@ func (t *Beacon) analyze() { //Store the range for human analysis iRange := diff[length-1] - diff[0] + ds_range := data.orig_ip_bytes[ds_length-1] - data.orig_ip_bytes[0] //get a list of the intervals found in the data, //the number of times the interval was found, //and the most occurring interval intervals, intervalCounts, mode, modeCount := createCountMap(diff) + ds_sizes, ds_counts, ds_mode, ds_modeCount := createCountMap(data.orig_ip_bytes) output := dataBeacon.BeaconAnalysisOutput{ UconnID: data.uconnID, @@ -306,32 +308,40 @@ func (t *Beacon) analyze() { TS_iModeCount: modeCount, TS_intervals: intervals, TS_intervalCounts: intervalCounts, + DS_skew: ds_skew, + DS_dispersion: ds_madm, + DS_range: ds_range, + DS_sizes: ds_sizes, + DS_counts: ds_counts, + DS_mode: ds_mode, + DS_modeCount: ds_modeCount, } //more skewed distributions recieve a lower score //less skewed distributions recieve a higher score alpha := 1.0 - math.Abs(bSkew) - ds_alpha := 1.0 - math.Abs(ds_bSkew) + delta := 1.0 - math.Abs(ds_skew) //lower dispersion is better, cutoff dispersion scores at 30 seconds beta := 1.0 - float64(madm)/30.0 if beta < 0 { beta = 0 } - ds_beta := 1.0 - float64(ds_madm) - if ds_beta < 0 { - ds_beta = 0 + //no cutoff dispersion for data size + epsilon := 1.0 - float64(ds_madm) + if epsilon < 0 { + epsilon = 0 } gamma := duration + //smaller data sizes receive a higher score + zeta := 1.0 - (float64(ds_mode) / 65535.0) - //in order of ascending importance: skew, duration, dispersion - output.TS_score = (alpha + beta + gamma) / 3.0 - //in order of ascending importance: skew, dispersion - output.DS_score = (ds_alpha + ds_beta) / 2.0 + //in order of ascending importance: timestamp skew, timestamp 
duration, + //timestamp dispersion, size skew, size duration, size weight + output.TS_score = (alpha + beta + gamma + delta + epsilon + zeta) / 6.0 t.writeChannel <- &output } - t.analysisWg.Done() } @@ -406,7 +416,6 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { { {"$project", bson.D{ {"ts_score", 1}, - {"ds_score", 1}, {"src", "$uconn.src"}, {"dst", "$uconn.dst"}, {"local_src", "$uconn.local_src"}, @@ -418,6 +427,12 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { {"ts_iMode_count", 1}, {"ts_iSkew", 1}, {"ts_duration", 1}, + {"ts_iDispersion", 1}, + {"ds_dispersion", 1}, + {"ds_range", 1}, + {"ds_mode", 1}, + {"ds_mode_count", 1}, + {"ds_skew", 1}, }}, }, } diff --git a/commands/show-beacons.go b/commands/show-beacons.go index b9801c62..90296078 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -54,9 +54,11 @@ func showBeacons(c *cli.Context) error { func showBeaconReport(data []beaconData.BeaconAnalysisView) error { table := tablewriter.NewWriter(os.Stdout) - table.SetHeader([]string{"Score", "Size Score", "Source IP", "Destination IP", - "Connections", "Avg. Bytes", "Intvl Range", "Top Intvl", - "Top Intvl Count", "Intvl Skew", "Intvl Dispersion", "Intvl Duration"}) + table.SetHeader([]string{"Score", "Source IP", "Destination IP", + "Connections", "Avg. 
Bytes", "Intvl Range", "Size Range", "Top Intvl", + "Top Size", "Top Intvl Count", "Top Size Count", "Intvl Skew", + "Size Skew", "Intvl Dispersion", "Size Dispersion", "Intvl Duration"}) + f := func(f float64) string { return strconv.FormatFloat(f, 'g', 6, 64) } @@ -66,18 +68,20 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { for _, d := range data { table.Append( []string{ - f(d.TS_score), f(d.DS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), - i(d.TS_iRange), i(d.TS_iMode), i(d.TS_iModeCount), f(d.TS_iSkew), - i(d.TS_iDispersion), f(d.TS_duration)}) + f(d.TS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), + i(d.TS_iRange), i(d.DS_range), i(d.TS_iMode), i(d.DS_mode), + i(d.TS_iModeCount), i(d.DS_modeCount), f(d.TS_iSkew), f(d.DS_skew), + i(d.TS_iDispersion), i(d.DS_dispersion), f(d.TS_duration)}) } table.Render() return nil } func showBeaconCsv(data []beaconData.BeaconAnalysisView) error { - tmpl := "{{.TS_score}},{{.DS_score}},{{.Src}},{{.Dst}},{{.Connections}}," - tmpl += "{{.AvgBytes}}{{.TS_iRange}},{{.TS_iMode}},{{.TS_iModeCount}}," - tmpl += "{{.TS_iSkew}},{{.TS_iDispersion}},{{.TS_duration}}\n" + tmpl := "{{.TS_score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," + tmpl += "{{.TS_iRange}},{{.DS_range}},{{.TS_iMode}},{{.DS_mode}},{{.TS_iModeCount}}," + tmpl += "{{.DS_modeCount}},{{.TS_iSkew}},{{.DS_skew}},{{.TS_iDispersion}}," + tmpl += "{{.DS_dispersion}},{{.TS_duration}}\n" out, err := template.New("beacon").Parse(tmpl) if err != nil { diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index f66c5ba0..83deab38 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -16,9 +16,15 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` - DS_score float64 `bson:"ds_score"` TS_intervals []int64 `bson:"ts_intervals"` TS_intervalCounts []int64 `bson:"ts_interval_counts"` + DS_skew float64 `bson:"ds_skew"` + 
DS_dispersion int64 `bson:"ds_dispersion"` + DS_range int64 `bson:"ds_range"` + DS_sizes []int64 `bson:"ds_sizes"` + DS_counts []int64 `bson:"ds_counts"` + DS_mode int64 `bson:"ds_mode"` + DS_modeCount int64 `bson:"ds_mode_count"` } //Used in order to join the uconn and beacon tables @@ -37,6 +43,10 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` - DS_score float64 `bson:"ds_score"` + DS_skew float64 `bson:"ds_skew"` + DS_dispersion int64 `bson:"ds_dispersion"` + DS_range int64 `bson:"ds_range"` + DS_mode int64 `bson:"ds_mode"` + DS_modeCount int64 `bson:"ds_mode_count"` } ) diff --git a/datatypes/data/data.go b/datatypes/data/data.go index 7cb6b74f..21a88d2f 100644 --- a/datatypes/data/data.go +++ b/datatypes/data/data.go @@ -7,18 +7,18 @@ type ( // parser.Conn data structure. If fields are needed that are // not in this Conn structure use parser.Conn instead. Conn struct { - ID bson.ObjectId `bson:"_id,omitempty"` - Ts int64 `bson:"ts,omitempty"` - UID string `bson:"uid"` - Src string `bson:"id_origin_h,omitempty"` - Spt int `bson:"id_origin_p,omitempty"` - Dst string `bson:"id_resp_h,omitempty"` - Dpt int `bson:"id_resp_p,omitempty"` - Dur float64 `bson:"duration,omitempty"` - Proto string `bson:"proto,omitempty"` - LocalSrc bool `bson:"local_orig,omitempty"` - LocalDst bool `bson:"local_resp,omitempty"` - OriginBytes int64 `bson:"orig_bytes,omitempty"` + ID bson.ObjectId `bson:"_id,omitempty"` + Ts int64 `bson:"ts,omitempty"` + UID string `bson:"uid"` + Src string `bson:"id_origin_h,omitempty"` + Spt int `bson:"id_origin_p,omitempty"` + Dst string `bson:"id_resp_h,omitempty"` + Dpt int `bson:"id_resp_p,omitempty"` + Dur float64 `bson:"duration,omitempty"` + Proto string `bson:"proto,omitempty"` + LocalSrc bool `bson:"local_orig,omitempty"` + LocalDst bool `bson:"local_resp,omitempty"` + OriginIPBytes int64 `bson:"orig_ip_bytes,omitempty"` } // DNS provides structure for a subset 
of the fields in the diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 631e6d8f..e189f4ed 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -19,7 +19,6 @@ type ( LocalSrc bool `bson:"local_src"` LocalDst bool `bson:"local_dst"` TotalBytes int `bson:"total_bytes"` - OriginBytes int64 `bson:"origin_bytes"` AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } From 1eda8173d026827334abd801d1b92ad35b7e98da Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 23 Jun 2017 16:10:06 -0600 Subject: [PATCH 004/117] Add config flag. remove all flag. use head --- commands/commands.go | 5 ----- commands/delete-database.go | 3 ++- commands/reporting.go | 2 +- commands/reset-analysis.go | 4 ++-- commands/show-beacons.go | 10 +++------- commands/show-blacklisted.go | 3 ++- commands/show-databases.go | 5 ++++- commands/show-explodedDns.go | 11 +++-------- commands/show-long-connections.go | 10 +++------- commands/show-scans.go | 3 ++- commands/show-urls.go | 17 ++++------------- commands/show-user-agents.go | 12 ++++-------- 12 files changed, 30 insertions(+), 55 deletions(-) diff --git a/commands/commands.go b/commands/commands.go index 14f3e843..7e380141 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -38,11 +38,6 @@ var ( Name: "human-readable, H", Usage: "print a report instead of csv", } - - allFlag = cli.BoolFlag{ - Name: "all, a", - Usage: "print all available records", - } ) // bootstrapCommands simply adds a given command to the allCommands array diff --git a/commands/delete-database.go b/commands/delete-database.go index 22c9e829..78fd1d22 100644 --- a/commands/delete-database.go +++ b/commands/delete-database.go @@ -17,9 +17,10 @@ func init() { Usage: "Delete an imported database", Flags: []cli.Flag{ databaseFlag, + configFlag, }, Action: func(c *cli.Context) error { - res := database.InitResources("") + res := 
database.InitResources(c.String("config")) if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } diff --git a/commands/reporting.go b/commands/reporting.go index df8ac076..4b14c400 100644 --- a/commands/reporting.go +++ b/commands/reporting.go @@ -20,7 +20,7 @@ func init() { }, }, Action: func(c *cli.Context) error { - res := database.InitResources("") + res := database.InitResources(c.String("config")) databaseName := c.String("database") var databases []string if databaseName != "" { diff --git a/commands/reset-analysis.go b/commands/reset-analysis.go index f584d1d9..ee930104 100644 --- a/commands/reset-analysis.go +++ b/commands/reset-analysis.go @@ -17,9 +17,10 @@ func init() { Usage: "Reset analysis of one or more databases", Flags: []cli.Flag{ databaseFlag, + configFlag, }, Action: func(c *cli.Context) error { - res := database.InitResources("") + res := database.InitResources(c.String("config")) if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } @@ -89,4 +90,3 @@ func cleanAnalysis(database string, res *database.Resources) error { } return nil } - diff --git a/commands/show-beacons.go b/commands/show-beacons.go index e8c76a4b..0fc8aad0 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -21,7 +21,7 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, - allFlag, + configFlag, }, Action: showBeacons, } @@ -33,17 +33,13 @@ func showBeacons(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) res.DB.SelectDB(c.String("database")) var data []beaconData.BeaconAnalysisView - cutoffScore := .7 - if c.Bool("all") { - cutoffScore = 0 - } ssn := res.DB.Session.Copy() - resultsView := beacon.GetBeaconResultsView(res, ssn, cutoffScore) + resultsView := beacon.GetBeaconResultsView(res, ssn, 0) if resultsView == nil { 
return errors.New("No beacons were found for " + c.String("database")) } diff --git a/commands/show-blacklisted.go b/commands/show-blacklisted.go index 3cc26790..b8d5323d 100644 --- a/commands/show-blacklisted.go +++ b/commands/show-blacklisted.go @@ -28,6 +28,7 @@ func init() { Usage: "Show sources with results", Destination: &sourcesFlag, }, + configFlag, }, Action: showBlacklisted, } @@ -40,7 +41,7 @@ func showBlacklisted(c *cli.Context) error { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) res.DB.SelectDB(c.String("database")) var result blacklistedData.Blacklist diff --git a/commands/show-databases.go b/commands/show-databases.go index bd1c2f69..5a7dc277 100644 --- a/commands/show-databases.go +++ b/commands/show-databases.go @@ -12,8 +12,11 @@ func init() { databases := cli.Command{ Name: "show-databases", Usage: "Print the databases currently stored", + Flags: []cli.Flag{ + configFlag, + }, Action: func(c *cli.Context) error { - res := database.InitResources("") + res := database.InitResources(c.String("config")) for _, name := range res.MetaDB.GetDatabases() { fmt.Println(name) } diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 314f3b09..d02a06b4 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -20,24 +20,19 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, - allFlag, + configFlag, }, Action: func(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) var explodedResults []dns.ExplodedDNS iter := res.DB.Session.DB(c.String("database")).C(res.System.DNSConfig.ExplodedDNSTable).Find(nil) - count, _ := iter.Count() - if !c.Bool("all") { - count = 15 - } - - iter.Sort("-subdomains").Limit(count).All(&explodedResults) + 
iter.Sort("-subdomains").All(&explodedResults) if c.Bool("human-readable") { return showResultsHuman(explodedResults) diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index 462eba68..5bfb7a6f 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -20,25 +20,21 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, - allFlag, + configFlag, }, Action: func(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) var longConns []data.Conn coll := res.DB.Session.DB(c.String("database")).C(res.System.StructureConfig.ConnTable) sortStr := "-duration" - query := coll.Find(nil).Sort(sortStr) - if !c.Bool("all") { - query.Limit(15) - } - query.All(&longConns) + coll.Find(nil).Sort(sortStr).All(&longConns) if c.Bool("human-readable") { return showConnsHuman(longConns) diff --git a/commands/show-scans.go b/commands/show-scans.go index 534c20b8..a6433aef 100644 --- a/commands/show-scans.go +++ b/commands/show-scans.go @@ -21,13 +21,14 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, + configFlag, }, Action: func(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) var scans []scanning.Scan coll := res.DB.Session.DB(c.String("database")).C(res.System.ScanningConfig.ScanTable) diff --git a/commands/show-urls.go b/commands/show-urls.go index 44af5746..262f3739 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -20,23 +20,19 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, - allFlag, + configFlag, }, Action: func(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := 
database.InitResources(c.String("config")) var urls []urls.URL coll := res.DB.Session.DB(c.String("database")).C(res.System.UrlsConfig.UrlsTable) - query := coll.Find(nil).Sort("-length") - if !c.Bool("all") { - query.Limit(15) - } - query.All(&urls) + coll.Find(nil).Sort("-length").All(&urls) if c.Bool("human-readable") { return showURLsHuman(urls) @@ -51,7 +47,6 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, - allFlag, }, Action: func(c *cli.Context) error { if c.String("database") == "" { @@ -63,11 +58,7 @@ func init() { var urls []urls.URL coll := res.DB.Session.DB(c.String("database")).C(res.System.UrlsConfig.UrlsTable) - query := coll.Find(nil).Sort("-count") - if !c.Bool("all") { - query.Limit(10) - } - query.All(&urls) + coll.Find(nil).Sort("-count").All(&urls) if c.Bool("human-readable") { return showURLsHuman(urls) diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index 5038432d..417b6436 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -20,18 +20,18 @@ func init() { Flags: []cli.Flag{ humanFlag, databaseFlag, - allFlag, cli.BoolFlag{ Name: "least-used, l", - Usage: "Print the least used user agent strings", + Usage: "Sort the user agents from least used to most used.", }, + configFlag, }, Action: func(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) var agents []useragent.UserAgent coll := res.DB.Session.DB(c.String("database")).C(res.System.UserAgentConfig.UserAgentTable) @@ -43,11 +43,7 @@ func init() { sortStr = "-times_used" } - query := coll.Find(nil).Sort(sortStr) - if !c.Bool("all") { - query.Limit(15) - } - query.All(&agents) + coll.Find(nil).Sort(sortStr).All(&agents) if c.Bool("human-readable") { return showAgentsHuman(agents) From ce475de34f5e823250d592b49c1a5ddd570e4d45 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 
23 Jun 2017 16:40:58 -0600 Subject: [PATCH 005/117] Fix error swallowing. Better errors --- commands/analyze.go | 16 ++++++++------- commands/import.go | 8 ++++---- commands/reset-analysis.go | 6 ++---- commands/show-beacons.go | 14 +++++++++---- commands/show-blacklisted.go | 15 ++++++++++++-- commands/show-explodedDns.go | 19 ++++++++++++----- commands/show-long-connections.go | 19 ++++++++++++----- commands/show-scans.go | 19 ++++++++++++----- commands/show-urls.go | 34 ++++++++++++++++++++++++------- commands/show-user-agents.go | 19 ++++++++++++----- 10 files changed, 121 insertions(+), 48 deletions(-) diff --git a/commands/analyze.go b/commands/analyze.go index 7b52df85..d74ac872 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -27,15 +27,14 @@ func init() { configFlag, }, Action: func(c *cli.Context) error { - analyze(c.String("database"), c.String("config")) - return nil + return analyze(c.String("database"), c.String("config")) }, } bootstrapCommands(analyzeCommand) } -func analyze(inDb string, configFile string) { +func analyze(inDb string, configFile string) error { res := database.InitResources(configFile) var toRun []string @@ -46,12 +45,14 @@ func analyze(inDb string, configFile string) { } else { info, err := res.MetaDB.GetDBMetaInfo(inDb) if err != nil { - res.Log.Errorf("Error: %s not found.\n", inDb) - return + errStr := fmt.Sprintf("Error: %s not found.", inDb) + res.Log.Errorf(errStr) + return cli.NewExitError(errStr, -1) } if info.Analyzed { - res.Log.Errorf("Error: %s is already analyzed.\n", inDb) - return + errStr := fmt.Sprintf("Error: %s is already analyzed.", inDb) + res.Log.Errorf(errStr) + return cli.NewExitError(errStr, -1) } toRun = append(toRun, inDb) @@ -120,6 +121,7 @@ func analyze(inDb string, configFile string) { "end_time": endAll.Format(util.TimeFormat), "duration": endAll.Sub(startAll), }).Info("Analysis complete") + return nil } func logAnalysisFunc(analysisName string, databaseName string, diff --git 
a/commands/import.go b/commands/import.go index 4959fc47..291d55d3 100644 --- a/commands/import.go +++ b/commands/import.go @@ -42,10 +42,10 @@ func doImport(c *cli.Context) error { //one flag was set if importDir != "" && databaseName == "" || importDir == "" && databaseName != "" { - fmt.Println("Import failed.\nUse 'rita import' to import the directories " + - "specified in the config file or 'rita import -i [import-dir] -d [database-name]' " + - "to import bro logs from a given directory.") - return nil + return cli.NewExitError( + "Import failed.\nUse 'rita import' to import the directories "+ + "specified in the config file or 'rita import -i [import-dir] -d [database-name]' "+ + "to import bro logs from a given directory.", -1) } //both flags were set diff --git a/commands/reset-analysis.go b/commands/reset-analysis.go index ee930104..36c043ea 100644 --- a/commands/reset-analysis.go +++ b/commands/reset-analysis.go @@ -42,8 +42,7 @@ func cleanAnalysis(database string, res *database.Resources) error { names, err := res.DB.Session.DB(database).CollectionNames() if err != nil || len(names) == 0 { - fmt.Fprintf(os.Stderr, "Failed to find analysis results\n") - return err + return cli.NewExitError("Failed to find analysis results", -1) } fmt.Println("Are you sure you want to reset analysis for", database, "[Y/n]") @@ -81,8 +80,7 @@ func cleanAnalysis(database string, res *database.Resources) error { err3 := res.MetaDB.MarkDBAnalyzed(database, false) if err3 != nil { - fmt.Fprintf(os.Stderr, "Failed to update metadb\n") - return err3 + return cli.NewExitError("Failed to update metadb", -1) } if err == nil && err2Flag == nil && err3 == nil { diff --git a/commands/show-beacons.go b/commands/show-beacons.go index 0fc8aad0..b6de71ef 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -1,7 +1,6 @@ package commands import ( - "errors" "fmt" "os" "strconv" @@ -41,16 +40,23 @@ func showBeacons(c *cli.Context) error { ssn := res.DB.Session.Copy() 
resultsView := beacon.GetBeaconResultsView(res, ssn, 0) if resultsView == nil { - return errors.New("No beacons were found for " + c.String("database")) + return cli.NewExitError("No results were found for "+c.String("database"), -1) } resultsView.All(&data) ssn.Close() if c.Bool("human-readable") { - return showBeaconReport(data) + err := showBeaconReport(data) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showBeaconCsv(data) + err := showBeaconCsv(data) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil } func showBeaconReport(data []beaconData.BeaconAnalysisView) error { diff --git a/commands/show-blacklisted.go b/commands/show-blacklisted.go index b8d5323d..85def566 100644 --- a/commands/show-blacklisted.go +++ b/commands/show-blacklisted.go @@ -50,6 +50,10 @@ func showBlacklisted(c *cli.Context) error { coll := res.DB.Session.DB(c.String("database")).C(res.System.BlacklistedConfig.BlacklistTable) iter := coll.Find(nil).Sort("-count").Iter() + if iter.Done() { + return cli.NewExitError("No results were found for "+c.String("database"), -1) + } + for iter.Next(&result) { if sourcesFlag { blacklisted.SetBlacklistSources(res, &result) @@ -58,9 +62,16 @@ func showBlacklisted(c *cli.Context) error { } if c.Bool("human-readable") { - return showBlacklistedHuman(results) + err := showBlacklistedHuman(results) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showBlacklistedCsv(results) + err := showBlacklistedCsv(results) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil } // showBlacklisted prints all blacklisted for a given database diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index d02a06b4..11f51fdd 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -34,10 +34,21 @@ func init() { iter.Sort("-subdomains").All(&explodedResults) + if len(explodedResults) == 0 { + return cli.NewExitError("No results 
were found for "+c.String("database"), -1) + } + if c.Bool("human-readable") { - return showResultsHuman(explodedResults) + err := showResultsHuman(explodedResults) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showResults(explodedResults) + err := showResults(explodedResults) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil }, } bootstrapCommands(command) @@ -51,15 +62,13 @@ func showResults(dnsResults []dns.ExplodedDNS) error { return err } - var error error for _, result := range dnsResults { err := out.Execute(os.Stdout, result) if err != nil { fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - error = err } } - return error + return nil } func showResultsHuman(dnsResults []dns.ExplodedDNS) error { diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index 5bfb7a6f..831c1c98 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -36,10 +36,21 @@ func init() { coll.Find(nil).Sort(sortStr).All(&longConns) + if len(longConns) == 0 { + return cli.NewExitError("No results were found for "+c.String("database"), -1) + } + if c.Bool("human-readable") { - return showConnsHuman(longConns) + err := showConnsHuman(longConns) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showConns(longConns) + err := showConns(longConns) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil }, } bootstrapCommands(command) @@ -53,15 +64,13 @@ func showConns(connResults []data.Conn) error { return err } - var error error for _, result := range connResults { err := out.Execute(os.Stdout, result) if err != nil { fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - error = err } } - return error + return nil } func showConnsHuman(connResults []data.Conn) error { diff --git a/commands/show-scans.go b/commands/show-scans.go index a6433aef..a9ef4a81 100644 --- a/commands/show-scans.go 
+++ b/commands/show-scans.go @@ -34,10 +34,21 @@ func init() { coll := res.DB.Session.DB(c.String("database")).C(res.System.ScanningConfig.ScanTable) coll.Find(nil).All(&scans) + if len(scans) == 0 { + return cli.NewExitError("No results were found for "+c.String("database"), -1) + } + if c.Bool("human-readable") { - return showScansHuman(scans) + err := showScansHuman(scans) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showScans(scans) + err := showScans(scans) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil }, } bootstrapCommands(command) @@ -51,16 +62,14 @@ func showScans(scans []scanning.Scan) error { return err } - var error error for _, scan := range scans { sort.Ints(scan.PortSet) err := out.Execute(os.Stdout, scan) if err != nil { fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - error = err } } - return error + return nil } // showScans prints all scans for a given database diff --git a/commands/show-urls.go b/commands/show-urls.go index 262f3739..deb449f4 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -34,10 +34,21 @@ func init() { coll.Find(nil).Sort("-length").All(&urls) + if len(urls) == 0 { + return cli.NewExitError("No results were found for "+c.String("database"), -1) + } + if c.Bool("human-readable") { - return showURLsHuman(urls) + err := showURLsHuman(urls) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showURLs(urls) + err := showURLs(urls) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil }, } vistedURLs := cli.Command{ @@ -60,10 +71,21 @@ func init() { coll.Find(nil).Sort("-count").All(&urls) + if len(urls) == 0 { + return cli.NewExitError("No results were found for "+c.String("database"), -1) + } + if c.Bool("human-readable") { - return showURLsHuman(urls) + err := showURLsHuman(urls) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } + err := showURLs(urls) + if err != 
nil { + return cli.NewExitError(err.Error(), -1) } - return showURLs(urls) + return nil }, } bootstrapCommands(longURLs, vistedURLs) @@ -77,15 +99,13 @@ func showURLs(urls []urls.URL) error { return err } - var error error for _, url := range urls { err := out.Execute(os.Stdout, url) if err != nil { fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - error = err } } - return error + return nil } func showURLsHuman(urls []urls.URL) error { diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index 417b6436..1a478b09 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -45,10 +45,21 @@ func init() { coll.Find(nil).Sort(sortStr).All(&agents) + if len(agents) == 0 { + return cli.NewExitError("No results were found for "+c.String("database"), -1) + } + if c.Bool("human-readable") { - return showAgentsHuman(agents) + err := showAgentsHuman(agents) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } } - return showAgents(agents) + err := showAgents(agents) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + return nil }, } bootstrapCommands(command) @@ -62,15 +73,13 @@ func showAgents(agents []useragent.UserAgent) error { return err } - var error error for _, agent := range agents { err := out.Execute(os.Stdout, agent) if err != nil { fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - error = err } } - return error + return nil } func showAgentsHuman(agents []useragent.UserAgent) error { From fbf77087d98e9a9f01e912943a36f749730b2abd Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Wed, 7 Jun 2017 10:53:00 -0400 Subject: [PATCH 006/117] Updated data size score --- analysis/beacon/beacon.go | 39 ++++++++++++++++++++++++++++---- analysis/structure/uconn.go | 4 ++++ commands/show-beacons.go | 8 +++---- datatypes/beacon/beacon.go | 2 ++ datatypes/data/data.go | 21 +++++++++-------- datatypes/structure/structure.go | 2 +- 6 files changed, 57 insertions(+), 19 
deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 3a366dfc..103b1bb1 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -46,7 +46,7 @@ type ( uconnID bson.ObjectId // Unique Connection ID ts []int64 // Connection timestamps for this src, dst pair //dur []int64 - //orig_bytes []int64 + orig_bytes []int64 //resp_bytes []int64 } ) @@ -194,6 +194,7 @@ func (t *Beacon) collect() { for connIter.Next(&conn) { newInput.ts = append(newInput.ts, conn.Ts) + newInput.orig_bytes = append(newInput.orig_bytes, conn.OriginBytes) } t.analysisChannel <- newInput } @@ -205,8 +206,9 @@ func (t *Beacon) collect() { // analyze src, dst pairs with their connection data func (t *Beacon) analyze() { for data := range t.analysisChannel { - //sort the timestamps since they may have arrived out of order + //sort the size and timestamps since they may have arrived out of order sort.Sort(util.SortableInt64(data.ts)) + sort.Sort(util.SortableInt64(data.orig_bytes)) //remove subsecond communications //these will appear as beacons if we do not remove them @@ -220,9 +222,10 @@ func (t *Beacon) analyze() { } //store the diff slice length since we use it a lot - //this is one less then the data slice length + //for timestamps this is one less then the data slice length //since we are calculating the times in between readings length := len(data.ts) - 1 + ds_length := len(data.orig_bytes) //find the duration of this connection //perfect beacons should fill the observation period @@ -235,10 +238,11 @@ func (t *Beacon) analyze() { diff[i] = data.ts[i+1] - data.ts[i] } - //perfect beacons should have symmetric delta time distributions + //perfect beacons should have symmetric delta time and size distributions //Bowley's measure of skew is used to check symmetry sort.Sort(util.SortableInt64(diff)) bSkew := float64(0) + ds_bSkew := float64(0) //length -1 is used since diff is a zero based slice low := diff[util.Round(.25*float64(length-1))] @@ 
-247,11 +251,21 @@ func (t *Beacon) analyze() { bNum := low + high - 2*mid bDen := high - low + ds_low := data.orig_bytes[util.Round(.25*float64(ds_length-1))] + ds_mid := data.orig_bytes[util.Round(.5*float64(ds_length-1))] + ds_high := data.orig_bytes[util.Round(.75*float64(ds_length-1))] + ds_bNum := ds_low + ds_high - 2*ds_mid + ds_bDen := ds_high - ds_low + //bSkew should equal zero if the denominator equals zero //bowley skew is unreliable if Q2 = Q1 or Q2 = Q3 if bDen != 0 && mid != low && mid != high { bSkew = float64(bNum) / float64(bDen) } + + if ds_bDen != 0 { + ds_bSkew = float64(ds_bNum) / float64(ds_bDen) + } //perfect beacons should have very low dispersion around the //median of their delta times @@ -261,9 +275,17 @@ func (t *Beacon) analyze() { for i := 0; i < length; i++ { devs[i] = util.Abs(diff[i] - mid) } + + ds_devs := make([]int64, ds_length) + for i := 0; i < ds_length; i++ { + ds_devs[i] = util.Abs(data.orig_bytes[i] - ds_mid) + } sort.Sort(util.SortableInt64(devs)) + sort.Sort(util.SortableInt64(ds_devs)) + madm := devs[util.Round(.5*float64(length-1))] + ds_madm := ds_devs[util.Round(.5*float64(ds_length-1))] //Store the range for human analysis iRange := diff[length-1] - diff[0] @@ -288,16 +310,24 @@ func (t *Beacon) analyze() { //more skewed distributions recieve a lower score //less skewed distributions recieve a higher score alpha := 1.0 - math.Abs(bSkew) + ds_alpha := 1.0 - math.Abs(ds_bSkew) //lower dispersion is better, cutoff dispersion scores at 30 seconds beta := 1.0 - float64(madm)/30.0 if beta < 0 { beta = 0 } + ds_beta := 1.0 - float64(ds_madm) + if ds_beta < 0 { + ds_beta = 0 + } + gamma := duration //in order of ascending importance: skew, duration, dispersion output.TS_score = (alpha + beta + gamma) / 3.0 + //in order of ascending importance: skew, dispersion + output.DS_score = (ds_alpha + ds_beta) / 2.0 t.writeChannel <- &output } @@ -375,6 +405,7 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { 
{ {"$project", bson.D{ {"ts_score", 1}, + {"ds_score", 1}, {"src", "$uconn.src"}, {"dst", "$uconn.dst"}, {"local_src", "$uconn.local_src"}, diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index 27566b8c..d2bba04a 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -88,6 +88,9 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] }}, }}, }}, + {"orig_bytes", bson.D{ + {"$first", "$orig_bytes"}, + }}, {"avg_bytes", bson.D{ {"$avg", bson.D{ {"$add", []interface{}{ @@ -110,6 +113,7 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] {"local_src", 1}, {"local_dst", 1}, {"total_bytes", 1}, + {"orig_bytes", 1}, {"avg_bytes", 1}, {"total_duration", 1}, {"uid", 1}, diff --git a/commands/show-beacons.go b/commands/show-beacons.go index e8c76a4b..582ceca5 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -59,7 +59,7 @@ func showBeacons(c *cli.Context) error { func showBeaconReport(data []beaconData.BeaconAnalysisView) error { table := tablewriter.NewWriter(os.Stdout) - table.SetHeader([]string{"Score", "Source IP", "Destination IP", + table.SetHeader([]string{"Score", "Size Score", "Source IP", "Destination IP", "Connections", "Avg. 
Bytes", "Intvl Range", "Top Intvl", "Top Intvl Count", "Intvl Skew", "Intvl Dispersion", "Intvl Duration"}) f := func(f float64) string { @@ -71,7 +71,7 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { for _, d := range data { table.Append( []string{ - f(d.TS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), + f(d.TS_score), f(d.DS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), i(d.TS_iRange), i(d.TS_iMode), i(d.TS_iModeCount), f(d.TS_iSkew), i(d.TS_iDispersion), f(d.TS_duration)}) } @@ -80,8 +80,8 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { } func showBeaconCsv(data []beaconData.BeaconAnalysisView) error { - tmpl := "{{.TS_score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," - tmpl += "{{.TS_iRange}},{{.TS_iMode}},{{.TS_iModeCount}}," + tmpl := "{{.TS_score}},{{.DS_score}},{{.Src}},{{.Dst}},{{.Connections}}," + tmpl += "{{.AvgBytes}}{{.TS_iRange}},{{.TS_iMode}},{{.TS_iModeCount}}," tmpl += "{{.TS_iSkew}},{{.TS_iDispersion}},{{.TS_duration}}\n" out, err := template.New("beacon").Parse(tmpl) diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index d3d829d3..f66c5ba0 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -16,6 +16,7 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` + DS_score float64 `bson:"ds_score"` TS_intervals []int64 `bson:"ts_intervals"` TS_intervalCounts []int64 `bson:"ts_interval_counts"` } @@ -36,5 +37,6 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` + DS_score float64 `bson:"ds_score"` } ) diff --git a/datatypes/data/data.go b/datatypes/data/data.go index 3bb1d748..7cb6b74f 100644 --- a/datatypes/data/data.go +++ b/datatypes/data/data.go @@ -8,16 +8,17 @@ type ( // not in this Conn structure use parser.Conn instead. 
Conn struct { ID bson.ObjectId `bson:"_id,omitempty"` - Ts int64 `bson:"ts,omitempty"` - UID string `bson:"uid"` - Src string `bson:"id_origin_h,omitempty"` - Spt int `bson:"id_origin_p,omitempty"` - Dst string `bson:"id_resp_h,omitempty"` - Dpt int `bson:"id_resp_p,omitempty"` - Dur float64 `bson:"duration,omitempty"` - Proto string `bson:"proto,omitempty"` - LocalSrc bool `bson:"local_orig,omitempty"` - LocalDst bool `bson:"local_resp,omitempty"` + Ts int64 `bson:"ts,omitempty"` + UID string `bson:"uid"` + Src string `bson:"id_origin_h,omitempty"` + Spt int `bson:"id_origin_p,omitempty"` + Dst string `bson:"id_resp_h,omitempty"` + Dpt int `bson:"id_resp_p,omitempty"` + Dur float64 `bson:"duration,omitempty"` + Proto string `bson:"proto,omitempty"` + LocalSrc bool `bson:"local_orig,omitempty"` + LocalDst bool `bson:"local_resp,omitempty"` + OriginBytes int64 `bson:"orig_bytes,omitempty"` } // DNS provides structure for a subset of the fields in the diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index e189f4ed..4e3ba3d7 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -18,7 +18,7 @@ type ( Dst string `bson:"dst"` LocalSrc bool `bson:"local_src"` LocalDst bool `bson:"local_dst"` - TotalBytes int `bson:"total_bytes"` + OriginBytes int64 `bson:"origin_bytes"` AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } From a4f61e54f19118b021abce628120958f0e8a3353 Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Fri, 9 Jun 2017 11:44:02 -0400 Subject: [PATCH 007/117] Fixed errors in uconn.go and structure.go --- analysis/structure/uconn.go | 4 ---- datatypes/structure/structure.go | 1 + 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index d2bba04a..27566b8c 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -88,9 +88,6 @@ func getUniqueConnectionsScript(sysCfg 
*config.SystemConfig) (string, string, [] }}, }}, }}, - {"orig_bytes", bson.D{ - {"$first", "$orig_bytes"}, - }}, {"avg_bytes", bson.D{ {"$avg", bson.D{ {"$add", []interface{}{ @@ -113,7 +110,6 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] {"local_src", 1}, {"local_dst", 1}, {"total_bytes", 1}, - {"orig_bytes", 1}, {"avg_bytes", 1}, {"total_duration", 1}, {"uid", 1}, diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 4e3ba3d7..631e6d8f 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -18,6 +18,7 @@ type ( Dst string `bson:"dst"` LocalSrc bool `bson:"local_src"` LocalDst bool `bson:"local_dst"` + TotalBytes int `bson:"total_bytes"` OriginBytes int64 `bson:"origin_bytes"` AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` From e0799ca3c6085357990c3421683a6438cbdb1d43 Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Wed, 21 Jun 2017 20:54:26 -0400 Subject: [PATCH 008/117] Adds new data size metrics --- analysis/beacon/beacon.go | 57 ++++++++++++++++++++------------ commands/show-beacons.go | 22 +++++++----- datatypes/beacon/beacon.go | 14 ++++++-- datatypes/data/data.go | 24 +++++++------- datatypes/structure/structure.go | 1 - 5 files changed, 73 insertions(+), 45 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 103b1bb1..dfdec183 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -46,7 +46,7 @@ type ( uconnID bson.ObjectId // Unique Connection ID ts []int64 // Connection timestamps for this src, dst pair //dur []int64 - orig_bytes []int64 + orig_ip_bytes []int64 //resp_bytes []int64 } ) @@ -194,7 +194,7 @@ func (t *Beacon) collect() { for connIter.Next(&conn) { newInput.ts = append(newInput.ts, conn.Ts) - newInput.orig_bytes = append(newInput.orig_bytes, conn.OriginBytes) + newInput.orig_ip_bytes = append(newInput.orig_ip_bytes, conn.OriginIPBytes) } 
t.analysisChannel <- newInput } @@ -208,7 +208,7 @@ func (t *Beacon) analyze() { for data := range t.analysisChannel { //sort the size and timestamps since they may have arrived out of order sort.Sort(util.SortableInt64(data.ts)) - sort.Sort(util.SortableInt64(data.orig_bytes)) + sort.Sort(util.SortableInt64(data.orig_ip_bytes)) //remove subsecond communications //these will appear as beacons if we do not remove them @@ -225,7 +225,7 @@ func (t *Beacon) analyze() { //for timestamps this is one less then the data slice length //since we are calculating the times in between readings length := len(data.ts) - 1 - ds_length := len(data.orig_bytes) + ds_length := len(data.orig_ip_bytes) //find the duration of this connection //perfect beacons should fill the observation period @@ -242,7 +242,7 @@ func (t *Beacon) analyze() { //Bowley's measure of skew is used to check symmetry sort.Sort(util.SortableInt64(diff)) bSkew := float64(0) - ds_bSkew := float64(0) + ds_skew := float64(0) //length -1 is used since diff is a zero based slice low := diff[util.Round(.25*float64(length-1))] @@ -251,9 +251,9 @@ func (t *Beacon) analyze() { bNum := low + high - 2*mid bDen := high - low - ds_low := data.orig_bytes[util.Round(.25*float64(ds_length-1))] - ds_mid := data.orig_bytes[util.Round(.5*float64(ds_length-1))] - ds_high := data.orig_bytes[util.Round(.75*float64(ds_length-1))] + ds_low := data.orig_ip_bytes[util.Round(.25*float64(ds_length-1))] + ds_mid := data.orig_ip_bytes[util.Round(.5*float64(ds_length-1))] + ds_high := data.orig_ip_bytes[util.Round(.75*float64(ds_length-1))] ds_bNum := ds_low + ds_high - 2*ds_mid ds_bDen := ds_high - ds_low @@ -263,8 +263,8 @@ func (t *Beacon) analyze() { bSkew = float64(bNum) / float64(bDen) } - if ds_bDen != 0 { - ds_bSkew = float64(ds_bNum) / float64(ds_bDen) + if ds_bDen != 0 && ds_mid != ds_low && ds_mid != ds_high { + ds_skew = float64(ds_bNum) / float64(ds_bDen) } //perfect beacons should have very low dispersion around the @@ -278,7 
+278,7 @@ func (t *Beacon) analyze() { ds_devs := make([]int64, ds_length) for i := 0; i < ds_length; i++ { - ds_devs[i] = util.Abs(data.orig_bytes[i] - ds_mid) + ds_devs[i] = util.Abs(data.orig_ip_bytes[i] - ds_mid) } sort.Sort(util.SortableInt64(devs)) @@ -289,11 +289,13 @@ func (t *Beacon) analyze() { //Store the range for human analysis iRange := diff[length-1] - diff[0] + ds_range := data.orig_ip_bytes[ds_length-1] - data.orig_ip_bytes[0] //get a list of the intervals found in the data, //the number of times the interval was found, //and the most occurring interval intervals, intervalCounts, mode, modeCount := createCountMap(diff) + ds_sizes, ds_counts, ds_mode, ds_modeCount := createCountMap(data.orig_ip_bytes) output := dataBeacon.BeaconAnalysisOutput{ UconnID: data.uconnID, @@ -305,32 +307,40 @@ func (t *Beacon) analyze() { TS_iModeCount: modeCount, TS_intervals: intervals, TS_intervalCounts: intervalCounts, + DS_skew: ds_skew, + DS_dispersion: ds_madm, + DS_range: ds_range, + DS_sizes: ds_sizes, + DS_counts: ds_counts, + DS_mode: ds_mode, + DS_modeCount: ds_modeCount, } //more skewed distributions recieve a lower score //less skewed distributions recieve a higher score alpha := 1.0 - math.Abs(bSkew) - ds_alpha := 1.0 - math.Abs(ds_bSkew) + delta := 1.0 - math.Abs(ds_skew) //lower dispersion is better, cutoff dispersion scores at 30 seconds beta := 1.0 - float64(madm)/30.0 if beta < 0 { beta = 0 } - ds_beta := 1.0 - float64(ds_madm) - if ds_beta < 0 { - ds_beta = 0 + //no cutoff dispersion for data size + epsilon := 1.0 - float64(ds_madm) + if epsilon < 0 { + epsilon = 0 } gamma := duration + //smaller data sizes receive a higher score + zeta := 1.0 - (float64(ds_mode) / 65535.0) - //in order of ascending importance: skew, duration, dispersion - output.TS_score = (alpha + beta + gamma) / 3.0 - //in order of ascending importance: skew, dispersion - output.DS_score = (ds_alpha + ds_beta) / 2.0 + //in order of ascending importance: timestamp skew, timestamp 
duration, + //timestamp dispersion, size skew, size duration, size weight + output.TS_score = (alpha + beta + gamma + delta + epsilon + zeta) / 6.0 t.writeChannel <- &output } - t.analysisWg.Done() } @@ -405,7 +415,6 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { { {"$project", bson.D{ {"ts_score", 1}, - {"ds_score", 1}, {"src", "$uconn.src"}, {"dst", "$uconn.dst"}, {"local_src", "$uconn.local_src"}, @@ -417,6 +426,12 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { {"ts_iMode_count", 1}, {"ts_iSkew", 1}, {"ts_duration", 1}, + {"ts_iDispersion", 1}, + {"ds_dispersion", 1}, + {"ds_range", 1}, + {"ds_mode", 1}, + {"ds_mode_count", 1}, + {"ds_skew", 1}, }}, }, } diff --git a/commands/show-beacons.go b/commands/show-beacons.go index 582ceca5..3281972e 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -59,9 +59,11 @@ func showBeacons(c *cli.Context) error { func showBeaconReport(data []beaconData.BeaconAnalysisView) error { table := tablewriter.NewWriter(os.Stdout) - table.SetHeader([]string{"Score", "Size Score", "Source IP", "Destination IP", - "Connections", "Avg. Bytes", "Intvl Range", "Top Intvl", - "Top Intvl Count", "Intvl Skew", "Intvl Dispersion", "Intvl Duration"}) + table.SetHeader([]string{"Score", "Source IP", "Destination IP", + "Connections", "Avg. 
Bytes", "Intvl Range", "Size Range", "Top Intvl", + "Top Size", "Top Intvl Count", "Top Size Count", "Intvl Skew", + "Size Skew", "Intvl Dispersion", "Size Dispersion", "Intvl Duration"}) + f := func(f float64) string { return strconv.FormatFloat(f, 'g', 6, 64) } @@ -71,18 +73,20 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { for _, d := range data { table.Append( []string{ - f(d.TS_score), f(d.DS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), - i(d.TS_iRange), i(d.TS_iMode), i(d.TS_iModeCount), f(d.TS_iSkew), - i(d.TS_iDispersion), f(d.TS_duration)}) + f(d.TS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), + i(d.TS_iRange), i(d.DS_range), i(d.TS_iMode), i(d.DS_mode), + i(d.TS_iModeCount), i(d.DS_modeCount), f(d.TS_iSkew), f(d.DS_skew), + i(d.TS_iDispersion), i(d.DS_dispersion), f(d.TS_duration)}) } table.Render() return nil } func showBeaconCsv(data []beaconData.BeaconAnalysisView) error { - tmpl := "{{.TS_score}},{{.DS_score}},{{.Src}},{{.Dst}},{{.Connections}}," - tmpl += "{{.AvgBytes}}{{.TS_iRange}},{{.TS_iMode}},{{.TS_iModeCount}}," - tmpl += "{{.TS_iSkew}},{{.TS_iDispersion}},{{.TS_duration}}\n" + tmpl := "{{.TS_score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," + tmpl += "{{.TS_iRange}},{{.DS_range}},{{.TS_iMode}},{{.DS_mode}},{{.TS_iModeCount}}," + tmpl += "{{.DS_modeCount}},{{.TS_iSkew}},{{.DS_skew}},{{.TS_iDispersion}}," + tmpl += "{{.DS_dispersion}},{{.TS_duration}}\n" out, err := template.New("beacon").Parse(tmpl) if err != nil { diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index f66c5ba0..83deab38 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -16,9 +16,15 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` - DS_score float64 `bson:"ds_score"` TS_intervals []int64 `bson:"ts_intervals"` TS_intervalCounts []int64 `bson:"ts_interval_counts"` + DS_skew float64 `bson:"ds_skew"` + 
DS_dispersion int64 `bson:"ds_dispersion"` + DS_range int64 `bson:"ds_range"` + DS_sizes []int64 `bson:"ds_sizes"` + DS_counts []int64 `bson:"ds_counts"` + DS_mode int64 `bson:"ds_mode"` + DS_modeCount int64 `bson:"ds_mode_count"` } //Used in order to join the uconn and beacon tables @@ -37,6 +43,10 @@ type ( TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` TS_score float64 `bson:"ts_score"` - DS_score float64 `bson:"ds_score"` + DS_skew float64 `bson:"ds_skew"` + DS_dispersion int64 `bson:"ds_dispersion"` + DS_range int64 `bson:"ds_range"` + DS_mode int64 `bson:"ds_mode"` + DS_modeCount int64 `bson:"ds_mode_count"` } ) diff --git a/datatypes/data/data.go b/datatypes/data/data.go index 7cb6b74f..21a88d2f 100644 --- a/datatypes/data/data.go +++ b/datatypes/data/data.go @@ -7,18 +7,18 @@ type ( // parser.Conn data structure. If fields are needed that are // not in this Conn structure use parser.Conn instead. Conn struct { - ID bson.ObjectId `bson:"_id,omitempty"` - Ts int64 `bson:"ts,omitempty"` - UID string `bson:"uid"` - Src string `bson:"id_origin_h,omitempty"` - Spt int `bson:"id_origin_p,omitempty"` - Dst string `bson:"id_resp_h,omitempty"` - Dpt int `bson:"id_resp_p,omitempty"` - Dur float64 `bson:"duration,omitempty"` - Proto string `bson:"proto,omitempty"` - LocalSrc bool `bson:"local_orig,omitempty"` - LocalDst bool `bson:"local_resp,omitempty"` - OriginBytes int64 `bson:"orig_bytes,omitempty"` + ID bson.ObjectId `bson:"_id,omitempty"` + Ts int64 `bson:"ts,omitempty"` + UID string `bson:"uid"` + Src string `bson:"id_origin_h,omitempty"` + Spt int `bson:"id_origin_p,omitempty"` + Dst string `bson:"id_resp_h,omitempty"` + Dpt int `bson:"id_resp_p,omitempty"` + Dur float64 `bson:"duration,omitempty"` + Proto string `bson:"proto,omitempty"` + LocalSrc bool `bson:"local_orig,omitempty"` + LocalDst bool `bson:"local_resp,omitempty"` + OriginIPBytes int64 `bson:"orig_ip_bytes,omitempty"` } // DNS provides structure for a subset 
of the fields in the diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 631e6d8f..e189f4ed 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -19,7 +19,6 @@ type ( LocalSrc bool `bson:"local_src"` LocalDst bool `bson:"local_dst"` TotalBytes int `bson:"total_bytes"` - OriginBytes int64 `bson:"origin_bytes"` AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } From 3caa12d1eafae6dd77be9d523d49b970ab85d393 Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Wed, 28 Jun 2017 14:21:14 -0400 Subject: [PATCH 009/117] Add data size metrics to HTML report --- analysis/beacon/beacon.go | 10 +++++----- commands/show-beacons.go | 4 ++-- datatypes/beacon/beacon.go | 4 ++-- reporting/report-beacons.go | 7 ++++--- reporting/templates/templates.go | 7 ++++--- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index dfdec183..26a3aa5a 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -53,7 +53,7 @@ type ( func BuildBeaconCollection(res *database.Resources) { collection_name := res.System.BeaconConfig.BeaconTable - collection_keys := []string{"uconn_id", "ts_score"} + collection_keys := []string{"uconn_id", "score"} err := res.DB.CreateCollection(collection_name, collection_keys) if err != nil { res.Log.Error("Failed: ", collection_name, err.Error()) @@ -338,7 +338,7 @@ func (t *Beacon) analyze() { //in order of ascending importance: timestamp skew, timestamp duration, //timestamp dispersion, size skew, size duration, size weight - output.TS_score = (alpha + beta + gamma + delta + epsilon + zeta) / 6.0 + output.Score = (alpha + beta + gamma + delta + epsilon + zeta) / 6.0 t.writeChannel <- &output } t.analysisWg.Done() @@ -391,7 +391,7 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { return []bson.D{ { {"$match", bson.D{ - {"ts_score", bson.D{ + {"score", 
bson.D{ {"$gt", cuttoff}, }}, }}, @@ -409,12 +409,12 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { }, { {"$sort", bson.D{ - {"ts_score", -1}, + {"score", -1}, }}, }, { {"$project", bson.D{ - {"ts_score", 1}, + {"score", 1}, {"src", "$uconn.src"}, {"dst", "$uconn.dst"}, {"local_src", "$uconn.local_src"}, diff --git a/commands/show-beacons.go b/commands/show-beacons.go index 3281972e..0fd1238e 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -73,7 +73,7 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { for _, d := range data { table.Append( []string{ - f(d.TS_score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), + f(d.Score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), i(d.TS_iRange), i(d.DS_range), i(d.TS_iMode), i(d.DS_mode), i(d.TS_iModeCount), i(d.DS_modeCount), f(d.TS_iSkew), f(d.DS_skew), i(d.TS_iDispersion), i(d.DS_dispersion), f(d.TS_duration)}) @@ -83,7 +83,7 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { } func showBeaconCsv(data []beaconData.BeaconAnalysisView) error { - tmpl := "{{.TS_score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," + tmpl := "{{.Score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," tmpl += "{{.TS_iRange}},{{.DS_range}},{{.TS_iMode}},{{.DS_mode}},{{.TS_iModeCount}}," tmpl += "{{.DS_modeCount}},{{.TS_iSkew}},{{.DS_skew}},{{.TS_iDispersion}}," tmpl += "{{.DS_dispersion}},{{.TS_duration}}\n" diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index 83deab38..4897a60f 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -15,7 +15,7 @@ type ( TS_iSkew float64 `bson:"ts_iSkew"` TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` - TS_score float64 `bson:"ts_score"` + Score float64 `bson:"score"` TS_intervals []int64 `bson:"ts_intervals"` TS_intervalCounts []int64 `bson:"ts_interval_counts"` DS_skew float64 `bson:"ds_skew"` @@ -42,7 +42,7 @@ type ( TS_iSkew 
float64 `bson:"ts_iSkew"` TS_iDispersion int64 `bson:"ts_iDispersion"` TS_duration float64 `bson:"ts_duration"` - TS_score float64 `bson:"ts_score"` + Score float64 `bson:"score"` DS_skew float64 `bson:"ds_skew"` DS_dispersion int64 `bson:"ds_dispersion"` DS_range int64 `bson:"ds_range"` diff --git a/reporting/report-beacons.go b/reporting/report-beacons.go index 57b9c907..f4b13805 100644 --- a/reporting/report-beacons.go +++ b/reporting/report-beacons.go @@ -38,9 +38,10 @@ func printBeacons(db string, res *database.Resources) error { } func getBeaconWriter(beacons []beaconData.BeaconAnalysisView) (string, error) { - tmpl := "{{printf \"%.3f\" .TS_score}}{{.Src}}{{.Dst}}{{.Connections}}{{printf \"%.3f\" .AvgBytes}}" - tmpl += "{{.TS_iRange}}{{.TS_iMode}}{{.TS_iModeCount}}" - tmpl += "{{printf \"%.3f\" .TS_iSkew}}{{.TS_iDispersion}}{{printf \"%.3f\" .TS_duration}}\n" + tmpl := "{{printf \"%.3f\" .Score}}{{.Src}}{{.Dst}}{{.Connections}}{{printf \"%.3f\" .AvgBytes}}" + tmpl += "{{.TS_iRange}}{{.DS_range}}{{.TS_iMode}}{{.DS_mode}}{{.TS_iModeCount}}{{.DS_mode}}" + tmpl += "{{printf \"%.3f\" .TS_iSkew}}{{printf \"%.3f\" .DS_skew}}{{.TS_iDispersion}}{{.DS_dispersion}}" + tmpl += "{{printf \"%.3f\" .TS_duration}}\n" out, err := template.New("beacon").Parse(tmpl) if err != nil { diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index a82161a0..d2f1a01c 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -100,9 +100,10 @@ var DBhometempl = dbHeader + ` var BeaconsTempl = dbHeader + `
- + + {{.Writer}}
TS scoreSourceDestinationConnectionsAvg. Bytes - Intvl. RangeIntvl. ModeIntvl. Mode Count - Intvl. SkewIntvl. DispersionTS Duration
ScoreSourceDestinationConnectionsAvg. Bytes + Intvl. RangeSize RangeIntvl. ModeIntvl. Mode CountSize Mode CountIntvl. SkewSize SkewIntvl. DispersionSize Dispersion + TS Duration
From 9ed4b7c5d2dc4722457b39ebb738cfecf4d54670 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 29 Jun 2017 15:32:33 -0600 Subject: [PATCH 010/117] Fix documentation on GetBeaconResultsView --- analysis/beacon/beacon.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 3a366dfc..01b41a52 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -345,8 +345,11 @@ func createCountMap(data []int64) ([]int64, []int64, int64, int64) { return distinct, counts, mode, max } -// GetViewPipeline creates an aggregation for user views since the beacon table -// stores uconn uid's rather than src, dest pairs +// GetViewPipeline creates an aggregation for user views since the beacon collection +// stores uconn uid's rather than src, dest pairs. cuttoff is the lowest overall +// score to report on. Setting cuttoff to 0 retrieves all the records from the +// beaconing collection. Setting cuttoff to 1 will prevent the aggregation from +// returning any records. 
func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { return []bson.D{ { From 633215cf5041dbd0a7fd4ba31e352c782dc04d1e Mon Sep 17 00:00:00 2001 From: Melissa Bruno Date: Fri, 30 Jun 2017 14:08:55 -0400 Subject: [PATCH 011/117] Removed stray bracket and fixed DS_modeCount --- reporting/report-beacons.go | 2 +- reporting/templates/templates.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/reporting/report-beacons.go b/reporting/report-beacons.go index f4b13805..4785723a 100644 --- a/reporting/report-beacons.go +++ b/reporting/report-beacons.go @@ -39,7 +39,7 @@ func printBeacons(db string, res *database.Resources) error { func getBeaconWriter(beacons []beaconData.BeaconAnalysisView) (string, error) { tmpl := "{{printf \"%.3f\" .Score}}{{.Src}}{{.Dst}}{{.Connections}}{{printf \"%.3f\" .AvgBytes}}" - tmpl += "{{.TS_iRange}}{{.DS_range}}{{.TS_iMode}}{{.DS_mode}}{{.TS_iModeCount}}{{.DS_mode}}" + tmpl += "{{.TS_iRange}}{{.DS_range}}{{.TS_iMode}}{{.DS_mode}}{{.TS_iModeCount}}{{.DS_modeCount}}" tmpl += "{{printf \"%.3f\" .TS_iSkew}}{{printf \"%.3f\" .DS_skew}}{{.TS_iDispersion}}{{.DS_dispersion}}" tmpl += "{{printf \"%.3f\" .TS_duration}}\n" diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index d2f1a01c..cbcdfc3a 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -101,7 +101,7 @@ var BeaconsTempl = dbHeader + `
+ Intvl. Range {{.Writer}} From c8abeca408992a7ee4446b895ef754e875605b61 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 29 Jun 2017 18:05:49 -0600 Subject: [PATCH 012/117] Fix file logging --- database/resources.go | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/database/resources.go b/database/resources.go index ee8e8644..9159cbbe 100644 --- a/database/resources.go +++ b/database/resources.go @@ -4,6 +4,7 @@ import ( "fmt" "io/ioutil" "os" + "path" "sync" "time" @@ -116,6 +117,8 @@ func initLog(level int) (*log.Logger, error) { } func addFileLogger(logger *log.Logger, logPath string) { + time := time.Now().Format(util.TimeFormat) + logPath = path.Join(logPath, time) _, err := os.Stat(logPath) if err != nil && os.IsNotExist(err) { err = os.MkdirAll(logPath, 0755) @@ -124,11 +127,14 @@ func addFileLogger(logger *log.Logger, logPath string) { return } } + logger.Hooks.Add(lfshook.NewHook(lfshook.PathMap{ - log.DebugLevel: logPath + "/debug-" + time.Now().Format(util.TimeFormat) + ".log", - log.InfoLevel: logPath + "/info-" + time.Now().Format(util.TimeFormat) + ".log", - log.WarnLevel: logPath + "/warn-" + time.Now().Format(util.TimeFormat) + ".log", - log.ErrorLevel: logPath + "/error-" + time.Now().Format(util.TimeFormat) + ".log", + log.DebugLevel: path.Join(logPath, "debug.log"), + log.InfoLevel: path.Join(logPath, "info.log"), + log.WarnLevel: path.Join(logPath, "warn.log"), + log.ErrorLevel: path.Join(logPath, "error.log"), + log.FatalLevel: path.Join(logPath, "fatal.log"), + log.PanicLevel: path.Join(logPath, "panic.log"), })) } From 01867022ca5d2af120e4028835d3c66fdbd37060 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Tue, 27 Jun 2017 10:51:27 -0600 Subject: [PATCH 013/117] Remove _id indexes --- analysis/beacon/beacon.go | 19 ++++++++++------- analysis/beacon/beacon_test.go | 11 +++++----- analysis/blacklisted/blacklisted.go | 8 +++++-- analysis/crossref/crossref.go | 6 ++++-- analysis/dns/explodedDNS.go | 6 
+++++- analysis/dns/hostnames.go | 3 ++- analysis/scanning/scan.go | 33 +++++++++++++++++------------ analysis/structure/hosts.go | 32 +++++++++++++++------------- analysis/structure/uconn.go | 31 ++++++++++++++++----------- analysis/urls/url.go | 14 +++++++++--- analysis/useragent/useragent.go | 12 ++++++++--- database/control.go | 15 ++++++------- parser/mongodatastore.go | 2 ++ 13 files changed, 118 insertions(+), 74 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 87d437d2..88bb6147 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -53,8 +53,11 @@ type ( func BuildBeaconCollection(res *database.Resources) { collection_name := res.System.BeaconConfig.BeaconTable - collection_keys := []string{"uconn_id", "score"} - err := res.DB.CreateCollection(collection_name, collection_keys) + collection_keys := []mgo.Index{ + {Key: []string{"uconn_id"}, Unique: true}, + {Key: []string{"ts_score"}}, + } + err := res.DB.CreateCollection(collection_name, false, collection_keys) if err != nil { res.Log.Error("Failed: ", collection_name, err.Error()) return @@ -262,7 +265,7 @@ func (t *Beacon) analyze() { if bDen != 0 && mid != low && mid != high { bSkew = float64(bNum) / float64(bDen) } - + if ds_bDen != 0 && ds_mid != ds_low && ds_mid != ds_high { ds_skew = float64(ds_bNum) / float64(ds_bDen) } @@ -275,7 +278,7 @@ func (t *Beacon) analyze() { for i := 0; i < length; i++ { devs[i] = util.Abs(diff[i] - mid) } - + ds_devs := make([]int64, ds_length) for i := 0; i < ds_length; i++ { ds_devs[i] = util.Abs(data.orig_ip_bytes[i] - ds_mid) @@ -283,7 +286,7 @@ func (t *Beacon) analyze() { sort.Sort(util.SortableInt64(devs)) sort.Sort(util.SortableInt64(ds_devs)) - + madm := devs[util.Round(.5*float64(length-1))] ds_madm := ds_devs[util.Round(.5*float64(ds_length-1))] @@ -330,8 +333,8 @@ func (t *Beacon) analyze() { epsilon := 1.0 - float64(ds_madm) if epsilon < 0 { epsilon = 0 - } - + } + gamma := duration //smaller data sizes 
receive a higher score zeta := 1.0 - (float64(ds_mode) / 65535.0) @@ -387,7 +390,7 @@ func createCountMap(data []int64) ([]int64, []int64, int64, int64) { // GetViewPipeline creates an aggregation for user views since the beacon collection // stores uconn uid's rather than src, dest pairs. cuttoff is the lowest overall -// score to report on. Setting cuttoff to 0 retrieves all the records from the +// score to report on. Setting cuttoff to 0 retrieves all the records from the // beaconing collection. Setting cuttoff to 1 will prevent the aggregation from // returning any records. func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { diff --git a/analysis/beacon/beacon_test.go b/analysis/beacon/beacon_test.go index 7dab6d16..47d66baa 100644 --- a/analysis/beacon/beacon_test.go +++ b/analysis/beacon/beacon_test.go @@ -5,9 +5,9 @@ import ( "reflect" "testing" - log "github.com/sirupsen/logrus" "github.com/ocmdev/rita/database" datatype_beacon "github.com/ocmdev/rita/datatypes/beacon" + log "github.com/sirupsen/logrus" ) func printAnalysis(res *datatype_beacon.BeaconAnalysisOutput) string { @@ -35,9 +35,10 @@ func TestAnalysis(t *testing.T) { beaconing.minTime = val.ts[0] beaconing.maxTime = val.ts[len(val.ts)-1] data := &beaconAnalysisInput{ - src: "0.0.0.0", - dst: "0.0.0.0", - ts: val.ts, + src: "0.0.0.0", + dst: "0.0.0.0", + ts: val.ts, + orig_ip_bytes: []int64{5, 5, 5}, } beaconing.analysisWg.Add(1) @@ -48,7 +49,7 @@ func TestAnalysis(t *testing.T) { beaconing.analysisWg.Wait() status := "PASS" - if res.TS_score < val.minScore || res.TS_score > val.maxScore { + if res.Score < val.minScore || res.Score > val.maxScore { fail = true status = "FAIL" } diff --git a/analysis/blacklisted/blacklisted.go b/analysis/blacklisted/blacklisted.go index bf30b36b..d1745335 100644 --- a/analysis/blacklisted/blacklisted.go +++ b/analysis/blacklisted/blacklisted.go @@ -62,8 +62,12 @@ func SetBlacklistSources(res *database.Resources, result *blacklisted.Blacklist) 
//BuildBlacklistedCollection runs the hosts in the dataset against rita-blacklist func BuildBlacklistedCollection(res *database.Resources) { collectionName := res.System.BlacklistedConfig.BlacklistTable - collectionKeys := []string{"bl_hash", "host"} - err := res.DB.CreateCollection(collectionName, collectionKeys) + //this wil go away in the new blacklist update + collectionKeys := []mgo.Index{ + {Key: []string{"bl_hash"}}, + {Key: []string{"host"}}, + } + err := res.DB.CreateCollection(collectionName, false, collectionKeys) if err != nil { res.Log.Error("Failed: ", collectionName, err.Error()) return diff --git a/analysis/crossref/crossref.go b/analysis/crossref/crossref.go index c78f2c4a..071aa073 100644 --- a/analysis/crossref/crossref.go +++ b/analysis/crossref/crossref.go @@ -3,6 +3,7 @@ package crossref import ( "sync" + mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" "github.com/ocmdev/rita/database" @@ -20,8 +21,9 @@ func getXRefSelectors() []dataXRef.XRefSelector { // BuildXRefCollection runs threaded crossref analysis func BuildXRefCollection(res *database.Resources) { - res.DB.CreateCollection(res.System.CrossrefConfig.InternalTable, []string{"host"}) - res.DB.CreateCollection(res.System.CrossrefConfig.ExternalTable, []string{"host"}) + indexes := []mgo.Index{{Key: []string{"host"}, Unique: true}} + res.DB.CreateCollection(res.System.CrossrefConfig.InternalTable, false, indexes) + res.DB.CreateCollection(res.System.CrossrefConfig.ExternalTable, false, indexes) //maps from analysis types to channels of hosts found internal := make(map[string]<-chan string) diff --git a/analysis/dns/explodedDNS.go b/analysis/dns/explodedDNS.go index b39ee4ab..446b8494 100644 --- a/analysis/dns/explodedDNS.go +++ b/analysis/dns/explodedDNS.go @@ -52,7 +52,11 @@ func buildExplodedDNSUniqSubdomains(res *database.Resources) { func zipExplodedDNSResults(res *database.Resources) { ssn := res.DB.Session.Copy() defer ssn.Close() - 
res.DB.CreateCollection(res.System.DNSConfig.ExplodedDNSTable, []string{"domain", "subdomains"}) + indexes := []mgo.Index{ + {Key: []string{"domain"}, Unique: true}, + {Key: []string{"subdomains"}}, + } + res.DB.CreateCollection(res.System.DNSConfig.ExplodedDNSTable, false, indexes) res.DB.AggregateCollection(tempVistedCountCollName, ssn, // nolint: vet []bson.D{ diff --git a/analysis/dns/hostnames.go b/analysis/dns/hostnames.go index 94e718a0..1e6d3d80 100644 --- a/analysis/dns/hostnames.go +++ b/analysis/dns/hostnames.go @@ -24,7 +24,8 @@ func BuildHostnamesCollection(res *database.Resources) { res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) - err := res.DB.CreateCollection(hostNamesCollection, []string{"$hashed:host"}) + indexes := []mgo.Index{{Key: []string{"host"}, Unique: true}} + err := res.DB.CreateCollection(hostNamesCollection, false, indexes) if err != nil { res.Log.Error("Could not create ", hostNamesCollection, err) diff --git a/analysis/scanning/scan.go b/analysis/scanning/scan.go index 659b438b..8843b28c 100644 --- a/analysis/scanning/scan.go +++ b/analysis/scanning/scan.go @@ -4,40 +4,45 @@ import ( "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" + mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) +//BuildScanningCollection detects port scans func BuildScanningCollection(res *database.Resources) { // Create the aggregate command - source_collection_name, - new_collection_name, - new_collection_keys, + sourceCollectionName, + newCollectionName, + newCollectionKeys, pipeline := getScanningCollectionScript(res.System) // Create it - err := res.DB.CreateCollection(new_collection_name, new_collection_keys) + err := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) if err != nil { - res.Log.Error("Failed: ", new_collection_name, err.Error()) + res.Log.Error("Failed: ", newCollectionName, err.Error()) return } ssn := res.DB.Session.Copy() defer ssn.Close() // Aggregate it! 
- res.DB.AggregateCollection(source_collection_name, ssn, pipeline) + res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getScanningCollectionScript(sysCfg *config.SystemConfig) (string, string, []string, []bson.D) { +func getScanningCollectionScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - source_collection_name := sysCfg.StructureConfig.ConnTable + sourceCollectionName := sysCfg.StructureConfig.ConnTable // Name of the new collection - new_collection_name := sysCfg.ScanningConfig.ScanTable + newCollectionName := sysCfg.ScanningConfig.ScanTable // Get scan threshold - scan_thresh := sysCfg.ScanningConfig.ScanThreshold + scanThresh := sysCfg.ScanningConfig.ScanThreshold // Desired indeces - keys := []string{"-port_count", "$hashed:src", "$hashed:dst"} + keys := []mgo.Index{ + {Key: []string{"-port_count"}}, + {Key: []string{"src", "dst"}, Unique: true}, + } // Aggregation script // nolint: vet @@ -107,7 +112,7 @@ func getScanningCollectionScript(sysCfg *config.SystemConfig) (string, string, [ { {"$match", bson.D{ {"port_count", bson.D{ - {"$gt", scan_thresh}, + {"$gt", scanThresh}, }}, }}, }, @@ -117,9 +122,9 @@ func getScanningCollectionScript(sysCfg *config.SystemConfig) (string, string, [ }}, }, { - {"$out", new_collection_name}, + {"$out", newCollectionName}, }, } - return source_collection_name, new_collection_name, keys, pipeline + return sourceCollectionName, newCollectionName, keys, pipeline } diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index 9c3945a8..9b794de2 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -3,42 +3,44 @@ package structure import ( "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" + mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) // BuildHostsCollection builds the 'host' collection for this timeframe. // Runs via mongodb aggregation. 
Sourced from the 'conn' table. -// TODO: Confirm that this section of code is not faster than an aggregation from -// the 'uconn' table which should have less repeated data. func BuildHostsCollection(res *database.Resources) { // Create the aggregate command - source_collection_name, - new_collection_name, - new_collection_keys, + sourceCollectionName, + newCollectionName, + newCollectionKeys, pipeline := getHosts(res.System) // Aggregate it! - error_check := res.DB.CreateCollection(new_collection_name, new_collection_keys) - if error_check != nil { - res.Log.Error("Failed: ", new_collection_name, error_check) + errorCheck := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) + if errorCheck != nil { + res.Log.Error("Failed: ", newCollectionName, errorCheck) return } ssn := res.DB.Session.Copy() defer ssn.Close() - res.DB.AggregateCollection(source_collection_name, ssn, pipeline) + res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getHosts(sysCfg *config.SystemConfig) (string, string, []string, []bson.D) { +func getHosts(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - source_collection_name := sysCfg.StructureConfig.ConnTable + sourceCollectionName := sysCfg.StructureConfig.ConnTable // Name of the new collection - new_collection_name := sysCfg.StructureConfig.HostTable + newCollectionName := sysCfg.StructureConfig.HostTable // Desired indeces - keys := []string{"$hashed:ip", "local"} + keys := []mgo.Index{ + {Key: []string{"ip"}, Unique: true}, + {Key: []string{"local"}}, + } // Aggregation script // nolint: vet @@ -76,9 +78,9 @@ func getHosts(sysCfg *config.SystemConfig) (string, string, []string, []bson.D) }}, }, { - {"$out", new_collection_name}, + {"$out", newCollectionName}, }, } - return source_collection_name, new_collection_name, keys, pipeline + return sourceCollectionName, newCollectionName, keys, pipeline } diff 
--git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index 27566b8c..05f8d04f 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -4,6 +4,7 @@ import ( "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" + mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) @@ -27,34 +28,40 @@ func GetConnSourcesFromDest(res *database.Resources, ip string) []string { return sources } +//BuildUniqueConnectionsCollection finds the unique connection pairs +//between sources and destinations func BuildUniqueConnectionsCollection(res *database.Resources) { // Create the aggregate command - source_collection_name, - new_collection_name, - new_collection_keys, + sourceCollectionName, + newCollectionName, + newCollectionKeys, pipeline := getUniqueConnectionsScript(res.System) - err := res.DB.CreateCollection(new_collection_name, new_collection_keys) + err := res.DB.CreateCollection(newCollectionName, true, newCollectionKeys) if err != nil { - res.Log.Error("Failed: ", new_collection_name, err.Error()) + res.Log.Error("Failed: ", newCollectionName, err.Error()) return } ssn := res.DB.Session.Copy() defer ssn.Close() - res.DB.AggregateCollection(source_collection_name, ssn, pipeline) + res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, []string, []bson.D) { +func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - source_collection_name := sysCfg.StructureConfig.ConnTable + sourceCollectionName := sysCfg.StructureConfig.ConnTable // Name of the new collection - new_collection_name := sysCfg.StructureConfig.UniqueConnTable + newCollectionName := sysCfg.StructureConfig.UniqueConnTable // Desired Indeces - keys := []string{"$hashed:src", "$hashed:dst"} + keys := []mgo.Index{ + {Key: []string{"src", "dst"}, Unique: true}, + {Key: 
[]string{"$hashed:src"}}, + {Key: []string{"$hashed:dst"}}, + } // Aggregation script // nolint: vet @@ -116,9 +123,9 @@ func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, [] }}, }, { - {"$out", new_collection_name}, + {"$out", newCollectionName}, }, } - return source_collection_name, new_collection_name, keys, pipeline + return sourceCollectionName, newCollectionName, keys, pipeline } diff --git a/analysis/urls/url.go b/analysis/urls/url.go index e1de655b..8883d866 100644 --- a/analysis/urls/url.go +++ b/analysis/urls/url.go @@ -18,7 +18,7 @@ func BuildUrlsCollection(res *database.Resources) { pipeline := getURLCollectionScript(res.System) // Create it - err := res.DB.CreateCollection(newCollectionName, newCollectionKeys) + err := res.DB.CreateCollection(newCollectionName, false, []mgo.Index{}) if err != nil { res.Log.Error("Failed: ", newCollectionName, err.Error()) return @@ -33,9 +33,13 @@ func BuildUrlsCollection(res *database.Resources) { defer ssn.Close() // Aggregate it res.DB.AggregateCollection(newCollectionName, ssn, pipeline) + for _, index := range newCollectionKeys { + ssn.DB(res.DB.GetSelectedDB()).C(res.System.UrlsConfig.UrlsTable). 
+ EnsureIndex(index) + } } -func getURLCollectionScript(sysCfg *config.SystemConfig) (string, string, []string, mgo.MapReduce, []bson.D) { +func getURLCollectionScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, mgo.MapReduce, []bson.D) { // Name of source collection which will be aggregated into the new collection sourceCollectionName := sysCfg.StructureConfig.HTTPTable @@ -43,7 +47,11 @@ func getURLCollectionScript(sysCfg *config.SystemConfig) (string, string, []stri newCollectionName := sysCfg.UrlsConfig.UrlsTable // Desired indeces - keys := []string{"$hashed:url", "-length"} + keys := []mgo.Index{ + {Key: []string{"url", "uri"}, Unique: true}, + {Key: []string{"length"}}, + } + // mgo passed MapReduce javascript function code job := mgo.MapReduce{ Map: `function(){ diff --git a/analysis/useragent/useragent.go b/analysis/useragent/useragent.go index 85778dd7..7ae2dcaa 100644 --- a/analysis/useragent/useragent.go +++ b/analysis/useragent/useragent.go @@ -4,6 +4,7 @@ import ( "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" + mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) @@ -16,7 +17,7 @@ func BuildUserAgentCollection(res *database.Resources) { pipeline := getUserAgentCollectionScript(res.System) // Create it - err := res.DB.CreateCollection(newCollectionName, newCollectionKeys) + err := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) if err != nil { res.Log.Error("Failed: ", newCollectionName, err.Error()) return @@ -29,7 +30,7 @@ func BuildUserAgentCollection(res *database.Resources) { res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getUserAgentCollectionScript(sysCfg *config.SystemConfig) (string, string, []string, []bson.D) { +func getUserAgentCollectionScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection sourceCollectionName := sysCfg.StructureConfig.HTTPTable @@ -37,7 +38,12 @@ 
func getUserAgentCollectionScript(sysCfg *config.SystemConfig) (string, string, newCollectionName := sysCfg.UserAgentConfig.UserAgentTable // Desired indeces - keys := []string{"-times_used"} + keys := []mgo.Index{ + {Key: []string{"user_agent"}, Unique: true}, + {Key: []string{"times_used"}}, + } + + //[]string{"-times_used"} // First aggregation script // nolint: vet diff --git a/database/control.go b/database/control.go index 85428074..282ef523 100644 --- a/database/control.go +++ b/database/control.go @@ -51,7 +51,7 @@ func (d *DB) CollectionExists(table string) bool { //CreateCollection creates a new collection in the currently selected //database with the required indeces -func (d *DB) CreateCollection(name string, indeces []string) error { +func (d *DB) CreateCollection(name string, id bool, indeces []mgo.Index) error { // Make a copy of the current session session := d.Session.Copy() defer session.Close() @@ -69,7 +69,9 @@ func (d *DB) CreateCollection(name string, indeces []string) error { // Create new collection by referencing to it, no need to call Create err := session.DB(d.selected).C(name).Create( - &mgo.CollectionInfo{}, + &mgo.CollectionInfo{ + DisableIdIndex: !id, + }, ) // Make sure it actually got created @@ -78,10 +80,7 @@ func (d *DB) CreateCollection(name string, indeces []string) error { } collection := session.DB(d.selected).C(name) - for _, val := range indeces { - index := mgo.Index{ - Key: []string{val}, - } + for _, index := range indeces { err := collection.EnsureIndex(index) if err != nil { return err @@ -113,7 +112,7 @@ func (d *DB) AggregateCollection(sourceCollection string, if iter.Err() != nil { d.resources.Log.WithFields(log.Fields{ "error": iter.Err().Error(), - }).Panic("Failed aggregate operation") + }).Error("Failed aggregate operation") return nil } return iter @@ -140,7 +139,7 @@ func (d *DB) MapReduceCollection(sourceCollection string, job mgo.MapReduce) boo if err != nil { d.resources.Log.WithFields(log.Fields{ "error": 
err.Error(), - }).Panic("Failed map reduce") + }).Error("Failed map reduce") return false } diff --git a/parser/mongodatastore.go b/parser/mongodatastore.go index ab6bd2d3..ba236fbb 100644 --- a/parser/mongodatastore.go +++ b/parser/mongodatastore.go @@ -80,6 +80,8 @@ func (mongo *MongoDatastore) store(data importedData) { collection: data.file.TargetCollection, indices: data.broData.Indices(), } + mongo.session.DB(coll.database).C(coll.collection). + Create(&mgo.CollectionInfo{DisableIdIndex: true}) collectionMap[data.file.TargetCollection] = coll //start the goroutine for this writer mongo.waitgroup.Add(1) From b8622a0589ef12c1a38e9f5c24841d9c8fcab48e Mon Sep 17 00:00:00 2001 From: logan lembke Date: Wed, 28 Jun 2017 19:31:47 -0600 Subject: [PATCH 014/117] Remove _id from datatypes --- datatypes/beacon/beacon.go | 2 -- datatypes/crossref/crossref.go | 2 -- datatypes/data/data.go | 46 +++++++++++++++----------------- datatypes/dns/dns.go | 3 --- datatypes/structure/structure.go | 1 - 5 files changed, 21 insertions(+), 33 deletions(-) diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index 4897a60f..7c6fd554 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -7,7 +7,6 @@ import ( type ( //straight output from the beacon analysis BeaconAnalysisOutput struct { - ID bson.ObjectId `bson:"_id,omitempty"` UconnID bson.ObjectId `bson:"uconn_id"` TS_iRange int64 `bson:"ts_iRange"` TS_iMode int64 `bson:"ts_iMode"` @@ -29,7 +28,6 @@ type ( //Used in order to join the uconn and beacon tables BeaconAnalysisView struct { - ID bson.ObjectId `bson:"_id,omitempty"` Src string `bson:"src"` Dst string `bson:"dst"` LocalSrc bool `bson:"local_src"` diff --git a/datatypes/crossref/crossref.go b/datatypes/crossref/crossref.go index b844ba6a..30670a91 100644 --- a/datatypes/crossref/crossref.go +++ b/datatypes/crossref/crossref.go @@ -2,12 +2,10 @@ package crossref import ( "github.com/ocmdev/rita/database" - "gopkg.in/mgo.v2/bson" ) type ( 
XRef struct { - ID bson.ObjectId `bson:"_id,omitempty"` ModuleName string `bson:"module"` Host string `bson:"host"` } diff --git a/datatypes/data/data.go b/datatypes/data/data.go index 21a88d2f..9f981574 100644 --- a/datatypes/data/data.go +++ b/datatypes/data/data.go @@ -1,40 +1,36 @@ package data -import "gopkg.in/mgo.v2/bson" - type ( // Conn provides structure for a subset of the fields in the // parser.Conn data structure. If fields are needed that are // not in this Conn structure use parser.Conn instead. Conn struct { - ID bson.ObjectId `bson:"_id,omitempty"` - Ts int64 `bson:"ts,omitempty"` - UID string `bson:"uid"` - Src string `bson:"id_origin_h,omitempty"` - Spt int `bson:"id_origin_p,omitempty"` - Dst string `bson:"id_resp_h,omitempty"` - Dpt int `bson:"id_resp_p,omitempty"` - Dur float64 `bson:"duration,omitempty"` - Proto string `bson:"proto,omitempty"` - LocalSrc bool `bson:"local_orig,omitempty"` - LocalDst bool `bson:"local_resp,omitempty"` - OriginIPBytes int64 `bson:"orig_ip_bytes,omitempty"` + Ts int64 `bson:"ts,omitempty"` + UID string `bson:"uid"` + Src string `bson:"id_origin_h,omitempty"` + Spt int `bson:"id_origin_p,omitempty"` + Dst string `bson:"id_resp_h,omitempty"` + Dpt int `bson:"id_resp_p,omitempty"` + Dur float64 `bson:"duration,omitempty"` + Proto string `bson:"proto,omitempty"` + LocalSrc bool `bson:"local_orig,omitempty"` + LocalDst bool `bson:"local_resp,omitempty"` + OriginIPBytes int64 `bson:"orig_ip_bytes,omitempty"` } // DNS provides structure for a subset of the fields in the // parser.DNS data structure. If fields are needed that are // not in this Conn structure use parser.DNS instead. 
DNS struct { - ID bson.ObjectId `bson:"_id,omitempty"` - Ts int64 `bson:"ts"` - UID string `bson:"uid"` - Src string `bson:"id_origin_h"` - Spt int `bson:"id_origin_p"` - Dst string `bson:"id_resp_h"` - Dpt int `bson:"id_resp_p"` - Proto string `bson:"proto"` - QType string `bson:"qtype_name"` - Query string `bson:"query"` - Answers []string `bson:"answers"` + Ts int64 `bson:"ts"` + UID string `bson:"uid"` + Src string `bson:"id_origin_h"` + Spt int `bson:"id_origin_p"` + Dst string `bson:"id_resp_h"` + Dpt int `bson:"id_resp_p"` + Proto string `bson:"proto"` + QType string `bson:"qtype_name"` + Query string `bson:"query"` + Answers []string `bson:"answers"` } ) diff --git a/datatypes/dns/dns.go b/datatypes/dns/dns.go index 87afc238..96b98991 100644 --- a/datatypes/dns/dns.go +++ b/datatypes/dns/dns.go @@ -1,11 +1,8 @@ package dns -import "gopkg.in/mgo.v2/bson" - type ( //ExplodedDNS maps to an entry in the exploded dns collection ExplodedDNS struct { - ID bson.ObjectId `bson:"_id,omitempty"` Domain string `bson:"domain"` Subdomains int64 `bson:"subdomains"` Visited int64 `bson:"visited"` diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index e189f4ed..137adeaf 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -6,7 +6,6 @@ import ( type ( Host struct { - ID bson.ObjectId `bson:"_id,omitempty"` Ip string `bson:"ip"` Local bool `bson:"local"` } From 935e7c734921041b95e32047099c9b941391bc17 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Tue, 20 Jun 2017 14:38:49 -0600 Subject: [PATCH 015/117] Remove old blacklist module --- analysis/blacklisted/blacklisted.go | 276 --------------------------- analysis/crossref/blacklisted.go | 57 ------ analysis/crossref/crossref.go | 3 +- commands/analyze.go | 4 - commands/show-blacklisted.go | 119 ------------ datatypes/blacklisted/blacklisted.go | 19 -- reporting/report-blacklisted.go | 63 ------ 7 files changed, 1 insertion(+), 540 deletions(-) delete mode 
100644 analysis/blacklisted/blacklisted.go delete mode 100644 analysis/crossref/blacklisted.go delete mode 100644 commands/show-blacklisted.go delete mode 100644 datatypes/blacklisted/blacklisted.go delete mode 100644 reporting/report-blacklisted.go diff --git a/analysis/blacklisted/blacklisted.go b/analysis/blacklisted/blacklisted.go deleted file mode 100644 index d1745335..00000000 --- a/analysis/blacklisted/blacklisted.go +++ /dev/null @@ -1,276 +0,0 @@ -package blacklisted - -import ( - "crypto/md5" - "fmt" - "strconv" - "strings" - "sync" - "time" - - blacklist "github.com/ocmdev/rita-blacklist" - "github.com/ocmdev/rita/analysis/dns" - "github.com/ocmdev/rita/analysis/structure" - "github.com/ocmdev/rita/database" - - "github.com/google/safebrowsing" - - "github.com/ocmdev/rita/util" - - "github.com/ocmdev/rita/datatypes/blacklisted" - datatype_structure "github.com/ocmdev/rita/datatypes/structure" - - log "github.com/sirupsen/logrus" - "gopkg.in/mgo.v2" - "gopkg.in/mgo.v2/bson" -) - -type ( - // Blacklisted provides a handle for the blacklist module - Blacklisted struct { - db string // database name (customer) - batchSize int // BatchSize - prefetch float64 // Prefetch - resources *database.Resources // resources - log *log.Logger // logger - channelSize int // channel size - threadCount int // Thread count - blacklistTable string // Name of blacklist table - safeBrowser *safebrowsing.SafeBrowser // Google safebrowsing api - ritaBL *blacklist.BlackList // Blacklisted host database - } - - // URLShort is a shortened version of the URL datatype that only accounts - // for the IP and url (hostname) - URLShort struct { - URL string `bson:"host"` - } -) - -//SetBlacklistSources finds all of the sources which contacted -//the hosts on the blacklist -func SetBlacklistSources(res *database.Resources, result *blacklisted.Blacklist) { - if result.IsURL { - for _, destIP := range dns.GetIPsFromHost(res, result.Host) { - result.Sources = append(result.Sources, 
structure.GetConnSourcesFromDest(res, destIP)...) - } - } else { - result.Sources = structure.GetConnSourcesFromDest(res, result.Host) - } -} - -//BuildBlacklistedCollection runs the hosts in the dataset against rita-blacklist -func BuildBlacklistedCollection(res *database.Resources) { - collectionName := res.System.BlacklistedConfig.BlacklistTable - //this wil go away in the new blacklist update - collectionKeys := []mgo.Index{ - {Key: []string{"bl_hash"}}, - {Key: []string{"host"}}, - } - err := res.DB.CreateCollection(collectionName, false, collectionKeys) - if err != nil { - res.Log.Error("Failed: ", collectionName, err.Error()) - return - } - newBlacklisted(res).run() -} - -// New will create a new blacklisted module -func newBlacklisted(res *database.Resources) *Blacklisted { - - ret := Blacklisted{ - db: res.DB.GetSelectedDB(), - batchSize: res.System.BatchSize, - prefetch: res.System.Prefetch, - resources: res, - log: res.Log, - channelSize: res.System.BlacklistedConfig.ChannelSize, - threadCount: res.System.BlacklistedConfig.ThreadCount, - blacklistTable: res.System.BlacklistedConfig.BlacklistTable, - } - - // Check if the config file contains a safe browsing key - if len(res.System.SafeBrowsing.APIKey) > 0 { - // Initialize the hook to google's safebrowsing api. - sbConfig := safebrowsing.Config{ - APIKey: res.System.SafeBrowsing.APIKey, - DBPath: res.System.SafeBrowsing.Database, - Logger: res.Log.Writer(), - } - sb, err := safebrowsing.NewSafeBrowser(sbConfig) - if err != nil { - res.Log.WithField("Error", err).Error("Error opening safe browser API") - } else { - ret.safeBrowser = sb - } - } - - // Initialize a rita-blacklist instance. Opens a database connection - // to the blacklist database. This will cause an update if the list is out - // of date. 
- ritabl := blacklist.NewBlackList() - hostport := strings.Split(res.System.DatabaseHost, ":") - if len(hostport) > 1 { - port, err := strconv.Atoi(hostport[1]) - if err == nil { - ritabl.Init(hostport[0], port, res.System.BlacklistedConfig.BlacklistDatabase) - ret.ritaBL = ritabl - } else { - res.Log.WithField("Error", err).Error("Error opening rita-blacklist hook") - } - } - return &ret -} - -// Run runs the module -func (b *Blacklisted) run() { - ipssn := b.resources.DB.Session.Copy() - defer ipssn.Close() - urlssn := b.resources.DB.Session.Copy() - defer urlssn.Close() - - // build up cursors - ipcur := ipssn.DB(b.db).C(b.resources.System.StructureConfig.HostTable) - urlcur := urlssn.DB(b.db).C(b.resources.System.DNSConfig.HostnamesTable) - - ipaddrs := make(chan string, b.channelSize) - urls := make(chan URLShort, b.channelSize) - cash := util.NewCache() - waitgroup := new(sync.WaitGroup) - waitgroup.Add(2 * b.threadCount) - for i := 0; i < b.threadCount; i++ { - go b.processIPs(ipaddrs, waitgroup) - go b.processURLs(urls, waitgroup, cash) - } - - ipit := ipcur.Find(bson.M{"local": false}). - Batch(b.resources.System.BatchSize). - Prefetch(b.resources.System.Prefetch). - Iter() - - urlit := urlcur.Find(nil). - Batch(b.resources.System.BatchSize). - Prefetch(b.resources.System.Prefetch). 
- Iter() - - rwg := new(sync.WaitGroup) - rwg.Add(2) - - go func(iter *mgo.Iter, ipchan chan string) { - defer rwg.Done() - var r datatype_structure.Host - for iter.Next(&r) { - if util.RFC1918(r.Ip) { - continue - } - ipchan <- r.Ip - } - }(ipit, ipaddrs) - - go func(iter *mgo.Iter, urlchan chan URLShort, ipchan chan string) { - defer rwg.Done() - - var u URLShort - for iter.Next(&u) { - urlchan <- u - } - }(urlit, urls, ipaddrs) - - rwg.Wait() - close(ipaddrs) - close(urls) - - waitgroup.Wait() -} - -// processIPs goes through all of the ips in the ip channel -func (b *Blacklisted) processIPs(ip chan string, waitgroup *sync.WaitGroup) { - defer waitgroup.Done() - ipssn := b.resources.DB.Session.Copy() - defer ipssn.Close() - cur := ipssn.DB(b.db).C(b.resources.System.BlacklistedConfig.BlacklistTable) - - for { - ip, ok := <-ip - if !ok { - return - } - // Append the sources that determined this host - // was blacklisted - sourcelist := []string{} - - score := 0 - result := b.ritaBL.CheckHosts([]string{ip}, b.resources.System.BlacklistedConfig.BlacklistDatabase) - if len(result) > 0 { - for _, val := range result[0].Results { - score++ - sourcelist = append(sourcelist, val.HostList) - } - } - - if score > 0 { - err := cur.Insert(&blacklisted.Blacklist{ - BLHash: fmt.Sprintf("%x", md5.Sum([]byte(ip))), - BlType: "ip", - Score: score, - DateChecked: time.Now().Unix(), - Host: ip, - IsIp: true, - IsURL: false, - BlacklistSource: sourcelist, - }) - - if err != nil { - b.log.WithFields(log.Fields{ - "error": err.Error(), - "cur": cur, - }).Error("Error inserting into the blacklist table") - } - } - } -} - -// processURLs goes through all of the urls in the url channel -func (b *Blacklisted) processURLs(urls chan URLShort, waitgroup *sync.WaitGroup, cash util.Cache) { - defer waitgroup.Done() - urlssn := b.resources.DB.Session.Copy() - defer urlssn.Close() - cur := urlssn.DB(b.db).C(b.resources.System.BlacklistedConfig.BlacklistTable) - - for url := range urls { - 
actualURL := url.URL - hsh := fmt.Sprintf("%x", md5.Sum([]byte(actualURL))) - if cash.Lookup(hsh) { - continue - } - - score := 0 - - urlList := []string{actualURL} - - if b.safeBrowser != nil { - result, _ := b.safeBrowser.LookupURLs(urlList) - if len(result) > 0 && len(result[0]) > 0 { - score = 1 - } - } - - if score > 0 { - err := cur.Insert(&blacklisted.Blacklist{ - BLHash: hsh, - BlType: "url", - Score: score, - DateChecked: time.Now().Unix(), - Host: actualURL, - IsIp: false, - IsURL: true, - }) - - if err != nil { - b.log.WithFields(log.Fields{ - "error": err.Error(), - }).Error("Error inserting into the blacklist table") - } - } - } -} diff --git a/analysis/crossref/blacklisted.go b/analysis/crossref/blacklisted.go deleted file mode 100644 index 1956550f..00000000 --- a/analysis/crossref/blacklisted.go +++ /dev/null @@ -1,57 +0,0 @@ -package crossref - -import ( - "github.com/ocmdev/rita/analysis/blacklisted" - "github.com/ocmdev/rita/analysis/dns" - "github.com/ocmdev/rita/database" - blacklistedData "github.com/ocmdev/rita/datatypes/blacklisted" -) - -type ( - //BlacklistedSelector implements the XRefSelector interface for blacklisted - BlacklistedSelector struct{} -) - -//GetName returns "blacklisted" -func (b BlacklistedSelector) GetName() string { - return "blacklisted" -} - -//Select selects blacklisted hosts for XRef analysis -func (b BlacklistedSelector) Select(res *database.Resources) (<-chan string, <-chan string) { - // make channels to return - internalHosts := make(chan string) - externalHosts := make(chan string) - // run the read code async and return the channels immediately - go func() { - ssn := res.DB.Session.Copy() - defer ssn.Close() - - iter := ssn.DB(res.DB.GetSelectedDB()). 
- C(res.System.BlacklistedConfig.BlacklistTable).Find(nil).Iter() - - //iterate through blacklist table - var data blacklistedData.Blacklist - for iter.Next(&data) { - - //load the ips of those who visited the blacklisted site into the struct - //and write them to xref - blacklisted.SetBlacklistSources(res, &data) - for _, src := range data.Sources { - internalHosts <- src - } - - //write the blacklisted site to xref, handle hostname appropriately - if data.IsURL { - for _, dst := range dns.GetIPsFromHost(res, data.Host) { - externalHosts <- dst - } - } else { - externalHosts <- data.Host - } - } - close(internalHosts) - close(externalHosts) - }() - return internalHosts, externalHosts -} diff --git a/analysis/crossref/crossref.go b/analysis/crossref/crossref.go index 071aa073..a76e1f29 100644 --- a/analysis/crossref/crossref.go +++ b/analysis/crossref/crossref.go @@ -14,9 +14,8 @@ import ( func getXRefSelectors() []dataXRef.XRefSelector { beaconing := BeaconingSelector{} scanning := ScanningSelector{} - blacklisted := BlacklistedSelector{} - return []dataXRef.XRefSelector{beaconing, scanning, blacklisted} + return []dataXRef.XRefSelector{beaconing, scanning} } // BuildXRefCollection runs threaded crossref analysis diff --git a/commands/analyze.go b/commands/analyze.go index d74ac872..b189d0d6 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -5,7 +5,6 @@ import ( "time" "github.com/ocmdev/rita/analysis/beacon" - "github.com/ocmdev/rita/analysis/blacklisted" "github.com/ocmdev/rita/analysis/crossref" "github.com/ocmdev/rita/analysis/dns" "github.com/ocmdev/rita/analysis/scanning" @@ -98,9 +97,6 @@ func analyze(inDb string, configFile string) error { logAnalysisFunc("Beaconing", td, res, beacon.BuildBeaconCollection, ) - logAnalysisFunc("Blacklisted", td, res, - blacklisted.BuildBlacklistedCollection, - ) logAnalysisFunc("Scanning", td, res, scanning.BuildScanningCollection, ) diff --git a/commands/show-blacklisted.go b/commands/show-blacklisted.go 
deleted file mode 100644 index 85def566..00000000 --- a/commands/show-blacklisted.go +++ /dev/null @@ -1,119 +0,0 @@ -package commands - -import ( - "fmt" - "os" - "strconv" - "strings" - - "github.com/alecthomas/template" - "github.com/ocmdev/rita/analysis/blacklisted" - "github.com/ocmdev/rita/database" - blacklistedData "github.com/ocmdev/rita/datatypes/blacklisted" - "github.com/olekukonko/tablewriter" - "github.com/urfave/cli" -) - -var sourcesFlag bool - -func init() { - command := cli.Command{ - Name: "show-blacklisted", - Usage: "Print blacklisted information to standard out", - Flags: []cli.Flag{ - databaseFlag, - humanFlag, - cli.BoolFlag{ - Name: "sources, s", - Usage: "Show sources with results", - Destination: &sourcesFlag, - }, - configFlag, - }, - Action: showBlacklisted, - } - - bootstrapCommands(command) -} - -func showBlacklisted(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) - } - - res := database.InitResources(c.String("config")) - res.DB.SelectDB(c.String("database")) - - var result blacklistedData.Blacklist - var results []blacklistedData.Blacklist - - coll := res.DB.Session.DB(c.String("database")).C(res.System.BlacklistedConfig.BlacklistTable) - iter := coll.Find(nil).Sort("-count").Iter() - - if iter.Done() { - return cli.NewExitError("No results were found for "+c.String("database"), -1) - } - - for iter.Next(&result) { - if sourcesFlag { - blacklisted.SetBlacklistSources(res, &result) - } - results = append(results, result) - } - - if c.Bool("human-readable") { - err := showBlacklistedHuman(results) - if err != nil { - return cli.NewExitError(err.Error(), -1) - } - } - err := showBlacklistedCsv(results) - if err != nil { - return cli.NewExitError(err.Error(), -1) - } - return nil -} - -// showBlacklisted prints all blacklisted for a given database -func showBlacklistedHuman(results []blacklistedData.Blacklist) error { - table := tablewriter.NewWriter(os.Stdout) - 
table.SetColWidth(100) - if sourcesFlag { - table.SetHeader([]string{"Host", "Score", "Sources"}) - for _, result := range results { - table.Append([]string{ - result.Host, strconv.Itoa(result.Score), strings.Join(result.Sources, ", "), - }) - } - } else { - table.SetHeader([]string{"Host", "Score"}) - for _, result := range results { - table.Append([]string{result.Host, strconv.Itoa(result.Score)}) - } - } - - table.Render() - return nil -} - -func showBlacklistedCsv(results []blacklistedData.Blacklist) error { - tmpl := "{{.Host}}," + `{{.Score}}` - if sourcesFlag { - tmpl += ",{{range $idx, $src := .Sources}}{{if $idx}} {{end}}{{ $src }}{{end}}\n" - } else { - tmpl += "\n" - } - out, err := template.New("bl").Parse(tmpl) - if err != nil { - return err - } - - for _, result := range results { - err := out.Execute(os.Stdout, result) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } - } - - return nil -} diff --git a/datatypes/blacklisted/blacklisted.go b/datatypes/blacklisted/blacklisted.go deleted file mode 100644 index 73f1d640..00000000 --- a/datatypes/blacklisted/blacklisted.go +++ /dev/null @@ -1,19 +0,0 @@ -package blacklisted - -import "gopkg.in/mgo.v2/bson" - -type ( - /*** Collection/Reporting Structure ***/ - Blacklist struct { - ID bson.ObjectId `bson:"_id,omitempty"` - BLHash string `bson:"bl_hash"` - Host string `bson:"host"` - Score int `bson:"count"` - DateChecked int64 `bson:"date_checked"` - BlType string `bson:"blacklist_type"` - IsURL bool `bson:"is_url"` - IsIp bool `bson:"is_ip"` - BlacklistSource []string `bson:"blacklist_sources"` - Sources []string `bson:"sources,omitempty"` - } -) diff --git a/reporting/report-blacklisted.go b/reporting/report-blacklisted.go deleted file mode 100644 index 5d4d3e6a..00000000 --- a/reporting/report-blacklisted.go +++ /dev/null @@ -1,63 +0,0 @@ -package reporting - -import ( - "bytes" - "html/template" - "os" - - "github.com/ocmdev/rita/analysis/blacklisted" - 
"github.com/ocmdev/rita/database" - blacklistedData "github.com/ocmdev/rita/datatypes/blacklisted" - htmlTempl "github.com/ocmdev/rita/reporting/templates" -) - -func printBlacklisted(db string, res *database.Resources) error { - f, err := os.Create("blacklisted.html") - if err != nil { - return err - } - defer f.Close() - - out, err := template.New("blacklisted.html").Parse(htmlTempl.BlacklistedTempl) - if err != nil { - return err - } - - res.DB.SelectDB(db) - - var result blacklistedData.Blacklist - var results []blacklistedData.Blacklist - - coll := res.DB.Session.DB(db).C(res.System.BlacklistedConfig.BlacklistTable) - iter := coll.Find(nil).Sort("-count").Iter() - - for iter.Next(&result) { - blacklisted.SetBlacklistSources(res, &result) - results = append(results, result) - } - - w, err := getBlacklistWriter(results) - if err != nil { - return err - } - - return out.Execute(f, &htmlTempl.ReportingInfo{DB: db, Writer: template.HTML(w)}) -} - -func getBlacklistWriter(results []blacklistedData.Blacklist) (string, error) { - tmpl := "\n" - w := new(bytes.Buffer) - out, err := template.New("blacklist").Parse(tmpl) - if err != nil { - return "", err - } - - for _, result := range results { - err := out.Execute(w, result) - if err != nil { - return "", err - } - } - - return w.String(), nil -} From 4b058a20dcaa8c5687d4ee8a4c4f18ec07041cf2 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Tue, 20 Jun 2017 17:38:35 -0600 Subject: [PATCH 016/117] Fetch data and check it against rita-bl --- analysis/blacklist/blacklist.go | 85 ++++++++++++++++++++++++ analysis/blacklist/hostnames.go | 59 +++++++++++++++++ analysis/blacklist/ips.go | 111 ++++++++++++++++++++++++++++++++ analysis/blacklist/urls.go | 65 +++++++++++++++++++ 4 files changed, 320 insertions(+) create mode 100644 analysis/blacklist/blacklist.go create mode 100644 analysis/blacklist/hostnames.go create mode 100644 analysis/blacklist/ips.go create mode 100644 analysis/blacklist/urls.go diff --git 
a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go new file mode 100644 index 00000000..53da9f70 --- /dev/null +++ b/analysis/blacklist/blacklist.go @@ -0,0 +1,85 @@ +package blacklist + +import ( + bl "github.com/ocmdev/rita-blacklist2" + blDB "github.com/ocmdev/rita-blacklist2/database" + "github.com/ocmdev/rita-blacklist2/sources/lists" + "github.com/ocmdev/rita-blacklist2/sources/rpc" + "github.com/ocmdev/rita/database" +) + +type resultsChan chan map[string][]blDB.BlacklistResult + +//BuildBlacklistedCollections builds the blacklisted sources, +//blacklisted destinations, blacklist hostnames, and blacklisted urls +//collections +func BuildBlacklistedCollections(res *database.Resources) { + //set up rita-blacklist + ritaBL := bl.NewBlacklist( + blDB.NewMongoDB, + res.System.DatabaseHost, + "rita-blacklist2", + func(err error) { + res.Log.Error(err) + }, + ) + + googleRPC, err := rpc.NewGoogleSafeBrowsingURLsRPC( + res.System.SafeBrowsing.APIKey, + res.System.SafeBrowsing.Database, + res.Log.Writer(), + ) + if err == nil { + ritaBL.SetRPCs(googleRPC) + } else { + res.Log.Error("could not open up google safebrowsing for blacklist checks") + } + + ritaBL.SetLists(lists.NewMyIPmsList(), lists.NewMdlList()) + ritaBL.Update() + + //get our data sources + ssn := res.DB.Session.Copy() + defer ssn.Close() + uniqueSourcesAggregation := getUniqueIPFromUconnPipeline("src") + uniqueDestAggregation := getUniqueIPFromUconnPipeline("dst") + uniqueSourceIter := res.DB.AggregateCollection( + res.System.StructureConfig.UniqueConnTable, + ssn, + uniqueSourcesAggregation, + ) + uniqueDestIter := res.DB.AggregateCollection( + res.System.StructureConfig.UniqueConnTable, + ssn, + uniqueDestAggregation, + ) + hostnamesIter := ssn.DB(res.DB.GetSelectedDB()).C( + res.System.DNSConfig.HostnamesTable, + ).Find(nil).Iter() + urlIter := ssn.DB(res.DB.GetSelectedDB()).C( + res.System.UrlsConfig.UrlsTable, + ).Find(nil).Iter() + + bufferSize := 1000 + + 
buildBlacklistedSourceIPs( + uniqueSourceIter, ssn, ritaBL, + "blSourceIPs", bufferSize, + ) + + buildBlacklistedDestIPs( + uniqueDestIter, ssn, ritaBL, + "blDestIPs", bufferSize, + ) + + buildBlacklistedHostnames( + hostnamesIter, ssn, ritaBL, + "blHostnames", bufferSize, + ) + + buildBlacklistedURLs( + urlIter, ssn, ritaBL, + "blURLs", bufferSize, + ) + +} diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go new file mode 100644 index 00000000..80dae5cf --- /dev/null +++ b/analysis/blacklist/hostnames.go @@ -0,0 +1,59 @@ +package blacklist + +import ( + "unsafe" + + "github.com/ocmdev/rita-blacklist2/list" + + bl "github.com/ocmdev/rita-blacklist2" + mgo "gopkg.in/mgo.v2" +) + +type hostnameShort struct { + Host string `bson:"host"` +} + +func buildBlacklistedHostnames(hostnames *mgo.Iter, ssnToCopy *mgo.Session, + blHandle *bl.Blacklist, destCollection string, bufferSize int) { + //create session to write to + ssn := ssnToCopy.Copy() + defer ssn.Close() + + //create type for communicating rita-bl results + resultsChannel := make(resultsChan) + + go checkRitaBlacklistHostnames(hostnames, blHandle, bufferSize, resultsChannel) + + for results := range resultsChannel { + for hostname, individualResults := range results { + if len(individualResults) > 0 { + _ = hostname + //store the results + } + } + } +} + +func checkRitaBlacklistHostnames(hostnames *mgo.Iter, blHandle *bl.Blacklist, + bufferSize int, resultsChannel resultsChan) { + i := 0 + //read in bufferSize entries and check them. Then ship them off to the writer/ + var buff = make([]hostnameShort, bufferSize) + for hostnames.Next(&buff[i]) { + if i == bufferSize { + //see comment in checkRitaBlacklistIPs + indexesArray := (*[]string)(unsafe.Pointer(&buff)) + resultsChannel <- blHandle.CheckEntries(list.BlacklistedHostnameType, (*indexesArray)...) 
+ //reset the buffer + i = 0 + } + i++ + } + //if there are left overs in the buffer + if i != 0 { + buffSlice := buff[:i] + indexesArray := (*[]string)(unsafe.Pointer(&buffSlice)) + resultsChannel <- blHandle.CheckEntries(list.BlacklistedHostnameType, (*indexesArray)...) + } + close(resultsChannel) +} diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go new file mode 100644 index 00000000..cc9255cf --- /dev/null +++ b/analysis/blacklist/ips.go @@ -0,0 +1,111 @@ +package blacklist + +import ( + "unsafe" + + "github.com/ocmdev/rita-blacklist2/list" + "gopkg.in/mgo.v2/bson" + + bl "github.com/ocmdev/rita-blacklist2" + mgo "gopkg.in/mgo.v2" +) + +type ipAggregateResult struct { + IP string `bson:"ip"` +} + +func getUniqueIPFromUconnPipeline(field string) []bson.D { + return []bson.D{ + { + {"$project", bson.D{ + {"ip", "$" + field}, + }}, + }, + { + {"$group", bson.D{ + {"_id", bson.D{ + {"ip", "$ip"}, + }}, + }}, + }, + { + {"$project", bson.D{ + {"_id", 0}, + {"ip", "$_id.ip"}, + }}, + }, + } +} + +func buildBlacklistedSourceIPs(sourceIPs *mgo.Iter, ssnToCopy *mgo.Session, + blHandle *bl.Blacklist, destCollection string, bufferSize int) { + //create session to write to + ssn := ssnToCopy.Copy() + defer ssn.Close() + + //create type for communicating rita-bl results + resultsChannel := make(resultsChan) + + go checkRitaBlacklistIPs(sourceIPs, blHandle, bufferSize, resultsChannel) + + for results := range resultsChannel { + for ipAddr, individualResults := range results { + if len(individualResults) > 0 { + _ = ipAddr + //store the results + } + } + } +} + +func buildBlacklistedDestIPs(destIPs *mgo.Iter, ssnToCopy *mgo.Session, + blHandle *bl.Blacklist, destCollection string, bufferSize int) { + //create session to write to + ssn := ssnToCopy.Copy() + defer ssn.Close() + + //create type for communicating rita-bl results + resultsChannel := make(resultsChan) + + go checkRitaBlacklistIPs(destIPs, blHandle, bufferSize, resultsChannel) + + for results := 
range resultsChannel { + for ipAddr, individualResults := range results { + if len(individualResults) > 0 { + _ = ipAddr + //store the results + } + } + } +} + +func checkRitaBlacklistIPs(ips *mgo.Iter, blHandle *bl.Blacklist, + bufferSize int, resultsChannel resultsChan) { + i := 0 + //read in bufferSize entries and check them. Then ship them off to the writer/ + var buff = make([]ipAggregateResult, bufferSize) + for ips.Next(&buff[i]) { + if i == bufferSize { + //excuse the memory hacking to get better performance + //We need the buffer to be of type ipAggregateResult for + //proper marshalling, but we need strings for rita-blacklist. + //The underlying memory for ipAggregateResult is that of a string + //since it is the only field in the struct. + //So we can safely view buff as an array of strings using a + //reinterpret cast. Then, we can dereference the pointer to the array + //and use the variadic syntax to pass the array to CheckEntries. + indexesArray := (*[]string)(unsafe.Pointer(&buff)) + resultsChannel <- blHandle.CheckEntries(list.BlacklistedIPType, (*indexesArray)...) + //reset the buffer + i = 0 + } + i++ + } + //if there are left overs in the buffer + if i != 0 { + buffSlice := buff[:i] + indexesArray := (*[]string)(unsafe.Pointer(&buffSlice)) + resultsChannel <- blHandle.CheckEntries(list.BlacklistedIPType, (*indexesArray)...) 
+ } + close(resultsChannel) +} diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go new file mode 100644 index 00000000..9ca496c0 --- /dev/null +++ b/analysis/blacklist/urls.go @@ -0,0 +1,65 @@ +package blacklist + +import ( + "strings" + + "github.com/ocmdev/rita-blacklist2/list" + + bl "github.com/ocmdev/rita-blacklist2" + mgo "gopkg.in/mgo.v2" +) + +type urlShort struct { + URL string `bson:"url"` + URI string `bson:"uri"` +} + +func buildBlacklistedURLs(urls *mgo.Iter, ssnToCopy *mgo.Session, + blHandle *bl.Blacklist, destCollection string, bufferSize int) { + //create session to write to + ssn := ssnToCopy.Copy() + defer ssn.Close() + + //create type for communicating rita-bl results + resultsChannel := make(resultsChan) + + go checkRitaBlacklistURLs(urls, blHandle, bufferSize, resultsChannel) + + for results := range resultsChannel { + for url, individualResults := range results { + if len(individualResults) > 0 { + _ = url + //TODO: resplit proto, url, and uri out :( + //store the results + } + } + } +} + +func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, + bufferSize int, resultsChannel resultsChan) { + i := 0 + //read in bufferSize entries and check them. Then ship them off to the writer/ + var buff = make([]string, bufferSize) + var holder urlShort + for urls.Next(&holder) { + + //assume http url if not specified + if !strings.Contains(holder.URI, "://") { + holder.URI = "http://" + holder.URI + } + buff[i] = holder.URI + holder.URL + + if i == bufferSize { + resultsChannel <- blHandle.CheckEntries(list.BlacklistedURLType, buff...) + //reset the buffer + i = 0 + } + i++ + } + //if there are left overs in the buffer + if i != 0 { + resultsChannel <- blHandle.CheckEntries(list.BlacklistedURLType, buff[:i]...) 
+ } + close(resultsChannel) +} From 5ea7240a354683e182e6f3aff98c7fab49a92d54 Mon Sep 17 00:00:00 2001 From: logan Date: Wed, 21 Jun 2017 17:32:44 -0600 Subject: [PATCH 017/117] Integrated rita-blacklist with rita --- analysis/blacklist/blacklist.go | 43 +++++++------- analysis/blacklist/hostnames.go | 70 +++++++++++++++++++--- analysis/blacklist/ips.go | 99 ++++++++++++++++++++++---------- analysis/blacklist/urls.go | 96 ++++++++++++++++++++++++++----- commands/analyze.go | 4 ++ datatypes/blacklist/blacklist.go | 29 ++++++++++ reporting/report.go | 5 +- 7 files changed, 267 insertions(+), 79 deletions(-) create mode 100644 datatypes/blacklist/blacklist.go diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 53da9f70..04ebcb11 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -6,6 +6,7 @@ import ( "github.com/ocmdev/rita-blacklist2/sources/lists" "github.com/ocmdev/rita-blacklist2/sources/rpc" "github.com/ocmdev/rita/database" + log "github.com/sirupsen/logrus" ) type resultsChan chan map[string][]blDB.BlacklistResult @@ -14,16 +15,22 @@ type resultsChan chan map[string][]blDB.BlacklistResult //blacklisted destinations, blacklist hostnames, and blacklisted urls //collections func BuildBlacklistedCollections(res *database.Resources) { + //capture the current value for the error closure below + currentDB := res.DB.GetSelectedDB() + //set up rita-blacklist ritaBL := bl.NewBlacklist( - blDB.NewMongoDB, - res.System.DatabaseHost, - "rita-blacklist2", - func(err error) { - res.Log.Error(err) + blDB.NewMongoDB, //Use MongoDB for data storage + res.System.DatabaseHost, //Use the DatabaseHost as the connection + "rita-blacklist2", //database + func(err error) { //error handler + res.Log.WithFields(log.Fields{ + "db": currentDB, + }).Error(err) }, ) + //set up google url checker googleRPC, err := rpc.NewGoogleSafeBrowsingURLsRPC( res.System.SafeBrowsing.APIKey, res.System.SafeBrowsing.Database, @@ -35,12 
+42,14 @@ func BuildBlacklistedCollections(res *database.Resources) { res.Log.Error("could not open up google safebrowsing for blacklist checks") } + //set up ritaBL to pull from myIP.ms and MDL ritaBL.SetLists(lists.NewMyIPmsList(), lists.NewMdlList()) ritaBL.Update() //get our data sources ssn := res.DB.Session.Copy() defer ssn.Close() + uniqueSourcesAggregation := getUniqueIPFromUconnPipeline("src") uniqueDestAggregation := getUniqueIPFromUconnPipeline("dst") uniqueSourceIter := res.DB.AggregateCollection( @@ -53,33 +62,21 @@ func BuildBlacklistedCollections(res *database.Resources) { ssn, uniqueDestAggregation, ) - hostnamesIter := ssn.DB(res.DB.GetSelectedDB()).C( + /*hostnamesIter := ssn.DB(res.DB.GetSelectedDB()).C( res.System.DNSConfig.HostnamesTable, ).Find(nil).Iter() urlIter := ssn.DB(res.DB.GetSelectedDB()).C( res.System.UrlsConfig.UrlsTable, - ).Find(nil).Iter() + ).Find(nil).Iter()*/ bufferSize := 1000 - buildBlacklistedSourceIPs( - uniqueSourceIter, ssn, ritaBL, - "blSourceIPs", bufferSize, - ) + buildBlacklistedIPs(uniqueSourceIter, res, ritaBL, bufferSize, true) - buildBlacklistedDestIPs( - uniqueDestIter, ssn, ritaBL, - "blDestIPs", bufferSize, - ) + buildBlacklistedIPs(uniqueDestIter, res, ritaBL, bufferSize, false) - buildBlacklistedHostnames( - hostnamesIter, ssn, ritaBL, - "blHostnames", bufferSize, - ) + //buildBlacklistedHostnames(hostnamesIter, res, ritaBL, bufferSize) - buildBlacklistedURLs( - urlIter, ssn, ritaBL, - "blURLs", bufferSize, - ) + //buildBlacklistedURLs(urlIter, res, ritaBL, bufferSize, "http://") } diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go index 80dae5cf..5a4034fa 100644 --- a/analysis/blacklist/hostnames.go +++ b/analysis/blacklist/hostnames.go @@ -4,31 +4,59 @@ import ( "unsafe" "github.com/ocmdev/rita-blacklist2/list" + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/dns" + "github.com/ocmdev/rita/datatypes/structure" bl "github.com/ocmdev/rita-blacklist2" + data 
"github.com/ocmdev/rita/datatypes/blacklist" + log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" + "gopkg.in/mgo.v2/bson" ) type hostnameShort struct { Host string `bson:"host"` } -func buildBlacklistedHostnames(hostnames *mgo.Iter, ssnToCopy *mgo.Session, - blHandle *bl.Blacklist, destCollection string, bufferSize int) { +//buildBlacklistedHostnames builds a set of blacklisted hostnames from the +//iterator provided, the system config, a handle to rita-blacklist, +//and a buffer of hostnames to check at a time +func buildBlacklistedHostnames(hostnames *mgo.Iter, res *database.Resources, + blHandle *bl.Blacklist, bufferSize int) { //create session to write to - ssn := ssnToCopy.Copy() + ssn := res.DB.Session.Copy() defer ssn.Close() + outputCollection := ssn.DB(res.DB.GetSelectedDB()).C("bl-hostnames") //create type for communicating rita-bl results resultsChannel := make(resultsChan) go checkRitaBlacklistHostnames(hostnames, blHandle, bufferSize, resultsChannel) + //results are maps from ip addresses to arrays of their respective results for results := range resultsChannel { + //loop over the map for hostname, individualResults := range results { + //if the hostname has blacklist results if len(individualResults) > 0 { - _ = hostname - //store the results + blHostname := data.BlacklistedHostname{Hostname: hostname} + err := fillBlacklistedHostname( + &blHostname, + res.DB.GetSelectedDB(), + res.System.DNSConfig.HostnamesTable, + res.System.StructureConfig.UniqueConnTable, + ssn, + ) + if err != nil { + res.Log.WithFields(log.Fields{ + "err": err.Error(), + "hostname": hostname, + "db": res.DB.GetSelectedDB(), + }).Error("could not aggregate info on blacklisted hostname") + continue + } + outputCollection.Insert(&blHostname) } } } @@ -37,10 +65,10 @@ func buildBlacklistedHostnames(hostnames *mgo.Iter, ssnToCopy *mgo.Session, func checkRitaBlacklistHostnames(hostnames *mgo.Iter, blHandle *bl.Blacklist, bufferSize int, resultsChannel resultsChan) { i := 0 - 
//read in bufferSize entries and check them. Then ship them off to the writer/ + //read in bufferSize entries and check them. Then ship them off to the writer var buff = make([]hostnameShort, bufferSize) for hostnames.Next(&buff[i]) { - if i == bufferSize { + if i == bufferSize-1 { //see comment in checkRitaBlacklistIPs indexesArray := (*[]string)(unsafe.Pointer(&buff)) resultsChannel <- blHandle.CheckEntries(list.BlacklistedHostnameType, (*indexesArray)...) @@ -57,3 +85,31 @@ func checkRitaBlacklistHostnames(hostnames *mgo.Iter, blHandle *bl.Blacklist, } close(resultsChannel) } + +func fillBlacklistedHostname(blHostname *data.BlacklistedHostname, db, + hostnamesCollection, uconnCollection string, ssn *mgo.Session) error { + hostnameQuery := bson.M{"host": blHostname.Hostname} + var blHostnameFull dns.Hostname + err := ssn.DB(db).C(hostnamesCollection).Find(hostnameQuery).One(&blHostnameFull) + if err != nil { + return err + } + + connQuery := bson.M{"dst": bson.M{"$in": blHostnameFull.IPs}} + + var totalBytes int + var totalConnections int + var uniqueConnCount int + uniqueConnections := ssn.DB(db).C(uconnCollection).Find(connQuery).Iter() + var uconn structure.UniqueConnection + for uniqueConnections.Next(&uconn) { + totalBytes += uconn.TotalBytes + totalConnections += uconn.ConnectionCount + uniqueConnCount++ + } + blHostname.Connections = totalConnections + blHostname.UniqueConnections = uniqueConnCount + blHostname.TotalBytes = totalBytes + + return nil +} diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index cc9255cf..13079c12 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -3,11 +3,14 @@ package blacklist import ( "unsafe" - "github.com/ocmdev/rita-blacklist2/list" - "gopkg.in/mgo.v2/bson" - bl "github.com/ocmdev/rita-blacklist2" + "github.com/ocmdev/rita-blacklist2/list" + "github.com/ocmdev/rita/database" + data "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + log 
"github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" + "gopkg.in/mgo.v2/bson" ) type ipAggregateResult struct { @@ -37,43 +40,53 @@ func getUniqueIPFromUconnPipeline(field string) []bson.D { } } -func buildBlacklistedSourceIPs(sourceIPs *mgo.Iter, ssnToCopy *mgo.Session, - blHandle *bl.Blacklist, destCollection string, bufferSize int) { +//buildBlacklistedIPs builds a set of blacklisted ips from the +//iterator provided, the system config, a handle to rita-blacklist, +//a buffer of ips to check at a time, and a boolean designating +//whether or not the ips are connection sources or destinations +func buildBlacklistedIPs(ips *mgo.Iter, res *database.Resources, + blHandle *bl.Blacklist, bufferSize int, source bool) { //create session to write to - ssn := ssnToCopy.Copy() + ssn := res.DB.Session.Copy() defer ssn.Close() - //create type for communicating rita-bl results - resultsChannel := make(resultsChan) - - go checkRitaBlacklistIPs(sourceIPs, blHandle, bufferSize, resultsChannel) - - for results := range resultsChannel { - for ipAddr, individualResults := range results { - if len(individualResults) > 0 { - _ = ipAddr - //store the results - } - } + //choose the output collection + var outputCollection *mgo.Collection + if source { + outputCollection = ssn.DB(res.DB.GetSelectedDB()).C("bl-sourceIPs") + } else { + outputCollection = ssn.DB(res.DB.GetSelectedDB()).C("bl-destIPs") } -} - -func buildBlacklistedDestIPs(destIPs *mgo.Iter, ssnToCopy *mgo.Session, - blHandle *bl.Blacklist, destCollection string, bufferSize int) { - //create session to write to - ssn := ssnToCopy.Copy() - defer ssn.Close() //create type for communicating rita-bl results resultsChannel := make(resultsChan) - go checkRitaBlacklistIPs(destIPs, blHandle, bufferSize, resultsChannel) + //kick off the checking process + go checkRitaBlacklistIPs(ips, blHandle, bufferSize, resultsChannel) + //results are maps from ip addresses to arrays of their respective results for results := range resultsChannel { 
+ //loop over the map for ipAddr, individualResults := range results { + //if the ip address has blacklist results if len(individualResults) > 0 { - _ = ipAddr - //store the results + blIP := data.BlacklistedIP{IP: ipAddr} + err := fillBlacklistedIP( + &blIP, + res.DB.GetSelectedDB(), + res.System.StructureConfig.UniqueConnTable, + ssn, + source, + ) + if err != nil { + res.Log.WithFields(log.Fields{ + "err": err.Error(), + "ip": ipAddr, + "db": res.DB.GetSelectedDB(), + }).Error("could not aggregate info on blacklisted IP") + continue + } + outputCollection.Insert(&blIP) } } } @@ -82,10 +95,10 @@ func buildBlacklistedDestIPs(destIPs *mgo.Iter, ssnToCopy *mgo.Session, func checkRitaBlacklistIPs(ips *mgo.Iter, blHandle *bl.Blacklist, bufferSize int, resultsChannel resultsChan) { i := 0 - //read in bufferSize entries and check them. Then ship them off to the writer/ + //read in bufferSize entries and check them. Then ship them off to the writer var buff = make([]ipAggregateResult, bufferSize) for ips.Next(&buff[i]) { - if i == bufferSize { + if i == bufferSize-1 { //excuse the memory hacking to get better performance //We need the buffer to be of type ipAggregateResult for //proper marshalling, but we need strings for rita-blacklist. 
@@ -109,3 +122,29 @@ func checkRitaBlacklistIPs(ips *mgo.Iter, blHandle *bl.Blacklist, } close(resultsChannel) } + +func fillBlacklistedIP(blIP *data.BlacklistedIP, db, uconnCollection string, + ssn *mgo.Session, source bool) error { + var connQuery bson.M + if source { + connQuery = bson.M{"src": blIP.IP} + } else { + connQuery = bson.M{"dst": blIP.IP} + } + + var totalBytes int + var totalConnections int + var uniqueConnCount int + uniqueConnections := ssn.DB(db).C(uconnCollection).Find(connQuery).Iter() + var uconn structure.UniqueConnection + for uniqueConnections.Next(&uconn) { + totalBytes += uconn.TotalBytes + totalConnections += uconn.ConnectionCount + uniqueConnCount++ + } + blIP.Connections = totalConnections + blIP.UniqueConnections = uniqueConnCount + blIP.TotalBytes = totalBytes + + return nil +} diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 9ca496c0..07f593a9 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -1,12 +1,19 @@ package blacklist import ( + "errors" "strings" "github.com/ocmdev/rita-blacklist2/list" bl "github.com/ocmdev/rita-blacklist2" + "github.com/ocmdev/rita/database" + data "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/datatypes/urls" + log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" + "gopkg.in/mgo.v2/bson" ) type urlShort struct { @@ -14,43 +21,63 @@ type urlShort struct { URI string `bson:"uri"` } -func buildBlacklistedURLs(urls *mgo.Iter, ssnToCopy *mgo.Session, - blHandle *bl.Blacklist, destCollection string, bufferSize int) { +//buildBlacklistedURLs builds a set of blacklsited urls from the +//iterator provided, the system config, a handle to rita-blacklist, +//a buffer of urls to check at a time, and protocol prefix string to +//append to results coming from the iterator +func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, + blHandle *bl.Blacklist, bufferSize int, prefix string) { 
//create session to write to - ssn := ssnToCopy.Copy() + ssn := res.DB.Session.Copy() defer ssn.Close() + outputCollection := ssn.DB(res.DB.GetSelectedDB()).C("bl-urls") //create type for communicating rita-bl results resultsChannel := make(resultsChan) - go checkRitaBlacklistURLs(urls, blHandle, bufferSize, resultsChannel) + go checkRitaBlacklistURLs(urls, blHandle, bufferSize, resultsChannel, prefix) + //results are maps from ip addresses to arrays of their respective results for results := range resultsChannel { + //loop over the map for url, individualResults := range results { + //if the ip address has blacklist results if len(individualResults) > 0 { - _ = url - //TODO: resplit proto, url, and uri out :( - //store the results + blURL := data.BlacklistedURL{} + err := fillBlacklistedURL( + &blURL, + url, + res.DB.GetSelectedDB(), + res.System.UrlsConfig.UrlsTable, + res.System.StructureConfig.UniqueConnTable, + ssn, + prefix, + ) + if err != nil { + res.Log.WithFields(log.Fields{ + "err": err.Error(), + "url": url, + "db": res.DB.GetSelectedDB(), + }).Error("could not aggregate info on blacklisted url") + continue + } + outputCollection.Insert(&blURL) } } } } func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, - bufferSize int, resultsChannel resultsChan) { + bufferSize int, resultsChannel resultsChan, prefix string) { i := 0 //read in bufferSize entries and check them. Then ship them off to the writer/ var buff = make([]string, bufferSize) var holder urlShort for urls.Next(&holder) { + //assume http url + buff[i] = prefix + holder.URI + holder.URL - //assume http url if not specified - if !strings.Contains(holder.URI, "://") { - holder.URI = "http://" + holder.URI - } - buff[i] = holder.URI + holder.URL - - if i == bufferSize { + if i == bufferSize-1 { resultsChannel <- blHandle.CheckEntries(list.BlacklistedURLType, buff...) 
//reset the buffer i = 0 @@ -63,3 +90,42 @@ func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, } close(resultsChannel) } + +func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, + urlCollection, uconnCollection string, ssn *mgo.Session, prefix string) error { + var urlQuery bson.M + urlTrimmed := strings.TrimPrefix(longURL, prefix) + resourceIdx := strings.Index(urlTrimmed, "/") + if resourceIdx == -1 { + return errors.New("url does not specify a resource") + } + host := urlTrimmed[:resourceIdx] + resource := urlTrimmed[resourceIdx+1:] + + urlQuery = bson.M{"url": host, "uri": resource} + var blURLFull urls.URL + err := ssn.DB(db).C(urlCollection).Find(urlQuery).One(&blURLFull) + if err != nil { + return err + } + blURL.Host = host + blURL.Resource = resource + + connQuery := bson.M{"dst": bson.M{"$in": blURLFull.IPs}} + + var totalBytes int + var totalConnections int + var uniqueConnCount int + uniqueConnections := ssn.DB(db).C(uconnCollection).Find(connQuery).Iter() + var uconn structure.UniqueConnection + for uniqueConnections.Next(&uconn) { + totalBytes += uconn.TotalBytes + totalConnections += uconn.ConnectionCount + uniqueConnCount++ + } + blURL.Connections = totalConnections + blURL.UniqueConnections = uniqueConnCount + blURL.TotalBytes = totalBytes + + return nil +} diff --git a/commands/analyze.go b/commands/analyze.go index b189d0d6..eb6791fe 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -5,6 +5,7 @@ import ( "time" "github.com/ocmdev/rita/analysis/beacon" + "github.com/ocmdev/rita/analysis/blacklist" "github.com/ocmdev/rita/analysis/crossref" "github.com/ocmdev/rita/analysis/dns" "github.com/ocmdev/rita/analysis/scanning" @@ -94,6 +95,9 @@ func analyze(inDb string, configFile string) error { logAnalysisFunc("User Agent", td, res, useragent.BuildUserAgentCollection, ) + logAnalysisFunc("Blacklisted", td, res, + blacklist.BuildBlacklistedCollections, + ) logAnalysisFunc("Beaconing", td, res, 
beacon.BuildBeaconCollection, ) diff --git a/datatypes/blacklist/blacklist.go b/datatypes/blacklist/blacklist.go new file mode 100644 index 00000000..7d64974e --- /dev/null +++ b/datatypes/blacklist/blacklist.go @@ -0,0 +1,29 @@ +package blacklist + +//BlacklistedIP holds information on a blacklisted IP address and +//the summary statistics on the host +type BlacklistedIP struct { + IP string `bson:"ip"` + Connections int `bson:"conn"` + UniqueConnections int `bson:"uconn"` + TotalBytes int `bson:"total_bytes"` +} + +//BlacklistedHostname holds information on a blacklisted hostname and +//the summary statistics associated with the hosts behind the hostname +type BlacklistedHostname struct { + Hostname string `bson:"hostname"` + Connections int `bson:"conn"` + UniqueConnections int `bson:"uconn"` + TotalBytes int `bson:"total_bytes"` +} + +//BlacklistedURL holds information on a blacklisted URL and the +//summary statistics associated with the hosts behind the url +type BlacklistedURL struct { + Host string `bson:"host"` + Resource string `bson:"resource"` + Connections int `bson:"conn"` + UniqueConnections int `bson:"uconn"` + TotalBytes int `bson:"total_bytes"` +} diff --git a/reporting/report.go b/reporting/report.go index bc155a32..0f0cf209 100644 --- a/reporting/report.go +++ b/reporting/report.go @@ -159,10 +159,7 @@ func writeDB(db string, wd string, res *database.Resources) error { if err != nil { return err } - err = printBlacklisted(db, res) - if err != nil { - return err - } + err = printDNS(db, res) if err != nil { return err From 336c8c88923f86f5e69c33d895115604881ed6cf Mon Sep 17 00:00:00 2001 From: logan lembke Date: Wed, 21 Jun 2017 22:14:52 -0600 Subject: [PATCH 018/117] tested bl ip --- analysis/blacklist/blacklist.go | 8 ++++---- analysis/blacklist/ips.go | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 04ebcb11..62b7aa72 100644 --- 
a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -62,12 +62,12 @@ func BuildBlacklistedCollections(res *database.Resources) { ssn, uniqueDestAggregation, ) - /*hostnamesIter := ssn.DB(res.DB.GetSelectedDB()).C( + hostnamesIter := ssn.DB(res.DB.GetSelectedDB()).C( res.System.DNSConfig.HostnamesTable, ).Find(nil).Iter() urlIter := ssn.DB(res.DB.GetSelectedDB()).C( res.System.UrlsConfig.UrlsTable, - ).Find(nil).Iter()*/ + ).Find(nil).Iter() bufferSize := 1000 @@ -75,8 +75,8 @@ func BuildBlacklistedCollections(res *database.Resources) { buildBlacklistedIPs(uniqueDestIter, res, ritaBL, bufferSize, false) - //buildBlacklistedHostnames(hostnamesIter, res, ritaBL, bufferSize) + buildBlacklistedHostnames(hostnamesIter, res, ritaBL, bufferSize) - //buildBlacklistedURLs(urlIter, res, ritaBL, bufferSize, "http://") + buildBlacklistedURLs(urlIter, res, ritaBL, bufferSize, "http://") } diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index 13079c12..6beadf91 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -18,6 +18,7 @@ type ipAggregateResult struct { } func getUniqueIPFromUconnPipeline(field string) []bson.D { + //nolint: vet return []bson.D{ { {"$project", bson.D{ From 3c5f620f81828803a5b157bd7fb10832a06128e9 Mon Sep 17 00:00:00 2001 From: logan Date: Wed, 21 Jun 2017 22:31:41 -0600 Subject: [PATCH 019/117] Index bl collections --- analysis/blacklist/blacklist.go | 40 ++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 62b7aa72..8ec2213b 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -7,6 +7,7 @@ import ( "github.com/ocmdev/rita-blacklist2/sources/rpc" "github.com/ocmdev/rita/database" log "github.com/sirupsen/logrus" + mgo "gopkg.in/mgo.v2" ) type resultsChan chan map[string][]blDB.BlacklistResult @@ -70,7 +71,13 @@ func BuildBlacklistedCollections(res 
*database.Resources) { ).Find(nil).Iter() bufferSize := 1000 - + collections := []string{"bl-sourceIPs", "bl-destIPs", "bl-hostnames", "bl-urls"} + for _, collection := range collections { + ssn.DB(currentDB).C(collection).Create(&mgo.CollectionInfo{ + DisableIdIndex: true, + }) + } + //TODO: refactor these into modules buildBlacklistedIPs(uniqueSourceIter, res, ritaBL, bufferSize, true) buildBlacklistedIPs(uniqueDestIter, res, ritaBL, bufferSize, false) @@ -79,4 +86,35 @@ func BuildBlacklistedCollections(res *database.Resources) { buildBlacklistedURLs(urlIter, res, ritaBL, bufferSize, "http://") + ensureBLIndexes(ssn, currentDB, "bl-sourceIPs") + ensureBLIndexes(ssn, currentDB, "bl-destIPs") + ensureBLIndexes(ssn, currentDB, "bl-hostnames") + ensureBLIndexes(ssn, currentDB, "bl-urls") + + ssn.DB(currentDB).C("bl-sourceIPs").EnsureIndex(mgo.Index{ + Key: []string{"$hashed:ip"}, + }) + + ssn.DB(currentDB).C("bl-destIPs").EnsureIndex(mgo.Index{ + Key: []string{"$hashed:ip"}, + }) + ssn.DB(currentDB).C("bl-hostnames").EnsureIndex(mgo.Index{ + Key: []string{"$hashed:hostname"}, + }) + ssn.DB(currentDB).C("bl-urls").EnsureIndex(mgo.Index{ + Key: []string{"host", "resource"}, + }) + +} + +func ensureBLIndexes(ssn *mgo.Session, currentDB, collName string) { + ssn.DB(currentDB).C(collName).EnsureIndex(mgo.Index{ + Key: []string{"conn"}, + }) + ssn.DB(currentDB).C(collName).EnsureIndex(mgo.Index{ + Key: []string{"uconn"}, + }) + ssn.DB(currentDB).C(collName).EnsureIndex(mgo.Index{ + Key: []string{"total_bytes"}, + }) } From 8e76dca3de8df3633f1b55a97679481cb6bc80fb Mon Sep 17 00:00:00 2001 From: logan lembke Date: Wed, 21 Jun 2017 23:35:59 -0600 Subject: [PATCH 020/117] Test hostnames. 
Fixed urls and tested them --- analysis/blacklist/blacklist.go | 5 ++++- analysis/blacklist/urls.go | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 8ec2213b..a75324db 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -44,7 +44,10 @@ func BuildBlacklistedCollections(res *database.Resources) { } //set up ritaBL to pull from myIP.ms and MDL - ritaBL.SetLists(lists.NewMyIPmsList(), lists.NewMdlList()) + ritaBL.SetLists( + lists.NewMyIPmsList(), + lists.NewMdlList(), + ) ritaBL.Update() //get our data sources diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 07f593a9..4e524bbc 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -75,7 +75,7 @@ func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, var holder urlShort for urls.Next(&holder) { //assume http url - buff[i] = prefix + holder.URI + holder.URL + buff[i] = prefix + holder.URL + holder.URI if i == bufferSize-1 { resultsChannel <- blHandle.CheckEntries(list.BlacklistedURLType, buff...) @@ -100,7 +100,7 @@ func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, return errors.New("url does not specify a resource") } host := urlTrimmed[:resourceIdx] - resource := urlTrimmed[resourceIdx+1:] + resource := urlTrimmed[resourceIdx:] urlQuery = bson.M{"url": host, "uri": resource} var blURLFull urls.URL From 6f055e8bd90343007ca755d047dafc117bd67214 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 23 Jun 2017 15:21:27 -0600 Subject: [PATCH 021/117] Add custom bl's. Alter config file. 
Fix urls --- analysis/blacklist/blacklist.go | 155 +++++++++++++++++++++++++------- analysis/blacklist/hostnames.go | 12 ++- analysis/blacklist/ips.go | 16 +++- analysis/blacklist/urls.go | 4 +- config/config.go | 51 +++++++---- etc/rita.yaml | 47 ++++++++-- 6 files changed, 216 insertions(+), 69 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index a75324db..30b020b0 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -1,10 +1,16 @@ package blacklist import ( + "io" + "net/http" + "os" + bl "github.com/ocmdev/rita-blacklist2" blDB "github.com/ocmdev/rita-blacklist2/database" + "github.com/ocmdev/rita-blacklist2/list" "github.com/ocmdev/rita-blacklist2/sources/lists" "github.com/ocmdev/rita-blacklist2/sources/rpc" + "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" @@ -31,23 +37,13 @@ func BuildBlacklistedCollections(res *database.Resources) { }, ) - //set up google url checker - googleRPC, err := rpc.NewGoogleSafeBrowsingURLsRPC( - res.System.SafeBrowsing.APIKey, - res.System.SafeBrowsing.Database, - res.Log.Writer(), - ) - if err == nil { - ritaBL.SetRPCs(googleRPC) - } else { - res.Log.Error("could not open up google safebrowsing for blacklist checks") - } + //set up the lists to check against + ritaBL.SetLists(buildBlacklists(res.System)...) - //set up ritaBL to pull from myIP.ms and MDL - ritaBL.SetLists( - lists.NewMyIPmsList(), - lists.NewMdlList(), - ) + //set up remote calls to check against + ritaBL.SetRPCs(buildBlacklistRPCS(res)...) 
+ + //update the lists ritaBL.Update() //get our data sources @@ -56,6 +52,7 @@ func BuildBlacklistedCollections(res *database.Resources) { uniqueSourcesAggregation := getUniqueIPFromUconnPipeline("src") uniqueDestAggregation := getUniqueIPFromUconnPipeline("dst") + uniqueSourceIter := res.DB.AggregateCollection( res.System.StructureConfig.UniqueConnTable, ssn, @@ -66,21 +63,27 @@ func BuildBlacklistedCollections(res *database.Resources) { ssn, uniqueDestAggregation, ) - hostnamesIter := ssn.DB(res.DB.GetSelectedDB()).C( - res.System.DNSConfig.HostnamesTable, - ).Find(nil).Iter() - urlIter := ssn.DB(res.DB.GetSelectedDB()).C( - res.System.UrlsConfig.UrlsTable, - ).Find(nil).Iter() - - bufferSize := 1000 - collections := []string{"bl-sourceIPs", "bl-destIPs", "bl-hostnames", "bl-urls"} + hostnamesIter := ssn.DB(currentDB).C(res.System.DNSConfig.HostnamesTable). + Find(nil).Iter() + urlIter := ssn.DB(currentDB).C(res.System.UrlsConfig.UrlsTable). + Find(nil).Iter() + + //create the collections + sourceIPs := res.System.BlacklistedConfig.SourceIPsTable + destIPs := res.System.BlacklistedConfig.DestIPsTable + hostnames := res.System.BlacklistedConfig.HostnamesTable + urls := res.System.BlacklistedConfig.UrlsTable + + collections := []string{sourceIPs, destIPs, hostnames, urls} for _, collection := range collections { ssn.DB(currentDB).C(collection).Create(&mgo.CollectionInfo{ DisableIdIndex: true, }) } + + //create the data //TODO: refactor these into modules + bufferSize := 1000 buildBlacklistedIPs(uniqueSourceIter, res, ritaBL, bufferSize, true) buildBlacklistedIPs(uniqueDestIter, res, ritaBL, bufferSize, false) @@ -89,22 +92,22 @@ func BuildBlacklistedCollections(res *database.Resources) { buildBlacklistedURLs(urlIter, res, ritaBL, bufferSize, "http://") - ensureBLIndexes(ssn, currentDB, "bl-sourceIPs") - ensureBLIndexes(ssn, currentDB, "bl-destIPs") - ensureBLIndexes(ssn, currentDB, "bl-hostnames") - ensureBLIndexes(ssn, currentDB, "bl-urls") + //index the data + 
for _, collection := range collections { + ensureBLIndexes(ssn, currentDB, collection) + } - ssn.DB(currentDB).C("bl-sourceIPs").EnsureIndex(mgo.Index{ + ssn.DB(currentDB).C(sourceIPs).EnsureIndex(mgo.Index{ Key: []string{"$hashed:ip"}, }) - ssn.DB(currentDB).C("bl-destIPs").EnsureIndex(mgo.Index{ + ssn.DB(currentDB).C(destIPs).EnsureIndex(mgo.Index{ Key: []string{"$hashed:ip"}, }) - ssn.DB(currentDB).C("bl-hostnames").EnsureIndex(mgo.Index{ + ssn.DB(currentDB).C(hostnames).EnsureIndex(mgo.Index{ Key: []string{"$hashed:hostname"}, }) - ssn.DB(currentDB).C("bl-urls").EnsureIndex(mgo.Index{ + ssn.DB(currentDB).C(urls).EnsureIndex(mgo.Index{ Key: []string{"host", "resource"}, }) @@ -121,3 +124,89 @@ func ensureBLIndexes(ssn *mgo.Session, currentDB, collName string) { Key: []string{"total_bytes"}, }) } + +func buildBlacklists(system *config.SystemConfig) []list.List { + //build up the lists + var blacklists []list.List + //use prebuilt lists + if system.BlacklistedConfig.UseIPms { + blacklists = append(blacklists, lists.NewMyIPmsList()) + } + if system.BlacklistedConfig.UseDNSBH { + blacklists = append(blacklists, lists.NewDNSBHList()) + } + if system.BlacklistedConfig.UseMDL { + blacklists = append(blacklists, lists.NewMdlList()) + } + //use custom lists + ipLists := buildCustomBlacklists( + list.BlacklistedIPType, + system.BlacklistedConfig.IPBlacklists, + ) + + hostLists := buildCustomBlacklists( + list.BlacklistedHostnameType, + system.BlacklistedConfig.HostnameBlacklists, + ) + + urlLists := buildCustomBlacklists( + list.BlacklistedURLType, + system.BlacklistedConfig.URLBlacklists, + ) + blacklists = append(blacklists, ipLists...) + blacklists = append(blacklists, hostLists...) + blacklists = append(blacklists, urlLists...) 
+ return blacklists +} + +func buildCustomBlacklists(entryType list.BlacklistedEntryType, paths []string) []list.List { + var blacklists []list.List + for _, path := range paths { + newList := lists.NewLineSeperatedList( + entryType, + path, + 86400, + tryOpenFileThenURL(path), + ) + blacklists = append(blacklists, newList) + } + return blacklists +} + +//provide a closure over path to read the file into a line separated blacklist +func tryOpenFileThenURL(path string) func() (io.ReadCloser, error) { + return func() (io.ReadCloser, error) { + _, err := os.Stat(path) + if err == nil { + file, err := os.Open(path) + if err != nil { + return nil, err + } + return file, nil + } + resp, err := http.Get(path) + if err != nil { + return nil, err + } + return resp.Body, nil + } +} + +func buildBlacklistRPCS(res *database.Resources) []rpc.RPC { + var rpcs []rpc.RPC + //set up google url checker + if len(res.System.BlacklistedConfig.SafeBrowsing.APIKey) > 0 && + len(res.System.BlacklistedConfig.SafeBrowsing.Database) > 0 { + googleRPC, err := rpc.NewGoogleSafeBrowsingURLsRPC( + res.System.BlacklistedConfig.SafeBrowsing.APIKey, + res.System.BlacklistedConfig.SafeBrowsing.Database, + res.Log.Writer(), + ) + if err == nil { + rpcs = append(rpcs, googleRPC) + } else { + res.Log.Warn("could not open up google safebrowsing for blacklist checks") + } + } + return rpcs +} diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go index 5a4034fa..25c5392c 100644 --- a/analysis/blacklist/hostnames.go +++ b/analysis/blacklist/hostnames.go @@ -28,7 +28,9 @@ func buildBlacklistedHostnames(hostnames *mgo.Iter, res *database.Resources, ssn := res.DB.Session.Copy() defer ssn.Close() - outputCollection := ssn.DB(res.DB.GetSelectedDB()).C("bl-hostnames") + outputCollection := ssn.DB(res.DB.GetSelectedDB()).C( + res.System.BlacklistedConfig.HostnamesTable, + ) //create type for communicating rita-bl results resultsChannel := make(resultsChan) @@ -71,7 +73,9 @@ func 
checkRitaBlacklistHostnames(hostnames *mgo.Iter, blHandle *bl.Blacklist, if i == bufferSize-1 { //see comment in checkRitaBlacklistIPs indexesArray := (*[]string)(unsafe.Pointer(&buff)) - resultsChannel <- blHandle.CheckEntries(list.BlacklistedHostnameType, (*indexesArray)...) + resultsChannel <- blHandle.CheckEntries( + list.BlacklistedHostnameType, (*indexesArray)..., + ) //reset the buffer i = 0 } @@ -81,7 +85,9 @@ func checkRitaBlacklistHostnames(hostnames *mgo.Iter, blHandle *bl.Blacklist, if i != 0 { buffSlice := buff[:i] indexesArray := (*[]string)(unsafe.Pointer(&buffSlice)) - resultsChannel <- blHandle.CheckEntries(list.BlacklistedHostnameType, (*indexesArray)...) + resultsChannel <- blHandle.CheckEntries( + list.BlacklistedHostnameType, (*indexesArray)..., + ) } close(resultsChannel) } diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index 6beadf91..127717a2 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -54,9 +54,13 @@ func buildBlacklistedIPs(ips *mgo.Iter, res *database.Resources, //choose the output collection var outputCollection *mgo.Collection if source { - outputCollection = ssn.DB(res.DB.GetSelectedDB()).C("bl-sourceIPs") + outputCollection = ssn.DB(res.DB.GetSelectedDB()).C( + res.System.BlacklistedConfig.SourceIPsTable, + ) } else { - outputCollection = ssn.DB(res.DB.GetSelectedDB()).C("bl-destIPs") + outputCollection = ssn.DB(res.DB.GetSelectedDB()).C( + res.System.BlacklistedConfig.DestIPsTable, + ) } //create type for communicating rita-bl results @@ -109,7 +113,9 @@ func checkRitaBlacklistIPs(ips *mgo.Iter, blHandle *bl.Blacklist, //reinterpret cast. Then, we can dereference the pointer to the array //and use the variadic syntax to pass the array to CheckEntries. indexesArray := (*[]string)(unsafe.Pointer(&buff)) - resultsChannel <- blHandle.CheckEntries(list.BlacklistedIPType, (*indexesArray)...) 
+ resultsChannel <- blHandle.CheckEntries( + list.BlacklistedIPType, (*indexesArray)..., + ) //reset the buffer i = 0 } @@ -119,7 +125,9 @@ func checkRitaBlacklistIPs(ips *mgo.Iter, blHandle *bl.Blacklist, if i != 0 { buffSlice := buff[:i] indexesArray := (*[]string)(unsafe.Pointer(&buffSlice)) - resultsChannel <- blHandle.CheckEntries(list.BlacklistedIPType, (*indexesArray)...) + resultsChannel <- blHandle.CheckEntries( + list.BlacklistedIPType, (*indexesArray)..., + ) } close(resultsChannel) } diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 4e524bbc..36760c92 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -31,7 +31,9 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, ssn := res.DB.Session.Copy() defer ssn.Close() - outputCollection := ssn.DB(res.DB.GetSelectedDB()).C("bl-urls") + outputCollection := ssn.DB(res.DB.GetSelectedDB()).C( + res.System.BlacklistedConfig.UrlsTable, + ) //create type for communicating rita-bl results resultsChannel := make(resultsChan) diff --git a/config/config.go b/config/config.go index e66918af..4196ad92 100644 --- a/config/config.go +++ b/config/config.go @@ -16,21 +16,20 @@ var VERSION = "undefined" type ( //SystemConfig is the container for other config sections SystemConfig struct { - BatchSize int `yaml:"BatchSize"` - DatabaseHost string `yaml:"DatabaseHost"` - Prefetch float64 `yaml:"Prefetch"` - LogConfig LogCfg `yaml:"LogConfig"` - BlacklistedConfig BlacklistedCfg `yaml:"BlackListed"` - DNSConfig DNSCfg `yaml:"Dns"` - CrossrefConfig CrossrefCfg `yaml:"Crossref"` - ScanningConfig ScanningCfg `yaml:"Scanning"` - StructureConfig StructureCfg `yaml:"Structure"` - BeaconConfig BeaconCfg `yaml:"Beacon"` - UrlsConfig UrlsCfg `yaml:"Urls"` - UserAgentConfig UserAgentCfg `yaml:"UserAgent"` - BroConfig BroCfg `yaml:"Bro"` - SafeBrowsing SafeBrowsingCfg `yaml:"SafeBrowsing"` - MetaTables MetaCfg `yaml:"MetaTables"` + BatchSize int `yaml:"BatchSize"` + DatabaseHost 
string `yaml:"DatabaseHost"` + Prefetch float64 `yaml:"Prefetch"` + LogConfig LogCfg `yaml:"LogConfig"` + BlacklistedConfig BlacklistedCfg `yaml:"BlackListed"` + DNSConfig DNSCfg `yaml:"Dns"` + CrossrefConfig CrossrefCfg `yaml:"Crossref"` + ScanningConfig ScanningCfg `yaml:"Scanning"` + StructureConfig StructureCfg `yaml:"Structure"` + BeaconConfig BeaconCfg `yaml:"Beacon"` + UrlsConfig UrlsCfg `yaml:"Urls"` + UserAgentConfig UserAgentCfg `yaml:"UserAgent"` + BroConfig BroCfg `yaml:"Bro"` + MetaTables MetaCfg `yaml:"MetaTables"` Version string } @@ -54,10 +53,18 @@ type ( //BlacklistedCfg is used to control the blacklisted analysis module BlacklistedCfg struct { - ThreadCount int `yaml:"ThreadCount"` - ChannelSize int `yaml:"ChannelSize"` - BlacklistTable string `yaml:"BlackListTable"` - BlacklistDatabase string `yaml:"Database"` + BlacklistDatabase string `yaml:"Database"` + UseIPms bool `yaml:"myIP.ms"` + UseDNSBH bool `yaml:"MalwareDomains.com"` + UseMDL bool `yaml:"MalwareDomainList.com"` + SafeBrowsing SafeBrowsingCfg `yaml:"SafeBrowsing"` + IPBlacklists []string `yaml:"CustomIPBlacklists"` + HostnameBlacklists []string `yaml:"CustomHostnameBlacklists"` + URLBlacklists []string `yaml:"CustomURLBlacklists"` + SourceIPsTable string `yaml:"SourceIPsTable"` + DestIPsTable string `yaml:"DestIPsTable"` + HostnamesTable string `yaml:"HostnamesTable"` + UrlsTable string `yaml:"UrlsTable"` } //DNSCfg is used to control the dns analysis module @@ -176,6 +183,12 @@ func expandConfig(reflected reflect.Value) { expandConfig(f) } else if f.Kind() == reflect.String { f.SetString(os.ExpandEnv(f.String())) + } else if f.Kind() == reflect.Slice && f.Type().Elem().Kind() == reflect.String { + strs := f.Interface().([]string) + for i, str := range strs { + strs[i] = os.ExpandEnv(str) + } + f.Set(reflect.ValueOf(strs)) } } } diff --git a/etc/rita.yaml b/etc/rita.yaml index 136f5555..9f315f79 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -56,9 +56,44 @@ Bro: # records 
unimported if there is an error ImportBuffer: 100000 -SafeBrowsing: - APIKey: "" - Database: $HOME/.rita/safebrowsing +BlackListed: + Database: rita-blacklist + # These are blacklists built into rita-blacklist. Set these to false + # to disable checks against them. + myIP.ms: true + MalwareDomains.com: true + MalwareDomainList.com: true + + # Google SafeBrowsing requires an api key and a file to cache the results in. + # If either APIKey or Database is an empty string, Google SafeBrowsing will + # not be queried. + SafeBrowsing: + APIKey: "" + Database: $HOME/.rita/safebrowsing + + # These are custom blacklists that you may define. They are lists of either + # file paths or urls. These custom blacklists are expected to be simple, + # line separated text documents containing a list of blacklisted entries. + + # Example: CustomIPBlacklists: ["$HOME/.rita/myIPBlacklist.txt"] + # myIPBlacklist.txt would look like this: + # 192.168.0.1 + # 10.10.174.1 + + # Lists containing both IPv4 and IPv6 addresses are acceptable + CustomIPBlacklists: [] + # Lists containing hostnames, domain names, and FQDNs are acceptable + CustomHostnameBlacklists: [] + # URLs must each contain a protocol, a host, and a resource + # Ex: http://google.com/ + # Ex: ftp://myftpserver.com/a/file/over/here.txt + CustomURLBlacklists: [] + + # Table names + SourceIPsTable: blSourceIPs + DestIPsTable: blDestIPs + HostnamesTable: blHostnames + UrlsTable: blUrls # NOTE: DO NOT CHANGE THE SETTINGS BELOW UNLESS YOU ARE FAMILIAR WITH THE CODE # Structure: @@ -68,12 +103,6 @@ Structure: UniqueConnectionTable: uconn HostTable: host -BlackListed: - ThreadCount: 2 - ChannelSize: 1000 - BlackListTable: blacklisted - Database: rita-blacklist - Dns: ExplodedDnsTable: explodedDns HostnamesTable: hostnames From 98a03e95c2a04352e89352698ae9139c4d6ba830 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 23 Jun 2017 15:43:29 -0600 Subject: [PATCH 022/117] Add which lists produced the results --- 
analysis/blacklist/blacklist.go | 6 +++++- analysis/blacklist/hostnames.go | 3 +++ analysis/blacklist/ips.go | 3 +++ analysis/blacklist/urls.go | 3 +++ datatypes/blacklist/blacklist.go | 29 ++++++++++++++++------------- 5 files changed, 30 insertions(+), 14 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 30b020b0..699d6e2d 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -110,9 +110,10 @@ func BuildBlacklistedCollections(res *database.Resources) { ssn.DB(currentDB).C(urls).EnsureIndex(mgo.Index{ Key: []string{"host", "resource"}, }) - } +//ensureBLIndexes ensures the sortable fields are indexed +//on the blacklist results func ensureBLIndexes(ssn *mgo.Session, currentDB, collName string) { ssn.DB(currentDB).C(collName).EnsureIndex(mgo.Index{ Key: []string{"conn"}, @@ -125,6 +126,7 @@ func ensureBLIndexes(ssn *mgo.Session, currentDB, collName string) { }) } +//buildBlacklists gathers the blacklists to check against func buildBlacklists(system *config.SystemConfig) []list.List { //build up the lists var blacklists []list.List @@ -159,6 +161,7 @@ func buildBlacklists(system *config.SystemConfig) []list.List { return blacklists } +//buildCustomBlacklists gathers a custom blacklist from a url or file path func buildCustomBlacklists(entryType list.BlacklistedEntryType, paths []string) []list.List { var blacklists []list.List for _, path := range paths { @@ -192,6 +195,7 @@ func tryOpenFileThenURL(path string) func() (io.ReadCloser, error) { } } +//buildBlacklistRPCS gathers the remote procedures to check against func buildBlacklistRPCS(res *database.Resources) []rpc.RPC { var rpcs []rpc.RPC //set up google url checker diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go index 25c5392c..0a926ed4 100644 --- a/analysis/blacklist/hostnames.go +++ b/analysis/blacklist/hostnames.go @@ -43,6 +43,9 @@ func buildBlacklistedHostnames(hostnames *mgo.Iter, res 
*database.Resources, //if the hostname has blacklist results if len(individualResults) > 0 { blHostname := data.BlacklistedHostname{Hostname: hostname} + for _, result := range individualResults { + blHostname.Lists = append(blHostname.Lists, result.List) + } err := fillBlacklistedHostname( &blHostname, res.DB.GetSelectedDB(), diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index 127717a2..eaee036c 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -76,6 +76,9 @@ func buildBlacklistedIPs(ips *mgo.Iter, res *database.Resources, //if the ip address has blacklist results if len(individualResults) > 0 { blIP := data.BlacklistedIP{IP: ipAddr} + for _, result := range individualResults { + blIP.Lists = append(blIP.Lists, result.List) + } err := fillBlacklistedIP( &blIP, res.DB.GetSelectedDB(), diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 36760c92..4d47e8b3 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -46,6 +46,9 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, //if the ip address has blacklist results if len(individualResults) > 0 { blURL := data.BlacklistedURL{} + for _, result := range individualResults { + blURL.Lists = append(blURL.Lists, result.List) + } err := fillBlacklistedURL( &blURL, url, diff --git a/datatypes/blacklist/blacklist.go b/datatypes/blacklist/blacklist.go index 7d64974e..5feebe13 100644 --- a/datatypes/blacklist/blacklist.go +++ b/datatypes/blacklist/blacklist.go @@ -3,27 +3,30 @@ package blacklist //BlacklistedIP holds information on a blacklisted IP address and //the summary statistics on the host type BlacklistedIP struct { - IP string `bson:"ip"` - Connections int `bson:"conn"` - UniqueConnections int `bson:"uconn"` - TotalBytes int `bson:"total_bytes"` + IP string `bson:"ip"` + Connections int `bson:"conn"` + UniqueConnections int `bson:"uconn"` + TotalBytes int `bson:"total_bytes"` + Lists []string `bson:"lists"` } 
//BlacklistedHostname holds information on a blacklisted hostname and //the summary statistics associated with the hosts behind the hostname type BlacklistedHostname struct { - Hostname string `bson:"hostname"` - Connections int `bson:"conn"` - UniqueConnections int `bson:"uconn"` - TotalBytes int `bson:"total_bytes"` + Hostname string `bson:"hostname"` + Connections int `bson:"conn"` + UniqueConnections int `bson:"uconn"` + TotalBytes int `bson:"total_bytes"` + Lists []string `bson:"lists"` } //BlacklistedURL holds information on a blacklisted URL and the //summary statistics associated with the hosts behind the url type BlacklistedURL struct { - Host string `bson:"host"` - Resource string `bson:"resource"` - Connections int `bson:"conn"` - UniqueConnections int `bson:"uconn"` - TotalBytes int `bson:"total_bytes"` + Host string `bson:"host"` + Resource string `bson:"resource"` + Connections int `bson:"conn"` + UniqueConnections int `bson:"uconn"` + TotalBytes int `bson:"total_bytes"` + Lists []string `bson:"lists"` } From 89d446715be29151b6f49928389101ad97aae873 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 23 Jun 2017 16:01:14 -0600 Subject: [PATCH 023/117] write initial show-blacklisted --- commands/show-blacklisted.go | 60 ++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 commands/show-blacklisted.go diff --git a/commands/show-blacklisted.go b/commands/show-blacklisted.go new file mode 100644 index 00000000..eb3433e6 --- /dev/null +++ b/commands/show-blacklisted.go @@ -0,0 +1,60 @@ +package commands + +import ( + "github.com/ocmdev/rita/database" + "github.com/urfave/cli" +) + +func init() { + + blSourceIPs := cli.Command{ + Name: "show-bl-source-ips", + Flags: []cli.Flag{ + humanFlag, + configFlag, + }, + Usage: "Print blacklisted IPs which initiated connections", + Action: printBLSourceIPs, + } + + blDestIPs := cli.Command{ + Name: "show-bl-dest-ips", + Usage: "Print blacklisted IPs which recieved connections", 
+ } + + blHostnames := cli.Command{ + Name: "show-bl-hostnames", + Usage: "Print blacklisted hostnames which recieved connections", + } + + blURLs := cli.Command{ + Name: "show-bl-urls", + Usage: "Print blacklisted URLs which were visited", + } + + bootstrapCommands(blSourceIPs, blDestIPs, blHostnames, blURLs) +} + +func printBLSourceIPs(c *cli.Context) error { + res := database.InitResources(c.String("config")) + _ = res + return nil +} + +func printBLDestIPs(c *cli.Context) error { + res := database.InitResources(c.String("config")) + _ = res + return nil +} + +func printBLHostnames(c *cli.Context) error { + res := database.InitResources(c.String("config")) + _ = res + return nil +} + +func printBLURLs(c *cli.Context) error { + res := database.InitResources(c.String("config")) + _ = res + return nil +} From f71ad8190dfdf244abca12e0752fbb861289bb5d Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 23 Jun 2017 18:06:23 -0600 Subject: [PATCH 024/117] Rudimentary printing for all bl types --- commands/show-blacklisted.go | 212 +++++++++++++++++++++++++++++-- datatypes/blacklist/blacklist.go | 3 + 2 files changed, 205 insertions(+), 10 deletions(-) diff --git a/commands/show-blacklisted.go b/commands/show-blacklisted.go index eb3433e6..2c0563d9 100644 --- a/commands/show-blacklisted.go +++ b/commands/show-blacklisted.go @@ -1,16 +1,35 @@ package commands import ( + "fmt" + + "github.com/ocmdev/rita/analysis/dns" "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/datatypes/urls" "github.com/urfave/cli" + "gopkg.in/mgo.v2/bson" ) func init() { + sortFlag := cli.StringFlag{ + Name: "sort, s", + Usage: "Sort by conn (# of connections), uconn (# of unique connections), total_bytes (# of bytes)", + Value: "conn", + } + connFlag := cli.BoolFlag{ + Name: "connected, C", + Usage: "Show hosts which were connected to this blacklisted entry", + } blSourceIPs := cli.Command{ 
Name: "show-bl-source-ips", Flags: []cli.Flag{ + databaseFlag, humanFlag, + connFlag, + sortFlag, configFlag, }, Usage: "Print blacklisted IPs which initiated connections", @@ -18,43 +37,216 @@ func init() { } blDestIPs := cli.Command{ - Name: "show-bl-dest-ips", - Usage: "Print blacklisted IPs which recieved connections", + Name: "show-bl-dest-ips", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + connFlag, + sortFlag, + configFlag, + }, + Usage: "Print blacklisted IPs which recieved connections", + Action: printBLDestIPs, } blHostnames := cli.Command{ - Name: "show-bl-hostnames", - Usage: "Print blacklisted hostnames which recieved connections", + Name: "show-bl-hostnames", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + connFlag, + sortFlag, + configFlag, + }, + Usage: "Print blacklisted hostnames which recieved connections", + Action: printBLHostnames, } blURLs := cli.Command{ - Name: "show-bl-urls", - Usage: "Print blacklisted URLs which were visited", + Name: "show-bl-urls", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + connFlag, + sortFlag, + configFlag, + }, + Usage: "Print blacklisted URLs which were visited", + Action: printBLURLs, } bootstrapCommands(blSourceIPs, blDestIPs, blHostnames, blURLs) } +func parseArgs(c *cli.Context) (string, string, bool, error) { + db := c.String("database") + sort := c.String("sort") + connected := c.Bool("connected") + if db == "" { + return db, sort, connected, cli.NewExitError("Specify a database with -d", -1) + } + if sort != "conn" && sort != "uconn" && sort != "total_bytes" { + return db, sort, connected, cli.NewExitError("Invalid option passed to sort flag", -1) + } + return db, sort, connected, nil +} + func printBLSourceIPs(c *cli.Context) error { + db, sort, connected, err := parseArgs(c) + if err != nil { + return err + } res := database.InitResources(c.String("config")) - _ = res + + var blIPs []blacklist.BlacklistedIP + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.SourceIPsTable). 
+ Find(nil).Sort("-" + sort).All(&blIPs) + + if len(blIPs) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + for i, ip := range blIPs { + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"src": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Dst) + } + } + } + + for _, entry := range blIPs { + fmt.Println(entry) + } return nil } func printBLDestIPs(c *cli.Context) error { + db, sort, connected, err := parseArgs(c) + if err != nil { + return err + } res := database.InitResources(c.String("config")) - _ = res + + var blIPs []blacklist.BlacklistedIP + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.DestIPsTable). + Find(nil).Sort("-" + sort).All(&blIPs) + + if len(blIPs) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + for i, ip := range blIPs { + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Src) + } + } + } + + for _, entry := range blIPs { + fmt.Println(entry) + } return nil } func printBLHostnames(c *cli.Context) error { + db, sort, connected, err := parseArgs(c) + if err != nil { + return err + } res := database.InitResources(c.String("config")) - _ = res + res.DB.SelectDB(db) //so we can use the dns.GetIPsFromHost method + + var blHosts []blacklist.BlacklistedHostname + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.HostnamesTable). 
+ Find(nil).Sort("-" + sort).All(&blHosts) + + if len(blHosts) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + //for each blacklisted host + for i, host := range blHosts { + //get the ips associated with the host + ips := dns.GetIPsFromHost(res, host.Hostname) + //and loop over the ips + for _, ip := range ips { + //then find all of the hosts which talked to the ip + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip}, + ).All(&connected) + //and aggregate the source ip addresses + for _, uconn := range connected { + blHosts[i].ConnectedHosts = append(blHosts[i].ConnectedHosts, uconn.Src) + } + } + } + } + + for _, entry := range blHosts { + fmt.Println(entry) + } return nil } func printBLURLs(c *cli.Context) error { + db, sort, connected, err := parseArgs(c) + if err != nil { + return err + } res := database.InitResources(c.String("config")) - _ = res + + var blURLs []blacklist.BlacklistedURL + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.UrlsTable). + Find(nil).Sort("-" + sort).All(&blURLs) + + if len(blURLs) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + //for each blacklisted url + for i, blURL := range blURLs { + //get the ips associated with the url + var urlEntry urls.URL + res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable). + Find(bson.M{"url": blURL.Host, "uri": blURL.Resource}).One(&urlEntry) + ips := urlEntry.IPs + //and loop over the ips + for _, ip := range ips { + //then find all of the hosts which talked to the ip + var connected []structure.UniqueConnection + res.DB.Session.DB(db). 
+ C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip}, + ).All(&connected) + //and aggregate the source ip addresses + for _, uconn := range connected { + blURLs[i].ConnectedHosts = append(blURLs[i].ConnectedHosts, uconn.Src) + } + } + } + } + + for _, entry := range blURLs { + fmt.Println(entry) + } return nil } diff --git a/datatypes/blacklist/blacklist.go b/datatypes/blacklist/blacklist.go index 5feebe13..aa89dd31 100644 --- a/datatypes/blacklist/blacklist.go +++ b/datatypes/blacklist/blacklist.go @@ -8,6 +8,7 @@ type BlacklistedIP struct { UniqueConnections int `bson:"uconn"` TotalBytes int `bson:"total_bytes"` Lists []string `bson:"lists"` + ConnectedHosts []string `bson:",omitempty"` } //BlacklistedHostname holds information on a blacklisted hostname and @@ -18,6 +19,7 @@ type BlacklistedHostname struct { UniqueConnections int `bson:"uconn"` TotalBytes int `bson:"total_bytes"` Lists []string `bson:"lists"` + ConnectedHosts []string `bson:",omitempty"` } //BlacklistedURL holds information on a blacklisted URL and the @@ -29,4 +31,5 @@ type BlacklistedURL struct { UniqueConnections int `bson:"uconn"` TotalBytes int `bson:"total_bytes"` Lists []string `bson:"lists"` + ConnectedHosts []string `bson:",omitempty"` } From 2536c981d30d844700603f4ae19f44e097e42db6 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 26 Jun 2017 12:44:35 -0600 Subject: [PATCH 025/117] Console output finished --- commands/commands.go | 11 ++ commands/show-bl-hostname.go | 141 ++++++++++++++++++++ commands/show-bl-ip.go | 204 ++++++++++++++++++++++++++++ commands/show-bl-url.go | 142 ++++++++++++++++++++ commands/show-blacklisted.go | 252 ----------------------------------- commands/show-explodedDns.go | 8 +- 6 files changed, 502 insertions(+), 256 deletions(-) create mode 100644 commands/show-bl-hostname.go create mode 100644 commands/show-bl-ip.go create mode 100644 commands/show-bl-url.go delete mode 100644 commands/show-blacklisted.go diff --git 
a/commands/commands.go b/commands/commands.go index 7e380141..d8903b35 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -38,6 +38,17 @@ var ( Name: "human-readable, H", Usage: "print a report instead of csv", } + + blSortFlag = cli.StringFlag{ + Name: "sort, s", + Usage: "Sort by conn (# of connections), uconn (# of unique connections), total_bytes (# of bytes)", + Value: "conn", + } + + blConnFlag = cli.BoolFlag{ + Name: "connected, C", + Usage: "Show hosts which were connected to this blacklisted entry", + } ) // bootstrapCommands simply adds a given command to the allCommands array diff --git a/commands/show-bl-hostname.go b/commands/show-bl-hostname.go new file mode 100644 index 00000000..3a1df4bf --- /dev/null +++ b/commands/show-bl-hostname.go @@ -0,0 +1,141 @@ +package commands + +import ( + "fmt" + "html/template" + "os" + "sort" + "strconv" + "strings" + + "github.com/ocmdev/rita/analysis/dns" + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/olekukonko/tablewriter" + "github.com/urfave/cli" + "gopkg.in/mgo.v2/bson" +) + +func init() { + + blHostnames := cli.Command{ + Name: "show-bl-hostnames", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + blConnFlag, + blSortFlag, + configFlag, + }, + Usage: "Print blacklisted hostnames which recieved connections", + Action: printBLHostnames, + } + + bootstrapCommands(blHostnames) +} + +func printBLHostnames(c *cli.Context) error { + db, sort, connected, human, err := parseBLArgs(c) + if err != nil { + return err + } + res := database.InitResources(c.String("config")) + res.DB.SelectDB(db) //so we can use the dns.GetIPsFromHost method + + var blHosts []blacklist.BlacklistedHostname + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.HostnamesTable). 
+ Find(nil).Sort("-" + sort).All(&blHosts) + + if len(blHosts) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + //for each blacklisted host + for i, host := range blHosts { + //get the ips associated with the host + ips := dns.GetIPsFromHost(res, host.Hostname) + //and loop over the ips + for _, ip := range ips { + //then find all of the hosts which talked to the ip + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip}, + ).All(&connected) + //and aggregate the source ip addresses + for _, uconn := range connected { + blHosts[i].ConnectedHosts = append(blHosts[i].ConnectedHosts, uconn.Src) + } + } + } + } + + if human { + err = showBLHostnamesHuman(blHosts, connected) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } else { + err = showBLHostnames(blHosts, connected) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } + + return nil +} + +func showBLHostnames(hostnames []blacklist.BlacklistedHostname, connectedHosts bool) error { + tmpl := "{{.Hostname}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," + tmpl += "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" + if connectedHosts { + tmpl += ",{{range $idx, $host := .ConnectedHosts}}{{if $idx}} {{end}}{{ $host }}{{end}}" + } + tmpl += "\r\n" + + out, err := template.New("blhostname").Parse(tmpl) + if err != nil { + return err + } + + for _, hostname := range hostnames { + sort.Strings(hostname.Lists) + if connectedHosts { + sort.Strings(hostname.ConnectedHosts) + } + err := out.Execute(os.Stdout, hostname) + if err != nil { + fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) + } + } + return nil +} + +func showBLHostnamesHuman(hostnames []blacklist.BlacklistedHostname, connectedHosts bool) error { + table := tablewriter.NewWriter(os.Stdout) + headers := []string{"Hostname", "Connections", "Unique 
Connections", "Total Bytes", "Lists"} + if connectedHosts { + headers = append(headers, "Sources") + } + table.SetHeader(headers) + for _, hostname := range hostnames { + sort.Strings(hostname.Lists) + serialized := []string{ + hostname.Hostname, + strconv.Itoa(hostname.Connections), + strconv.Itoa(hostname.UniqueConnections), + strconv.Itoa(hostname.TotalBytes), + strings.Join(hostname.Lists, " "), + } + if connectedHosts { + sort.Strings(hostname.ConnectedHosts) + serialized = append(serialized, strings.Join(hostname.ConnectedHosts, " ")) + } + table.Append(serialized) + } + table.Render() + return nil +} diff --git a/commands/show-bl-ip.go b/commands/show-bl-ip.go new file mode 100644 index 00000000..dc4e7ae0 --- /dev/null +++ b/commands/show-bl-ip.go @@ -0,0 +1,204 @@ +package commands + +import ( + "fmt" + "html/template" + "os" + "sort" + "strconv" + "strings" + + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/olekukonko/tablewriter" + "github.com/urfave/cli" + "gopkg.in/mgo.v2/bson" +) + +func init() { + blSourceIPs := cli.Command{ + Name: "show-bl-source-ips", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + blConnFlag, + blSortFlag, + configFlag, + }, + Usage: "Print blacklisted IPs which initiated connections", + Action: printBLSourceIPs, + } + + blDestIPs := cli.Command{ + Name: "show-bl-dest-ips", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + blConnFlag, + blSortFlag, + configFlag, + }, + Usage: "Print blacklisted IPs which recieved connections", + Action: printBLDestIPs, + } + + bootstrapCommands(blSourceIPs, blDestIPs) +} + +func parseBLArgs(c *cli.Context) (string, string, bool, bool, error) { + db := c.String("database") + sort := c.String("sort") + connected := c.Bool("connected") + human := c.Bool("human-readable") + if db == "" { + return db, sort, connected, human, cli.NewExitError("Specify a database with -d", -1) + } + if sort != "conn" && 
sort != "uconn" && sort != "total_bytes" { + return db, sort, connected, human, cli.NewExitError("Invalid option passed to sort flag", -1) + } + return db, sort, connected, human, nil +} + +func printBLSourceIPs(c *cli.Context) error { + db, sort, connected, human, err := parseBLArgs(c) + if err != nil { + return err + } + res := database.InitResources(c.String("config")) + + var blIPs []blacklist.BlacklistedIP + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.SourceIPsTable). + Find(nil).Sort("-" + sort).All(&blIPs) + + if len(blIPs) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + for i, ip := range blIPs { + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"src": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Dst) + } + } + } + + if human { + err = showBLIPsHuman(blIPs, connected, true) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } else { + err = showBLIPs(blIPs, connected, true) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } + return nil +} + +func printBLDestIPs(c *cli.Context) error { + db, sort, connected, human, err := parseBLArgs(c) + if err != nil { + return err + } + res := database.InitResources(c.String("config")) + + var blIPs []blacklist.BlacklistedIP + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.DestIPsTable). + Find(nil).Sort("-" + sort).All(&blIPs) + + if len(blIPs) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + for i, ip := range blIPs { + var connected []structure.UniqueConnection + res.DB.Session.DB(db). 
+ C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Src) + } + } + } + + if human { + err = showBLIPsHuman(blIPs, connected, false) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } else { + err = showBLIPs(blIPs, connected, false) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } + return nil +} + +func showBLIPs(ips []blacklist.BlacklistedIP, connectedHosts, source bool) error { + //source is unused until we add column headers + tmpl := "{{.IP}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," + tmpl += "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" + if connectedHosts { + tmpl += ",{{range $idx, $host := .ConnectedHosts}}{{if $idx}} {{end}}{{ $host }}{{end}}" + } + tmpl += "\r\n" + + out, err := template.New("blip").Parse(tmpl) + if err != nil { + return err + } + + for _, ip := range ips { + sort.Strings(ip.Lists) + if connectedHosts { + sort.Strings(ip.ConnectedHosts) + } + err := out.Execute(os.Stdout, ip) + if err != nil { + fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) + } + } + return nil +} + +func showBLIPsHuman(ips []blacklist.BlacklistedIP, connectedHosts, source bool) error { + table := tablewriter.NewWriter(os.Stdout) + headers := []string{"IP", "Connections", "Unique Connections", "Total Bytes", "Lists"} + if connectedHosts { + if source { + headers = append(headers, "Destinations") + } else { + headers = append(headers, "Sources") + } + } + table.SetHeader(headers) + for _, ip := range ips { + sort.Strings(ip.Lists) + serialized := []string{ + ip.IP, + strconv.Itoa(ip.Connections), + strconv.Itoa(ip.UniqueConnections), + strconv.Itoa(ip.TotalBytes), + strings.Join(ip.Lists, " "), + } + if connectedHosts { + sort.Strings(ip.ConnectedHosts) + serialized = append(serialized, strings.Join(ip.ConnectedHosts, " ")) + 
} + table.Append(serialized) + } + table.Render() + return nil +} diff --git a/commands/show-bl-url.go b/commands/show-bl-url.go new file mode 100644 index 00000000..056d2998 --- /dev/null +++ b/commands/show-bl-url.go @@ -0,0 +1,142 @@ +package commands + +import ( + "fmt" + "html/template" + "os" + "sort" + "strconv" + "strings" + + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/datatypes/urls" + "github.com/olekukonko/tablewriter" + "github.com/urfave/cli" + "gopkg.in/mgo.v2/bson" +) + +func init() { + blURLs := cli.Command{ + Name: "show-bl-urls", + Flags: []cli.Flag{ + databaseFlag, + humanFlag, + blConnFlag, + blSortFlag, + configFlag, + }, + Usage: "Print blacklisted URLs which were visited", + Action: printBLURLs, + } + + bootstrapCommands(blURLs) +} + +func printBLURLs(c *cli.Context) error { + db, sort, connected, human, err := parseBLArgs(c) + if err != nil { + return err + } + res := database.InitResources(c.String("config")) + + var blURLs []blacklist.BlacklistedURL + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.UrlsTable). + Find(nil).Sort("-" + sort).All(&blURLs) + + if len(blURLs) == 0 { + return cli.NewExitError("No results were found for "+db, -1) + } + + if connected { + //for each blacklisted url + for i, blURL := range blURLs { + //get the ips associated with the url + var urlEntry urls.URL + res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable). + Find(bson.M{"url": blURL.Host, "uri": blURL.Resource}).One(&urlEntry) + ips := urlEntry.IPs + //and loop over the ips + for _, ip := range ips { + //then find all of the hosts which talked to the ip + var connected []structure.UniqueConnection + res.DB.Session.DB(db). 
+ C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip}, + ).All(&connected) + //and aggregate the source ip addresses + for _, uconn := range connected { + blURLs[i].ConnectedHosts = append(blURLs[i].ConnectedHosts, uconn.Src) + } + } + } + } + if human { + err = showBLURLsHuman(blURLs, connected) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } else { + err = showBLURLs(blURLs, connected) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + } + + return nil +} + +func showBLURLs(urls []blacklist.BlacklistedURL, connectedHosts bool) error { + tmpl := "{{.Host}},{{.Resource}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," + tmpl += "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" + if connectedHosts { + tmpl += ",{{range $idx, $url := .ConnectedHosts}}{{if $idx}} {{end}}{{ $url }}{{end}}" + } + tmpl += "\r\n" + + out, err := template.New("blurl").Parse(tmpl) + if err != nil { + return err + } + + for _, url := range urls { + sort.Strings(url.Lists) + if connectedHosts { + sort.Strings(url.ConnectedHosts) + } + err := out.Execute(os.Stdout, url) + if err != nil { + fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) + } + } + return nil +} + +func showBLURLsHuman(urls []blacklist.BlacklistedURL, connectedHosts bool) error { + table := tablewriter.NewWriter(os.Stdout) + headers := []string{"Host", "Resource", "Connections", "Unique Connections", "Total Bytes", "Lists"} + if connectedHosts { + headers = append(headers, "Sources") + } + table.SetHeader(headers) + for _, url := range urls { + sort.Strings(url.Lists) + serialized := []string{ + url.Host, + url.Resource, + strconv.Itoa(url.Connections), + strconv.Itoa(url.UniqueConnections), + strconv.Itoa(url.TotalBytes), + strings.Join(url.Lists, " "), + } + if connectedHosts { + sort.Strings(url.ConnectedHosts) + serialized = append(serialized, strings.Join(url.ConnectedHosts, " ")) + } + table.Append(serialized) + } + 
table.Render() + return nil +} diff --git a/commands/show-blacklisted.go b/commands/show-blacklisted.go deleted file mode 100644 index 2c0563d9..00000000 --- a/commands/show-blacklisted.go +++ /dev/null @@ -1,252 +0,0 @@ -package commands - -import ( - "fmt" - - "github.com/ocmdev/rita/analysis/dns" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" - "github.com/ocmdev/rita/datatypes/urls" - "github.com/urfave/cli" - "gopkg.in/mgo.v2/bson" -) - -func init() { - sortFlag := cli.StringFlag{ - Name: "sort, s", - Usage: "Sort by conn (# of connections), uconn (# of unique connections), total_bytes (# of bytes)", - Value: "conn", - } - connFlag := cli.BoolFlag{ - Name: "connected, C", - Usage: "Show hosts which were connected to this blacklisted entry", - } - - blSourceIPs := cli.Command{ - Name: "show-bl-source-ips", - Flags: []cli.Flag{ - databaseFlag, - humanFlag, - connFlag, - sortFlag, - configFlag, - }, - Usage: "Print blacklisted IPs which initiated connections", - Action: printBLSourceIPs, - } - - blDestIPs := cli.Command{ - Name: "show-bl-dest-ips", - Flags: []cli.Flag{ - databaseFlag, - humanFlag, - connFlag, - sortFlag, - configFlag, - }, - Usage: "Print blacklisted IPs which recieved connections", - Action: printBLDestIPs, - } - - blHostnames := cli.Command{ - Name: "show-bl-hostnames", - Flags: []cli.Flag{ - databaseFlag, - humanFlag, - connFlag, - sortFlag, - configFlag, - }, - Usage: "Print blacklisted hostnames which recieved connections", - Action: printBLHostnames, - } - - blURLs := cli.Command{ - Name: "show-bl-urls", - Flags: []cli.Flag{ - databaseFlag, - humanFlag, - connFlag, - sortFlag, - configFlag, - }, - Usage: "Print blacklisted URLs which were visited", - Action: printBLURLs, - } - - bootstrapCommands(blSourceIPs, blDestIPs, blHostnames, blURLs) -} - -func parseArgs(c *cli.Context) (string, string, bool, error) { - db := c.String("database") - sort := 
c.String("sort") - connected := c.Bool("connected") - if db == "" { - return db, sort, connected, cli.NewExitError("Specify a database with -d", -1) - } - if sort != "conn" && sort != "uconn" && sort != "total_bytes" { - return db, sort, connected, cli.NewExitError("Invalid option passed to sort flag", -1) - } - return db, sort, connected, nil -} - -func printBLSourceIPs(c *cli.Context) error { - db, sort, connected, err := parseArgs(c) - if err != nil { - return err - } - res := database.InitResources(c.String("config")) - - var blIPs []blacklist.BlacklistedIP - res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.SourceIPsTable). - Find(nil).Sort("-" + sort).All(&blIPs) - - if len(blIPs) == 0 { - return cli.NewExitError("No results were found for "+db, -1) - } - - if connected { - for i, ip := range blIPs { - var connected []structure.UniqueConnection - res.DB.Session.DB(db). - C(res.System.StructureConfig.UniqueConnTable).Find( - bson.M{"src": ip.IP}, - ).All(&connected) - for _, uconn := range connected { - blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Dst) - } - } - } - - for _, entry := range blIPs { - fmt.Println(entry) - } - return nil -} - -func printBLDestIPs(c *cli.Context) error { - db, sort, connected, err := parseArgs(c) - if err != nil { - return err - } - res := database.InitResources(c.String("config")) - - var blIPs []blacklist.BlacklistedIP - res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.DestIPsTable). - Find(nil).Sort("-" + sort).All(&blIPs) - - if len(blIPs) == 0 { - return cli.NewExitError("No results were found for "+db, -1) - } - - if connected { - for i, ip := range blIPs { - var connected []structure.UniqueConnection - res.DB.Session.DB(db). 
- C(res.System.StructureConfig.UniqueConnTable).Find( - bson.M{"dst": ip.IP}, - ).All(&connected) - for _, uconn := range connected { - blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Src) - } - } - } - - for _, entry := range blIPs { - fmt.Println(entry) - } - return nil -} - -func printBLHostnames(c *cli.Context) error { - db, sort, connected, err := parseArgs(c) - if err != nil { - return err - } - res := database.InitResources(c.String("config")) - res.DB.SelectDB(db) //so we can use the dns.GetIPsFromHost method - - var blHosts []blacklist.BlacklistedHostname - res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.HostnamesTable). - Find(nil).Sort("-" + sort).All(&blHosts) - - if len(blHosts) == 0 { - return cli.NewExitError("No results were found for "+db, -1) - } - - if connected { - //for each blacklisted host - for i, host := range blHosts { - //get the ips associated with the host - ips := dns.GetIPsFromHost(res, host.Hostname) - //and loop over the ips - for _, ip := range ips { - //then find all of the hosts which talked to the ip - var connected []structure.UniqueConnection - res.DB.Session.DB(db). - C(res.System.StructureConfig.UniqueConnTable).Find( - bson.M{"dst": ip}, - ).All(&connected) - //and aggregate the source ip addresses - for _, uconn := range connected { - blHosts[i].ConnectedHosts = append(blHosts[i].ConnectedHosts, uconn.Src) - } - } - } - } - - for _, entry := range blHosts { - fmt.Println(entry) - } - return nil -} - -func printBLURLs(c *cli.Context) error { - db, sort, connected, err := parseArgs(c) - if err != nil { - return err - } - res := database.InitResources(c.String("config")) - - var blURLs []blacklist.BlacklistedURL - res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.UrlsTable). 
- Find(nil).Sort("-" + sort).All(&blURLs) - - if len(blURLs) == 0 { - return cli.NewExitError("No results were found for "+db, -1) - } - - if connected { - //for each blacklisted url - for i, blURL := range blURLs { - //get the ips associated with the url - var urlEntry urls.URL - res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable). - Find(bson.M{"url": blURL.Host, "uri": blURL.Resource}).One(&urlEntry) - ips := urlEntry.IPs - //and loop over the ips - for _, ip := range ips { - //then find all of the hosts which talked to the ip - var connected []structure.UniqueConnection - res.DB.Session.DB(db). - C(res.System.StructureConfig.UniqueConnTable).Find( - bson.M{"dst": ip}, - ).All(&connected) - //and aggregate the source ip addresses - for _, uconn := range connected { - blURLs[i].ConnectedHosts = append(blURLs[i].ConnectedHosts, uconn.Src) - } - } - } - } - - for _, entry := range blURLs { - fmt.Println(entry) - } - return nil -} diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 11f51fdd..7fe2902e 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -39,12 +39,12 @@ func init() { } if c.Bool("human-readable") { - err := showResultsHuman(explodedResults) + err := showDNSResultsHuman(explodedResults) if err != nil { return cli.NewExitError(err.Error(), -1) } } - err := showResults(explodedResults) + err := showDNSResults(explodedResults) if err != nil { return cli.NewExitError(err.Error(), -1) } @@ -54,7 +54,7 @@ func init() { bootstrapCommands(command) } -func showResults(dnsResults []dns.ExplodedDNS) error { +func showDNSResults(dnsResults []dns.ExplodedDNS) error { tmpl := "{{.Domain}},{{.Subdomains}},{{.Visited}}\n" out, err := template.New("exploded-dns").Parse(tmpl) @@ -71,7 +71,7 @@ func showResults(dnsResults []dns.ExplodedDNS) error { return nil } -func showResultsHuman(dnsResults []dns.ExplodedDNS) error { +func showDNSResultsHuman(dnsResults []dns.ExplodedDNS) error { table := 
tablewriter.NewWriter(os.Stdout) table.SetHeader([]string{"Domain", "Unique Subdomains", "Times Looked Up"}) for _, result := range dnsResults { From 0f149011354ab227f3cfb83ba52fd7834ff8a123 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 26 Jun 2017 13:43:31 -0600 Subject: [PATCH 026/117] Added html reporting for new blacklist --- commands/show-bl-hostname.go | 4 +- commands/show-bl-ip.go | 7 ++- commands/show-bl-url.go | 4 +- reporting/report-bl-dest-ips.go | 49 +++++++++++++++++ reporting/report-bl-hostnames.go | 85 +++++++++++++++++++++++++++++ reporting/report-bl-source-ips.go | 76 ++++++++++++++++++++++++++ reporting/report-bl-urls.go | 88 +++++++++++++++++++++++++++++++ reporting/report.go | 17 ++++++ reporting/templates/templates.go | 49 ++++++++++++++--- 9 files changed, 365 insertions(+), 14 deletions(-) create mode 100644 reporting/report-bl-dest-ips.go create mode 100644 reporting/report-bl-hostnames.go create mode 100644 reporting/report-bl-source-ips.go create mode 100644 reporting/report-bl-urls.go diff --git a/commands/show-bl-hostname.go b/commands/show-bl-hostname.go index 3a1df4bf..7f80c350 100644 --- a/commands/show-bl-hostname.go +++ b/commands/show-bl-hostname.go @@ -90,11 +90,11 @@ func printBLHostnames(c *cli.Context) error { func showBLHostnames(hostnames []blacklist.BlacklistedHostname, connectedHosts bool) error { tmpl := "{{.Hostname}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," - tmpl += "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" + tmpl += blacklistListsTemplate if connectedHosts { tmpl += ",{{range $idx, $host := .ConnectedHosts}}{{if $idx}} {{end}}{{ $host }}{{end}}" } - tmpl += "\r\n" + tmpl += endl out, err := template.New("blhostname").Parse(tmpl) if err != nil { diff --git a/commands/show-bl-ip.go b/commands/show-bl-ip.go index dc4e7ae0..55deb89c 100644 --- a/commands/show-bl-ip.go +++ b/commands/show-bl-ip.go @@ -16,6 +16,9 @@ import ( "gopkg.in/mgo.v2/bson" ) +const 
blacklistListsTemplate = "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" +const endl = "\r\n" + func init() { blSourceIPs := cli.Command{ Name: "show-bl-source-ips", @@ -149,11 +152,11 @@ func printBLDestIPs(c *cli.Context) error { func showBLIPs(ips []blacklist.BlacklistedIP, connectedHosts, source bool) error { //source is unused until we add column headers tmpl := "{{.IP}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," - tmpl += "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" + tmpl += blacklistListsTemplate if connectedHosts { tmpl += ",{{range $idx, $host := .ConnectedHosts}}{{if $idx}} {{end}}{{ $host }}{{end}}" } - tmpl += "\r\n" + tmpl += endl out, err := template.New("blip").Parse(tmpl) if err != nil { diff --git a/commands/show-bl-url.go b/commands/show-bl-url.go index 056d2998..c3fc692e 100644 --- a/commands/show-bl-url.go +++ b/commands/show-bl-url.go @@ -90,11 +90,11 @@ func printBLURLs(c *cli.Context) error { func showBLURLs(urls []blacklist.BlacklistedURL, connectedHosts bool) error { tmpl := "{{.Host}},{{.Resource}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," - tmpl += "{{range $idx, $list := .Lists}}{{if $idx}} {{end}}{{ $list }}{{end}}" + tmpl += blacklistListsTemplate if connectedHosts { tmpl += ",{{range $idx, $url := .ConnectedHosts}}{{if $idx}} {{end}}{{ $url }}{{end}}" } - tmpl += "\r\n" + tmpl += endl out, err := template.New("blurl").Parse(tmpl) if err != nil { diff --git a/reporting/report-bl-dest-ips.go b/reporting/report-bl-dest-ips.go new file mode 100644 index 00000000..2d28101e --- /dev/null +++ b/reporting/report-bl-dest-ips.go @@ -0,0 +1,49 @@ +package reporting + +import ( + "html/template" + "os" + + "gopkg.in/mgo.v2/bson" + + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/reporting/templates" +) + +func printBLDestIPs(db string, res *database.Resources) 
error { + f, err := os.Create("bl-dest-ips.html") + if err != nil { + return err + } + defer f.Close() + + var blIPs []blacklist.BlacklistedIP + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.DestIPsTable). + Find(nil).Sort("-conn").All(&blIPs) + + for i, ip := range blIPs { + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Src) + } + } + + out, err := template.New("bl-dest-ips.html").Parse(templates.BLDestIPTempl) + if err != nil { + return err + } + + w, err := getBLIPWriter(blIPs) + if err != nil { + return err + } + + return out.Execute(f, &templates.ReportingInfo{DB: db, Writer: template.HTML(w)}) +} diff --git a/reporting/report-bl-hostnames.go b/reporting/report-bl-hostnames.go new file mode 100644 index 00000000..3680e1b7 --- /dev/null +++ b/reporting/report-bl-hostnames.go @@ -0,0 +1,85 @@ +package reporting + +import ( + "bytes" + "html/template" + "os" + "sort" + + "gopkg.in/mgo.v2/bson" + + "github.com/ocmdev/rita/analysis/dns" + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/reporting/templates" +) + +func printBLHostnames(db string, res *database.Resources) error { + f, err := os.Create("bl-hostnames.html") + if err != nil { + return err + } + defer f.Close() + + var blHosts []blacklist.BlacklistedHostname + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.HostnamesTable). 
+ Find(nil).Sort("-conn").All(&blHosts) + + //for each blacklisted host + for i, host := range blHosts { + //get the ips associated with the host + ips := dns.GetIPsFromHost(res, host.Hostname) + //and loop over the ips + for _, ip := range ips { + //then find all of the hosts which talked to the ip + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip}, + ).All(&connected) + //and aggregate the source ip addresses + for _, uconn := range connected { + blHosts[i].ConnectedHosts = append(blHosts[i].ConnectedHosts, uconn.Src) + } + } + } + + out, err := template.New("bl-hostnames.html").Parse(templates.BLHostnameTempl) + if err != nil { + return err + } + + w, err := getBLHostnameWriter(blHosts) + if err != nil { + return err + } + + return out.Execute(f, &templates.ReportingInfo{DB: db, Writer: template.HTML(w)}) +} + +func getBLHostnameWriter(results []blacklist.BlacklistedHostname) (string, error) { + tmpl := "" + + "" + + "" + + "" + + "\n" + + out, err := template.New("blhostname").Parse(tmpl) + if err != nil { + return "", err + } + + w := new(bytes.Buffer) + + for _, result := range results { + sort.Strings(result.Lists) + sort.Strings(result.ConnectedHosts) + err := out.Execute(w, result) + if err != nil { + return "", err + } + } + return w.String(), nil +} diff --git a/reporting/report-bl-source-ips.go b/reporting/report-bl-source-ips.go new file mode 100644 index 00000000..6aa79a16 --- /dev/null +++ b/reporting/report-bl-source-ips.go @@ -0,0 +1,76 @@ +package reporting + +import ( + "bytes" + "html/template" + "os" + "sort" + + "gopkg.in/mgo.v2/bson" + + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/reporting/templates" +) + +func printBLSourceIPs(db string, res *database.Resources) error { + f, err := os.Create("bl-source-ips.html") + if err != nil { + return 
err + } + defer f.Close() + + var blIPs []blacklist.BlacklistedIP + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.SourceIPsTable). + Find(nil).Sort("-conn").All(&blIPs) + + for i, ip := range blIPs { + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"src": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + blIPs[i].ConnectedHosts = append(blIPs[i].ConnectedHosts, uconn.Dst) + } + } + + out, err := template.New("bl-source-ips.html").Parse(templates.BLSourceIPTempl) + if err != nil { + return err + } + + w, err := getBLIPWriter(blIPs) + if err != nil { + return err + } + + return out.Execute(f, &templates.ReportingInfo{DB: db, Writer: template.HTML(w)}) +} + +func getBLIPWriter(results []blacklist.BlacklistedIP) (string, error) { + tmpl := "" + + "" + + "" + + "" + + "\n" + + out, err := template.New("blip").Parse(tmpl) + if err != nil { + return "", err + } + + w := new(bytes.Buffer) + + for _, result := range results { + sort.Strings(result.Lists) + sort.Strings(result.ConnectedHosts) + err := out.Execute(w, result) + if err != nil { + return "", err + } + } + return w.String(), nil +} diff --git a/reporting/report-bl-urls.go b/reporting/report-bl-urls.go new file mode 100644 index 00000000..532a41d6 --- /dev/null +++ b/reporting/report-bl-urls.go @@ -0,0 +1,88 @@ +package reporting + +import ( + "bytes" + "html/template" + "os" + "sort" + + "gopkg.in/mgo.v2/bson" + + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "github.com/ocmdev/rita/datatypes/urls" + "github.com/ocmdev/rita/reporting/templates" +) + +func printBLURLs(db string, res *database.Resources) error { + f, err := os.Create("bl-urls.html") + if err != nil { + return err + } + defer f.Close() + + var blURLs []blacklist.BlacklistedURL + res.DB.Session.DB(db). + C(res.System.BlacklistedConfig.UrlsTable). 
+ Find(nil).Sort("-conn").All(&blURLs) + + //for each blacklisted url + for i, blURL := range blURLs { + //get the ips associated with the url + var urlEntry urls.URL + res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable). + Find(bson.M{"url": blURL.Host, "uri": blURL.Resource}).One(&urlEntry) + ips := urlEntry.IPs + //and loop over the ips + for _, ip := range ips { + //then find all of the hosts which talked to the ip + var connected []structure.UniqueConnection + res.DB.Session.DB(db). + C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip}, + ).All(&connected) + //and aggregate the source ip addresses + for _, uconn := range connected { + blURLs[i].ConnectedHosts = append(blURLs[i].ConnectedHosts, uconn.Src) + } + } + } + + out, err := template.New("bl-url.html").Parse(templates.BLURLTempl) + if err != nil { + return err + } + + w, err := getBLURLWriter(blURLs) + if err != nil { + return err + } + + return out.Execute(f, &templates.ReportingInfo{DB: db, Writer: template.HTML(w)}) +} + +func getBLURLWriter(results []blacklist.BlacklistedURL) (string, error) { + tmpl := "" + + "" + + "" + + "" + + "\n" + + out, err := template.New("blurl").Parse(tmpl) + if err != nil { + return "", err + } + + w := new(bytes.Buffer) + + for _, result := range results { + sort.Strings(result.Lists) + sort.Strings(result.ConnectedHosts) + err := out.Execute(w, result) + if err != nil { + return "", err + } + } + return w.String(), nil +} diff --git a/reporting/report.go b/reporting/report.go index 0f0cf209..00f3f315 100644 --- a/reporting/report.go +++ b/reporting/report.go @@ -150,6 +150,7 @@ func writeDB(db string, wd string, res *database.Resources) error { return err } } + res.DB.SelectDB(db) err = writeDBHomePage(db) if err != nil { @@ -164,6 +165,22 @@ func writeDB(db string, wd string, res *database.Resources) error { if err != nil { return err } + err = printBLSourceIPs(db, res) + if err != nil { + return err + } + err = printBLDestIPs(db, res) + if 
err != nil { + return err + } + err = printBLHostnames(db, res) + if err != nil { + return err + } + err = printBLURLs(db, res) + if err != nil { + return err + } err = printBeacons(db, res) if err != nil { return err diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index cbcdfc3a..c2552c91 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -24,13 +24,16 @@ var dbHeader = `
  • RITA
  • Viewing: {{.DB}}
  • Beacons
  • -
  • Blacklisted
  • -
  • DNS
  • -
  • Scans
  • -
  • Long Connections
  • +
  • DNS
  • +
  • BL Source IPs
  • +
  • BL Dest. IPs
  • +
  • BL Hostnames
  • +
  • BL URLs
  • +
  • Scans
  • +
  • Long Connections
  • Long URLs
  • User Agents
  • -
  • +
  • RITA on @@ -109,11 +112,41 @@ var BeaconsTempl = dbHeader + ` ` -// BlacklistedTempl is our beacons html template -var BlacklistedTempl = dbHeader + ` +// BLSourceIPTempl is our blacklisted source ip html template +var BLSourceIPTempl = dbHeader + ` +
    +
  • ScoreSourceDestinationConnectionsAvg. Bytes - Intvl. RangeSize RangeIntvl. ModeIntvl. Mode CountSize RangeIntvl. ModeSize ModeIntvl. Mode Count Size Mode CountIntvl. SkewSize SkewIntvl. DispersionSize Dispersion TS Duration
    {{.Host}}{{.Score}}{{range $idx, $src := .Sources}}{{if $idx}}, {{end}}{{$src}}{{end}}
    {{.Hostname}}{{.Connections}}{{.UniqueConnections}}{{.TotalBytes}}{{range $idx, $list := .Lists}}{{if $idx}}, {{end}}{{ $list }}{{end}}{{range $idx, $host := .ConnectedHosts}}{{if $idx}}, {{end}}{{ $host }}{{end}}
    {{.IP}}{{.Connections}}{{.UniqueConnections}}{{.TotalBytes}}{{range $idx, $list := .Lists}}{{if $idx}}, {{end}}{{ $list }}{{end}}{{range $idx, $host := .ConnectedHosts}}{{if $idx}}, {{end}}{{ $host }}{{end}}
    {{.Host}}{{.Resource}}{{.Connections}}{{.UniqueConnections}}{{.TotalBytes}}{{range $idx, $list := .Lists}}{{if $idx}}, {{end}}{{ $list }}{{end}}{{range $idx, $host := .ConnectedHosts}}{{if $idx}}, {{end}}{{ $host }}{{end}}
    + + {{.Writer}} +
    IPConnectionsUnique ConnectionsTotal BytesListsDestinations
    +
    +` + +// BLDestIPTempl is our blacklisted destination ip html template +var BLDestIPTempl = dbHeader + ` +
    + + + {{.Writer}} +
    IPConnectionsUnique ConnectionsTotal BytesListsSources
    +
    +` + +// BLHostnameTempl is our blacklisted hostname html template +var BLHostnameTempl = dbHeader + ` +
    + + + {{.Writer}} +
    HostnameConnectionsUnique ConnectionsTotal BytesListsSources
    +
    +` + +// BLURLTempl is our blacklisted url html template +var BLURLTempl = dbHeader + `
    - + {{.Writer}}
    DestinationScoreSource(s)
    HostResourceConnectionsUnique ConnectionsTotal BytesListsSources
    From 12fdd427081563095ddb6b829b8f9851307d9ebd Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 26 Jun 2017 14:25:11 -0600 Subject: [PATCH 027/117] Add blacklisted back to xref, simplify xref to source and destionation collections --- analysis/crossref/beaconing.go | 22 ++++------ analysis/crossref/blacklist-dest-ips.go | 50 +++++++++++++++++++++++ analysis/crossref/blacklist-source-ips.go | 50 +++++++++++++++++++++++ analysis/crossref/crossref.go | 31 +++++++------- analysis/crossref/scanning.go | 22 ++++------ config/config.go | 4 +- etc/rita.yaml | 4 +- 7 files changed, 133 insertions(+), 50 deletions(-) create mode 100644 analysis/crossref/blacklist-dest-ips.go create mode 100644 analysis/crossref/blacklist-source-ips.go diff --git a/analysis/crossref/beaconing.go b/analysis/crossref/beaconing.go index 3228b4e1..45c45019 100644 --- a/analysis/crossref/beaconing.go +++ b/analysis/crossref/beaconing.go @@ -19,8 +19,8 @@ func (s BeaconingSelector) GetName() string { //Select selects beaconing hosts for XRef analysis func (s BeaconingSelector) Select(res *database.Resources) (<-chan string, <-chan string) { // make channels to return - internalHosts := make(chan string) - externalHosts := make(chan string) + sourceHosts := make(chan string) + destHosts := make(chan string) // run the read code async and return the channels immediately go func() { ssn := res.DB.Session.Copy() @@ -31,19 +31,11 @@ func (s BeaconingSelector) Select(res *database.Resources) (<-chan string, <-cha //however, this is accounted for in the finalizing step of xref var data dataBeacon.BeaconAnalysisView for iter.Next(&data) { - if data.LocalSrc { - internalHosts <- data.Src - } else { - externalHosts <- data.Src - } - if data.LocalDst { - internalHosts <- data.Dst - } else { - externalHosts <- data.Dst - } + sourceHosts <- data.Src + destHosts <- data.Dst } - close(internalHosts) - close(externalHosts) + close(sourceHosts) + close(destHosts) }() - return internalHosts, externalHosts + 
return sourceHosts, destHosts } diff --git a/analysis/crossref/blacklist-dest-ips.go b/analysis/crossref/blacklist-dest-ips.go new file mode 100644 index 00000000..d3f1c513 --- /dev/null +++ b/analysis/crossref/blacklist-dest-ips.go @@ -0,0 +1,50 @@ +package crossref + +import ( + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "gopkg.in/mgo.v2/bson" +) + +type ( + //BLDestIPSelector implements the XRefSelector interface for blacklisted destination ips + BLDestIPSelector struct{} +) + +//GetName returns "bl-dest-ips" +func (s BLDestIPSelector) GetName() string { + return "bl-dest-ip" +} + +//Select selects blacklisted dest ips for XRef analysis +func (s BLDestIPSelector) Select(res *database.Resources) (<-chan string, <-chan string) { + // make channels to return + sourceHosts := make(chan string) + destHosts := make(chan string) + // run the read code async and return the channels immediately + go func() { + ssn := res.DB.Session.Copy() + defer ssn.Close() + + var blIPs []blacklist.BlacklistedIP + ssn.DB(res.DB.GetSelectedDB()). + C(res.System.BlacklistedConfig.DestIPsTable). + Find(nil).All(&blIPs) + + for _, ip := range blIPs { + var connected []structure.UniqueConnection + ssn.DB(res.DB.GetSelectedDB()). 
+ C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"dst": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + sourceHosts <- uconn.Src + } + destHosts <- ip.IP + } + close(sourceHosts) + close(destHosts) + }() + return sourceHosts, destHosts +} diff --git a/analysis/crossref/blacklist-source-ips.go b/analysis/crossref/blacklist-source-ips.go new file mode 100644 index 00000000..0a716872 --- /dev/null +++ b/analysis/crossref/blacklist-source-ips.go @@ -0,0 +1,50 @@ +package crossref + +import ( + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/datatypes/blacklist" + "github.com/ocmdev/rita/datatypes/structure" + "gopkg.in/mgo.v2/bson" +) + +type ( + //BLSourceIPSelector implements the XRefSelector interface for blacklisted source ips + BLSourceIPSelector struct{} +) + +//GetName returns "bl-source-ip" +func (s BLSourceIPSelector) GetName() string { + return "bl-source-ip" +} + +//Select selects blacklisted source ips for XRef analysis +func (s BLSourceIPSelector) Select(res *database.Resources) (<-chan string, <-chan string) { + // make channels to return + sourceHosts := make(chan string) + destHosts := make(chan string) + // run the read code async and return the channels immediately + go func() { + ssn := res.DB.Session.Copy() + defer ssn.Close() + + var blIPs []blacklist.BlacklistedIP + ssn.DB(res.DB.GetSelectedDB()). + C(res.System.BlacklistedConfig.SourceIPsTable). + Find(nil).All(&blIPs) + + for _, ip := range blIPs { + var connected []structure.UniqueConnection + ssn.DB(res.DB.GetSelectedDB()). 
+ C(res.System.StructureConfig.UniqueConnTable).Find( + bson.M{"src": ip.IP}, + ).All(&connected) + for _, uconn := range connected { + destHosts <- uconn.Dst + } + sourceHosts <- ip.IP + } + close(sourceHosts) + close(destHosts) + }() + return sourceHosts, destHosts +} diff --git a/analysis/crossref/crossref.go b/analysis/crossref/crossref.go index a76e1f29..1a35e5fa 100644 --- a/analysis/crossref/crossref.go +++ b/analysis/crossref/crossref.go @@ -14,37 +14,39 @@ import ( func getXRefSelectors() []dataXRef.XRefSelector { beaconing := BeaconingSelector{} scanning := ScanningSelector{} + blSourceIPs := BLSourceIPSelector{} + blDestIPs := BLDestIPSelector{} - return []dataXRef.XRefSelector{beaconing, scanning} + return []dataXRef.XRefSelector{beaconing, scanning, blSourceIPs, blDestIPs} } // BuildXRefCollection runs threaded crossref analysis func BuildXRefCollection(res *database.Resources) { indexes := []mgo.Index{{Key: []string{"host"}, Unique: true}} - res.DB.CreateCollection(res.System.CrossrefConfig.InternalTable, false, indexes) - res.DB.CreateCollection(res.System.CrossrefConfig.ExternalTable, false, indexes) + res.DB.CreateCollection(res.System.CrossrefConfig.SourceTable, false, indexes) + res.DB.CreateCollection(res.System.CrossrefConfig.DestTable, false, indexes) //maps from analysis types to channels of hosts found - internal := make(map[string]<-chan string) - external := make(map[string]<-chan string) + sources := make(map[string]<-chan string) + destinations := make(map[string]<-chan string) //kick off reads for _, selector := range getXRefSelectors() { - internalHosts, externalHosts := selector.Select(res) - internal[selector.GetName()] = internalHosts - external[selector.GetName()] = externalHosts + sourceHosts, destinationHosts := selector.Select(res) + sources[selector.GetName()] = sourceHosts + destinations[selector.GetName()] = destinationHosts } xRefWG := new(sync.WaitGroup) xRefWG.Add(2) //kick off writes - go multiplexXRef(res, 
res.System.CrossrefConfig.InternalTable, internal, xRefWG) - go multiplexXRef(res, res.System.CrossrefConfig.ExternalTable, external, xRefWG) + go multiplexXRef(res, res.System.CrossrefConfig.SourceTable, sources, xRefWG) + go multiplexXRef(res, res.System.CrossrefConfig.DestTable, destinations, xRefWG) xRefWG.Wait() //group by host ip and put module findings into an array - finalizeXRef(res, res.System.CrossrefConfig.InternalTable) - finalizeXRef(res, res.System.CrossrefConfig.ExternalTable) + finalizeXRef(res, res.System.CrossrefConfig.SourceTable) + finalizeXRef(res, res.System.CrossrefConfig.DestTable) } //multiplexXRef takes a target colllection, and a map from @@ -88,9 +90,6 @@ func finalizeXRef(res *database.Resources, collection string) { {"_id", bson.D{ {"host", "$host"}, }}, - {"host", bson.D{ - {"$first", "$host"}, - }}, {"modules", bson.D{ {"$addToSet", "$module"}, }}, @@ -99,7 +98,7 @@ func finalizeXRef(res *database.Resources, collection string) { { {"$project", bson.D{ {"_id", 0}, - {"host", 1}, + {"host", "$_id.host"}, {"modules", 1}, }}, }, diff --git a/analysis/crossref/scanning.go b/analysis/crossref/scanning.go index e3c59e23..384641e3 100644 --- a/analysis/crossref/scanning.go +++ b/analysis/crossref/scanning.go @@ -18,8 +18,8 @@ func (s ScanningSelector) GetName() string { //Select selects scanning and scanned hosts for XRef analysis func (s ScanningSelector) Select(res *database.Resources) (<-chan string, <-chan string) { // make channels to return - internalHosts := make(chan string) - externalHosts := make(chan string) + sourceHosts := make(chan string) + destHosts := make(chan string) // run the read code async and return the channels immediately go func() { ssn := res.DB.Session.Copy() @@ -29,19 +29,11 @@ func (s ScanningSelector) Select(res *database.Resources) (<-chan string, <-chan var data scanning.Scan for iter.Next(&data) { - if data.LocalSrc { - internalHosts <- data.Src - } else { - externalHosts <- data.Src - } - if data.LocalDst 
{ - internalHosts <- data.Dst - } else { - externalHosts <- data.Dst - } + sourceHosts <- data.Src + destHosts <- data.Dst } - close(internalHosts) - close(externalHosts) + close(sourceHosts) + close(destHosts) }() - return internalHosts, externalHosts + return sourceHosts, destHosts } diff --git a/config/config.go b/config/config.go index 4196ad92..47f5d5c4 100644 --- a/config/config.go +++ b/config/config.go @@ -75,8 +75,8 @@ type ( //CrossrefCfg is used to control the crossref analysis module CrossrefCfg struct { - InternalTable string `yaml:"InternalTable"` - ExternalTable string `yaml:"ExternalTable"` + SourceTable string `yaml:"SourceTable"` + DestTable string `yaml:"DestinationTable"` BeaconThreshold float64 `yaml:"BeaconThreshold"` } diff --git a/etc/rita.yaml b/etc/rita.yaml index 9f315f79..a4865435 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -108,8 +108,8 @@ Dns: HostnamesTable: hostnames Crossref: - InternalTable: internXREF - ExternalTable: externXREF + SourceTable: sourceXREF + DestinationTable: destXREF BeaconThreshold: .7 Scanning: From e53f02d34a7f52f6b072c759b3c0767012b7f68f Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 26 Jun 2017 14:52:58 -0600 Subject: [PATCH 028/117] Rename rita-blacklist2 to rita-bl --- analysis/blacklist/blacklist.go | 12 ++++++------ analysis/blacklist/hostnames.go | 4 ++-- analysis/blacklist/ips.go | 4 ++-- analysis/blacklist/urls.go | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 699d6e2d..70d4298c 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -5,11 +5,11 @@ import ( "net/http" "os" - bl "github.com/ocmdev/rita-blacklist2" - blDB "github.com/ocmdev/rita-blacklist2/database" - "github.com/ocmdev/rita-blacklist2/list" - "github.com/ocmdev/rita-blacklist2/sources/lists" - "github.com/ocmdev/rita-blacklist2/sources/rpc" + bl "github.com/ocmdev/rita-bl" + blDB 
"github.com/ocmdev/rita-bl/database" + "github.com/ocmdev/rita-bl/list" + "github.com/ocmdev/rita-bl/sources/lists" + "github.com/ocmdev/rita-bl/sources/rpc" "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" log "github.com/sirupsen/logrus" @@ -29,7 +29,7 @@ func BuildBlacklistedCollections(res *database.Resources) { ritaBL := bl.NewBlacklist( blDB.NewMongoDB, //Use MongoDB for data storage res.System.DatabaseHost, //Use the DatabaseHost as the connection - "rita-blacklist2", //database + "rita-bl", //database func(err error) { //error handler res.Log.WithFields(log.Fields{ "db": currentDB, diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go index 0a926ed4..27d09d2f 100644 --- a/analysis/blacklist/hostnames.go +++ b/analysis/blacklist/hostnames.go @@ -3,12 +3,12 @@ package blacklist import ( "unsafe" - "github.com/ocmdev/rita-blacklist2/list" + "github.com/ocmdev/rita-bl/list" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/datatypes/dns" "github.com/ocmdev/rita/datatypes/structure" - bl "github.com/ocmdev/rita-blacklist2" + bl "github.com/ocmdev/rita-bl" data "github.com/ocmdev/rita/datatypes/blacklist" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index eaee036c..63f41e13 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -3,8 +3,8 @@ package blacklist import ( "unsafe" - bl "github.com/ocmdev/rita-blacklist2" - "github.com/ocmdev/rita-blacklist2/list" + bl "github.com/ocmdev/rita-bl" + "github.com/ocmdev/rita-bl/list" "github.com/ocmdev/rita/database" data "github.com/ocmdev/rita/datatypes/blacklist" "github.com/ocmdev/rita/datatypes/structure" diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 4d47e8b3..08250513 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -4,9 +4,9 @@ import ( "errors" "strings" - "github.com/ocmdev/rita-blacklist2/list" + 
"github.com/ocmdev/rita-bl/list" - bl "github.com/ocmdev/rita-blacklist2" + bl "github.com/ocmdev/rita-bl" "github.com/ocmdev/rita/database" data "github.com/ocmdev/rita/datatypes/blacklist" "github.com/ocmdev/rita/datatypes/structure" From 112fd882d9cfa350240833b80ef6bf06b5c68ff4 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 29 Jun 2017 17:47:55 -0600 Subject: [PATCH 029/117] Update PR to match tls compatible rita-bl --- analysis/blacklist/blacklist.go | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 70d4298c..335c2677 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -1,10 +1,12 @@ package blacklist import ( + "fmt" "io" "net/http" "os" + "github.com/ocmdev/mgosec" bl "github.com/ocmdev/rita-bl" blDB "github.com/ocmdev/rita-bl/database" "github.com/ocmdev/rita-bl/list" @@ -25,11 +27,16 @@ func BuildBlacklistedCollections(res *database.Resources) { //capture the current value for the error closure below currentDB := res.DB.GetSelectedDB() + blDB, err := blDB.NewMongoDB(res.System.DatabaseHost, mgosec.None, "rita-bl") + if err != nil { + res.Log.Error(err) + fmt.Println("\t[!] 
Could not connect to blacklist database") + return + } + //set up rita-blacklist ritaBL := bl.NewBlacklist( - blDB.NewMongoDB, //Use MongoDB for data storage - res.System.DatabaseHost, //Use the DatabaseHost as the connection - "rita-bl", //database + blDB, func(err error) { //error handler res.Log.WithFields(log.Fields{ "db": currentDB, @@ -181,9 +188,9 @@ func tryOpenFileThenURL(path string) func() (io.ReadCloser, error) { return func() (io.ReadCloser, error) { _, err := os.Stat(path) if err == nil { - file, err := os.Open(path) - if err != nil { - return nil, err + file, err2 := os.Open(path) + if err2 != nil { + return nil, err2 } return file, nil } From 27e468c98905b1f46128c5333f06c53e425318bb Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 7 Jul 2017 13:59:24 -0600 Subject: [PATCH 030/117] change bufferSize to a constant, added a comment to ritabl interaction --- analysis/blacklist/blacklist.go | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 335c2677..14696fae 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -20,6 +20,8 @@ import ( type resultsChan chan map[string][]blDB.BlacklistResult +const ritaBLBufferSize = 1000 + //BuildBlacklistedCollections builds the blacklisted sources, //blacklisted destinations, blacklist hostnames, and blacklisted urls //collections @@ -90,14 +92,13 @@ func BuildBlacklistedCollections(res *database.Resources) { //create the data //TODO: refactor these into modules - bufferSize := 1000 - buildBlacklistedIPs(uniqueSourceIter, res, ritaBL, bufferSize, true) + buildBlacklistedIPs(uniqueSourceIter, res, ritaBL, ritaBLBufferSize, true) - buildBlacklistedIPs(uniqueDestIter, res, ritaBL, bufferSize, false) + buildBlacklistedIPs(uniqueDestIter, res, ritaBL, ritaBLBufferSize, false) - buildBlacklistedHostnames(hostnamesIter, res, ritaBL, bufferSize) + buildBlacklistedHostnames(hostnamesIter, 
res, ritaBL, ritaBLBufferSize) - buildBlacklistedURLs(urlIter, res, ritaBL, bufferSize, "http://") + buildBlacklistedURLs(urlIter, res, ritaBL, ritaBLBufferSize, "http://") //index the data for _, collection := range collections { @@ -175,7 +176,7 @@ func buildCustomBlacklists(entryType list.BlacklistedEntryType, paths []string) newList := lists.NewLineSeperatedList( entryType, path, - 86400, + 86400, // default cache time of 1 day tryOpenFileThenURL(path), ) blacklists = append(blacklists, newList) From ed0c77d12376b236493dddbcf262687b84852580 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 29 Jun 2017 16:36:04 -0600 Subject: [PATCH 031/117] Add tls support for RITA --- config/config.go | 15 +++++++++++- database/resources.go | 55 +++++++++++++++++++++++++++++++++---------- etc/rita.yaml | 13 ++++++++-- 3 files changed, 68 insertions(+), 15 deletions(-) diff --git a/config/config.go b/config/config.go index 47f5d5c4..ad21a3cc 100644 --- a/config/config.go +++ b/config/config.go @@ -17,7 +17,7 @@ type ( //SystemConfig is the container for other config sections SystemConfig struct { BatchSize int `yaml:"BatchSize"` - DatabaseHost string `yaml:"DatabaseHost"` + MongoDBConfig MongoDBCfg `yaml:"MongoDB"` Prefetch float64 `yaml:"Prefetch"` LogConfig LogCfg `yaml:"LogConfig"` BlacklistedConfig BlacklistedCfg `yaml:"BlackListed"` @@ -33,6 +33,19 @@ type ( Version string } + //MongoDBCfg contains the means for connecting to MongoDB + MongoDBCfg struct { + ConnectionString string `yaml:"ConnectionString"` + AuthMechanism string `yaml:"AuthenticationMechanism"` + TLS TLSCfg `yaml:"TLS"` + } + + //TLSCfg contains the means for connecting to MongoDB over TLS + TLSCfg struct { + Enabled bool `yaml:"Enable"` + CAFile string `yaml:"CAFile"` + } + //LogCfg contains the configuration for logging LogCfg struct { LogLevel int `yaml:"LogLevel"` diff --git a/database/resources.go b/database/resources.go index 9159cbbe..fae091a6 100644 --- a/database/resources.go +++ 
b/database/resources.go @@ -1,6 +1,8 @@ package database import ( + "crypto/tls" + "crypto/x509" "fmt" "io/ioutil" "os" @@ -10,7 +12,8 @@ import ( mgo "gopkg.in/mgo.v2" - "github.com/Zalgo2462/mgorus" + "github.com/ocmdev/mgorus" + "github.com/ocmdev/mgosec" "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/util" "github.com/rifflock/lfshook" @@ -47,7 +50,7 @@ func InitResources(cfgPath string) *Resources { } // Jump into the requested database - session, err := mgo.Dial(conf.DatabaseHost) + session, err := connectToMongoDB(&conf.MongoDBConfig, log) if err != nil { fmt.Printf("Failed to connect to database: %s", err.Error()) os.Exit(-1) @@ -67,6 +70,7 @@ func InitResources(cfgPath string) *Resources { lock: new(sync.Mutex), } + //bundle up the system resources r := &Resources{ Log: log, System: conf, @@ -84,13 +88,48 @@ func InitResources(cfgPath string) *Resources { if !metaDB.isBuilt() { metaDB.createMetaDB() } + + //Begin logging to the metadatabase if conf.LogConfig.LogToDB { - addMongoLogger(log, conf.DatabaseHost, conf.BroConfig.MetaDB, - conf.LogConfig.RitaLogTable) + log.Hooks.Add( + mgorus.NewHookerFromSession( + session, conf.BroConfig.MetaDB, conf.LogConfig.RitaLogTable, + ), + ) } return r } +//connectToMongoDB connects to MongoDB possibly with authentication and TLS +func connectToMongoDB(conf *config.MongoDBCfg, logger *log.Logger) (*mgo.Session, error) { + if conf.TLS.Enabled { + authMechanism, err := mgosec.ParseMongoAuthMechanism(conf.AuthMechanism) + if err != nil { + authMechanism = mgosec.None + logger.WithFields(log.Fields{ + "authMechanism": conf.AuthMechanism, + }).Error(err.Error()) + fmt.Println("[!] Could not parse MongoDB authentication mechanism") + } + + tlsConf := &tls.Config{} + if len(conf.TLS.CAFile) > 0 { + pem, err := ioutil.ReadFile(conf.TLS.CAFile) + if err != nil { + logger.WithFields(log.Fields{ + "CAFile": conf.TLS.CAFile, + }).Error(err.Error()) + fmt.Println("[!] 
Could not read MongoDB CA file") + } else { + tlsConf.RootCAs = x509.NewCertPool() + tlsConf.RootCAs.AppendCertsFromPEM(pem) + } + } + return mgosec.Dial(conf.ConnectionString, authMechanism, tlsConf) + } + return mgo.Dial(conf.ConnectionString) +} + // initLog creates the logger for logging to stdout and file func initLog(level int) (*log.Logger, error) { var logs = &log.Logger{} @@ -137,11 +176,3 @@ func addFileLogger(logger *log.Logger, logPath string) { log.PanicLevel: path.Join(logPath, "panic.log"), })) } - -func addMongoLogger(logger *log.Logger, dbHost, metaDB, logColl string) error { - mgoHook, err := mgorus.NewHooker(dbHost, metaDB, logColl) - if err == nil { - logger.Hooks.Add(mgoHook) - } - return err -} diff --git a/etc/rita.yaml b/etc/rita.yaml index a4865435..c90d7396 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -1,5 +1,14 @@ -# Mongo Database to connect to -DatabaseHost: localhost:27017 +MongoDB: + # See https://docs.mongodb.com/manual/reference/connection-string/ + ConnectionString: mongodb://localhost:27017 + # How to authenticate to MongoDB + # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" + AuthenticationMechanism: null + # For encrypting data on the wire between RITA and MongoDB + TLS: + Enable: false + #If set, RITA will use the provided CA file instead of the system's CA's + CAFile: null LogConfig: # LogLevel From 6e28287a51090728cbaaa326c937867b1f37f08a Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 29 Jun 2017 17:42:53 -0600 Subject: [PATCH 032/117] Use auth even when not encrypted --- database/resources.go | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/database/resources.go b/database/resources.go index fae091a6..b32939f8 100644 --- a/database/resources.go +++ b/database/resources.go @@ -102,16 +102,16 @@ func InitResources(cfgPath string) *Resources { //connectToMongoDB connects to MongoDB possibly with authentication and TLS func connectToMongoDB(conf 
*config.MongoDBCfg, logger *log.Logger) (*mgo.Session, error) { - if conf.TLS.Enabled { - authMechanism, err := mgosec.ParseMongoAuthMechanism(conf.AuthMechanism) - if err != nil { - authMechanism = mgosec.None - logger.WithFields(log.Fields{ - "authMechanism": conf.AuthMechanism, - }).Error(err.Error()) - fmt.Println("[!] Could not parse MongoDB authentication mechanism") - } + authMechanism, err := mgosec.ParseAuthMechanism(conf.AuthMechanism) + if err != nil { + authMechanism = mgosec.None + logger.WithFields(log.Fields{ + "authMechanism": conf.AuthMechanism, + }).Error(err.Error()) + fmt.Println("[!] Could not parse MongoDB authentication mechanism") + } + if conf.TLS.Enabled { tlsConf := &tls.Config{} if len(conf.TLS.CAFile) > 0 { pem, err := ioutil.ReadFile(conf.TLS.CAFile) @@ -127,7 +127,7 @@ func connectToMongoDB(conf *config.MongoDBCfg, logger *log.Logger) (*mgo.Session } return mgosec.Dial(conf.ConnectionString, authMechanism, tlsConf) } - return mgo.Dial(conf.ConnectionString) + return mgosec.DialInsecure(conf.ConnectionString, authMechanism) } // initLog creates the logger for logging to stdout and file From 60512e1ed7f55ac3bff44de40370a9bfe5a498b6 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Fri, 7 Jul 2017 16:50:28 -0600 Subject: [PATCH 033/117] Update rita-bl analysis to use configured connection parameters --- analysis/blacklist/blacklist.go | 21 +++++++++++++++++---- config/config.go | 24 +++++++++++++++++++----- database/resources.go | 13 ++----------- 3 files changed, 38 insertions(+), 20 deletions(-) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 14696fae..49be1a47 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -6,7 +6,6 @@ import ( "net/http" "os" - "github.com/ocmdev/mgosec" bl "github.com/ocmdev/rita-bl" blDB "github.com/ocmdev/rita-bl/database" "github.com/ocmdev/rita-bl/list" @@ -28,8 +27,22 @@ const ritaBLBufferSize = 1000 func BuildBlacklistedCollections(res 
*database.Resources) { //capture the current value for the error closure below currentDB := res.DB.GetSelectedDB() - - blDB, err := blDB.NewMongoDB(res.System.DatabaseHost, mgosec.None, "rita-bl") + var err error + var blDatabase blDB.Handle + if res.System.MongoDBConfig.TLS.Enabled { + blDatabase, err = blDB.NewSecureMongoDB( + res.System.MongoDBConfig.ConnectionString, + res.System.MongoDBConfig.AuthMechanismParsed, + "rita-bl", + res.System.MongoDBConfig.TLS.CAFile, + ) + } else { + blDatabase, err = blDB.NewMongoDB( + res.System.MongoDBConfig.ConnectionString, + res.System.MongoDBConfig.AuthMechanismParsed, + "rita-bl", + ) + } if err != nil { res.Log.Error(err) fmt.Println("\t[!] Could not connect to blacklist database") @@ -38,7 +51,7 @@ func BuildBlacklistedCollections(res *database.Resources) { //set up rita-blacklist ritaBL := bl.NewBlacklist( - blDB, + blDatabase, func(err error) { //error handler res.Log.WithFields(log.Fields{ "db": currentDB, diff --git a/config/config.go b/config/config.go index ad21a3cc..7d098512 100644 --- a/config/config.go +++ b/config/config.go @@ -7,6 +7,8 @@ import ( "os/user" "reflect" + "github.com/ocmdev/mgosec" + "gopkg.in/yaml.v2" ) @@ -35,9 +37,10 @@ type ( //MongoDBCfg contains the means for connecting to MongoDB MongoDBCfg struct { - ConnectionString string `yaml:"ConnectionString"` - AuthMechanism string `yaml:"AuthenticationMechanism"` - TLS TLSCfg `yaml:"TLS"` + ConnectionString string `yaml:"ConnectionString"` + AuthMechanism string `yaml:"AuthenticationMechanism"` + AuthMechanismParsed mgosec.AuthMechanism `yaml:"AuthenticationMechanismParsed,omitempty"` + TLS TLSCfg `yaml:"TLS"` } //TLSCfg contains the means for connecting to MongoDB over TLS @@ -174,14 +177,25 @@ func loadSystemConfig(cfgPath string) (*SystemConfig, bool) { } err = yaml.Unmarshal(cfgFile, config) + if err != nil { + fmt.Fprintf(os.Stderr, "Failed to read config: %s\n", err.Error()) + return config, false + } + // expand env variables, config is a 
pointer // so we have to call elem on the reflect value expandConfig(reflect.ValueOf(config).Elem()) + //parse out the mongo authentication mechanism + authMechanism, err := mgosec.ParseAuthMechanism( + config.MongoDBConfig.AuthMechanism, + ) if err != nil { - fmt.Fprintf(os.Stderr, "Failed to read config: %s\n", err.Error()) - return config, false + authMechanism = mgosec.None + fmt.Println("[!] Could not parse MongoDB authentication mechanism") } + config.MongoDBConfig.AuthMechanismParsed = authMechanism + return config, true } return config, false diff --git a/database/resources.go b/database/resources.go index b32939f8..180be673 100644 --- a/database/resources.go +++ b/database/resources.go @@ -102,15 +102,6 @@ func InitResources(cfgPath string) *Resources { //connectToMongoDB connects to MongoDB possibly with authentication and TLS func connectToMongoDB(conf *config.MongoDBCfg, logger *log.Logger) (*mgo.Session, error) { - authMechanism, err := mgosec.ParseAuthMechanism(conf.AuthMechanism) - if err != nil { - authMechanism = mgosec.None - logger.WithFields(log.Fields{ - "authMechanism": conf.AuthMechanism, - }).Error(err.Error()) - fmt.Println("[!] 
Could not parse MongoDB authentication mechanism") - } - if conf.TLS.Enabled { tlsConf := &tls.Config{} if len(conf.TLS.CAFile) > 0 { @@ -125,9 +116,9 @@ func connectToMongoDB(conf *config.MongoDBCfg, logger *log.Logger) (*mgo.Session tlsConf.RootCAs.AppendCertsFromPEM(pem) } } - return mgosec.Dial(conf.ConnectionString, authMechanism, tlsConf) + return mgosec.Dial(conf.ConnectionString, conf.AuthMechanismParsed, tlsConf) } - return mgosec.DialInsecure(conf.ConnectionString, authMechanism) + return mgosec.DialInsecure(conf.ConnectionString, conf.AuthMechanismParsed) } // initLog creates the logger for logging to stdout and file From 1b93309f6378928a20c9ba4dc8a58f174eea4652 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 10 Jul 2017 16:09:40 -0600 Subject: [PATCH 034/117] Set socket timeout to be configurable --- config/config.go | 4 ++++ database/resources.go | 4 ++-- etc/rita.yaml | 2 ++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/config/config.go b/config/config.go index 7d098512..62c62772 100644 --- a/config/config.go +++ b/config/config.go @@ -6,6 +6,7 @@ import ( "os" "os/user" "reflect" + "time" "github.com/ocmdev/mgosec" @@ -38,6 +39,7 @@ type ( //MongoDBCfg contains the means for connecting to MongoDB MongoDBCfg struct { ConnectionString string `yaml:"ConnectionString"` + SocketTimeout time.Duration `yaml:"SocketTimeout"` AuthMechanism string `yaml:"AuthenticationMechanism"` AuthMechanismParsed mgosec.AuthMechanism `yaml:"AuthenticationMechanismParsed,omitempty"` TLS TLSCfg `yaml:"TLS"` @@ -196,6 +198,8 @@ func loadSystemConfig(cfgPath string) (*SystemConfig, bool) { } config.MongoDBConfig.AuthMechanismParsed = authMechanism + //set the timeout time in hours + config.MongoDBConfig.SocketTimeout *= time.Hour return config, true } return config, false diff --git a/database/resources.go b/database/resources.go index 180be673..501e7800 100644 --- a/database/resources.go +++ b/database/resources.go @@ -55,8 +55,8 @@ func 
InitResources(cfgPath string) *Resources { fmt.Printf("Failed to connect to database: %s", err.Error()) os.Exit(-1) } - session.SetSocketTimeout(2 * time.Hour) - session.SetSyncTimeout(2 * time.Hour) + session.SetSocketTimeout(conf.MongoDBConfig.SocketTimeout) + session.SetSyncTimeout(conf.MongoDBConfig.SocketTimeout) session.SetCursorTimeout(0) // Allows code to interact with the database diff --git a/etc/rita.yaml b/etc/rita.yaml index c90d7396..4a5e66af 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -4,6 +4,8 @@ MongoDB: # How to authenticate to MongoDB # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" AuthenticationMechanism: null + # The time in hours before RITA's connection to MongoDB times out + SocketTimeout: 2 # For encrypting data on the wire between RITA and MongoDB TLS: Enable: false From 317c1bca11c5283eb60af3f7fd12bb06595532b0 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 10 Jul 2017 16:10:47 -0600 Subject: [PATCH 035/117] add documentation --- etc/rita.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/etc/rita.yaml b/etc/rita.yaml index 4a5e66af..64ef9083 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -4,7 +4,7 @@ MongoDB: # How to authenticate to MongoDB # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" AuthenticationMechanism: null - # The time in hours before RITA's connection to MongoDB times out + # The time in hours before RITA's connection to MongoDB times out. 0 waits indefinitely. 
SocketTimeout: 2 # For encrypting data on the wire between RITA and MongoDB TLS: From ff23ad2c2dbd554be50c3f0a457550d5f05c7292 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Tue, 11 Jul 2017 17:21:50 -0600 Subject: [PATCH 036/117] Add option to verify certificate, this forced the creation of the running config, took the time to create the separate table config --- analysis/beacon/beacon.go | 34 ++-- analysis/beacon/beacon_test.go | 8 +- analysis/blacklist/blacklist.go | 52 +++--- analysis/blacklist/hostnames.go | 6 +- analysis/blacklist/ips.go | 6 +- analysis/blacklist/urls.go | 6 +- analysis/crossref/beaconing.go | 2 +- analysis/crossref/blacklist-dest-ips.go | 4 +- analysis/crossref/blacklist-source-ips.go | 4 +- analysis/crossref/crossref.go | 12 +- analysis/crossref/scanning.go | 2 +- analysis/dns/explodedDNS.go | 6 +- analysis/dns/hostnames.go | 10 +- analysis/scanning/scan.go | 10 +- analysis/structure/hosts.go | 8 +- analysis/structure/uconn.go | 10 +- analysis/urls/url.go | 10 +- analysis/useragent/useragent.go | 8 +- commands/import.go | 16 +- commands/reset-analysis.go | 6 +- commands/show-bl-hostname.go | 4 +- commands/show-bl-ip.go | 8 +- commands/show-bl-url.go | 6 +- commands/show-explodedDns.go | 2 +- commands/show-long-connections.go | 2 +- commands/show-scans.go | 2 +- commands/show-urls.go | 4 +- commands/show-user-agents.go | 2 +- commands/test-config.go | 11 +- config/config.go | 194 +++------------------- config/running.go | 65 ++++++++ config/static.go | 116 +++++++++++++ config/tables.go | 114 +++++++++++++ database/meta.go | 40 ++--- database/mock.go | 10 +- database/resources.go | 49 ++---- etc/rita.yaml | 2 + parser/fsimporter.go | 13 +- parser/indexedfile.go | 8 +- parser/parsetypes/conn.go | 2 +- parser/parsetypes/dns.go | 2 +- parser/parsetypes/http.go | 2 +- parser/parsetypes/parsetypes.go | 2 +- reporting/report-bl-dest-ips.go | 4 +- reporting/report-bl-hostnames.go | 4 +- reporting/report-bl-source-ips.go | 4 +- 
reporting/report-bl-urls.go | 6 +- reporting/report-explodedDns.go | 2 +- reporting/report-long-connections.go | 2 +- reporting/report-scans.go | 2 +- reporting/report-urls.go | 2 +- reporting/report-useragents.go | 2 +- 52 files changed, 528 insertions(+), 380 deletions(-) create mode 100644 config/running.go create mode 100644 config/static.go create mode 100644 config/tables.go diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 88bb6147..1a0df8d5 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -23,7 +23,7 @@ type ( //Beacon contains methods for conducting a beacon hunt Beacon struct { db string // current database - resources *database.Resources // holds the global config and DB layer + res *database.Resources // holds the global config and DB layer defaultConnThresh int // default connections threshold collectChannel chan string // holds ip addresses analysisChannel chan *beaconAnalysisInput // holds unanalyzed data @@ -52,7 +52,7 @@ type ( ) func BuildBeaconCollection(res *database.Resources) { - collection_name := res.System.BeaconConfig.BeaconTable + collection_name := res.Config.T.Beacon.BeaconTable collection_keys := []mgo.Index{ {Key: []string{"uconn_id"}, Unique: true}, {Key: []string{"ts_score"}}, @@ -67,7 +67,7 @@ func BuildBeaconCollection(res *database.Resources) { func GetBeaconResultsView(res *database.Resources, ssn *mgo.Session, cutoffScore float64) *mgo.Iter { pipeline := getViewPipeline(res, cutoffScore) - return res.DB.AggregateCollection(res.System.BeaconConfig.BeaconTable, ssn, pipeline) + return res.DB.AggregateCollection(res.Config.T.Beacon.BeaconTable, ssn, pipeline) } // New creates a new beacon module @@ -76,14 +76,14 @@ func newBeacon(res *database.Resources) *Beacon { // If the threshold is incorrectly specified, fix it up. // We require at least four delta times to analyze // (Q1, Q2, Q3, Q4). 
So we need at least 5 connections - thresh := res.System.BeaconConfig.DefaultConnectionThresh + thresh := res.Config.S.Beacon.DefaultConnectionThresh if thresh < 5 { thresh = 5 } return &Beacon{ db: res.DB.GetSelectedDB(), - resources: res, + res: res, defaultConnThresh: thresh, log: res.Log, collectChannel: make(chan string), @@ -97,7 +97,7 @@ func newBeacon(res *database.Resources) *Beacon { // Run Starts the beacon hunt process func (t *Beacon) run() { - session := t.resources.DB.Session.Copy() + session := t.res.DB.Session.Copy() defer session.Close() //Find first time @@ -109,14 +109,14 @@ func (t *Beacon) run() { //This could be optimized with an aggregation var conn data.Conn session.DB(t.db). - C(t.resources.System.StructureConfig.ConnTable). + C(t.res.Config.T.Structure.ConnTable). Find(nil).Limit(1).Sort("ts").Iter().Next(&conn) t.minTime = conn.Ts t.log.Debug("Looking for last connection timestamp") session.DB(t.db). - C(t.resources.System.StructureConfig.ConnTable). + C(t.res.Config.T.Structure.ConnTable). Find(nil).Limit(1).Sort("-ts").Iter().Next(&conn) t.maxTime = conn.Ts @@ -130,7 +130,7 @@ func (t *Beacon) run() { // add local addresses to collect channel var host structure.Host localIter := session.DB(t.db). - C(t.resources.System.StructureConfig.HostTable). + C(t.res.Config.T.Structure.HostTable). Find(bson.M{"local": true}).Iter() //kick off the threaded goroutines @@ -165,14 +165,14 @@ func (t *Beacon) run() { // collect grabs all src, dst pairs and their connection data func (t *Beacon) collect() { - session := t.resources.DB.Session.Copy() + session := t.res.DB.Session.Copy() defer session.Close() host, more := <-t.collectChannel for more { //grab all destinations related with this host var uconn structure.UniqueConnection destIter := session.DB(t.db). - C(t.resources.System.StructureConfig.UniqueConnTable). + C(t.res.Config.T.Structure.UniqueConnTable). 
Find(bson.M{"src": host}).Iter() for destIter.Next(&uconn) { @@ -191,7 +191,7 @@ func (t *Beacon) collect() { //Grab connection data var conn data.Conn connIter := session.DB(t.db). - C(t.resources.System.StructureConfig.ConnTable). + C(t.res.Config.T.Structure.ConnTable). Find(bson.M{"id_origin_h": uconn.Src, "id_resp_h": uconn.Dst}). Iter() @@ -220,7 +220,7 @@ func (t *Beacon) analyze() { //If removing duplicates lowered the conn count under the threshold, //remove this data from the analysis - if len(data.ts) < t.resources.System.BeaconConfig.DefaultConnectionThresh { + if len(data.ts) < t.res.Config.S.Beacon.DefaultConnectionThresh { continue } @@ -349,11 +349,11 @@ func (t *Beacon) analyze() { // write writes the beacon analysis results to the database func (t *Beacon) write() { - session := t.resources.DB.Session.Copy() + session := t.res.DB.Session.Copy() defer session.Close() for data := range t.writeChannel { - session.DB(t.db).C(t.resources.System.BeaconConfig.BeaconTable).Insert(data) + session.DB(t.db).C(t.res.Config.T.Beacon.BeaconTable).Insert(data) } t.writeWg.Done() } @@ -393,7 +393,7 @@ func createCountMap(data []int64) ([]int64, []int64, int64, int64) { // score to report on. Setting cuttoff to 0 retrieves all the records from the // beaconing collection. Setting cuttoff to 1 will prevent the aggregation from // returning any records. 
-func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { +func getViewPipeline(res *database.Resources, cuttoff float64) []bson.D { return []bson.D{ { {"$match", bson.D{ @@ -404,7 +404,7 @@ func getViewPipeline(r *database.Resources, cuttoff float64) []bson.D { }, { {"$lookup", bson.D{ - {"from", r.System.StructureConfig.UniqueConnTable}, + {"from", res.Config.T.Structure.UniqueConnTable}, {"localField", "uconn_id"}, {"foreignField", "_id"}, {"as", "uconn"}, diff --git a/analysis/beacon/beacon_test.go b/analysis/beacon/beacon_test.go index 47d66baa..6251ddb3 100644 --- a/analysis/beacon/beacon_test.go +++ b/analysis/beacon/beacon_test.go @@ -24,13 +24,13 @@ func printAnalysis(res *datatype_beacon.BeaconAnalysisOutput) string { } func TestAnalysis(t *testing.T) { - resources := database.InitMockResources("") - resources.Log.Level = log.DebugLevel - resources.System.BeaconConfig.DefaultConnectionThresh = 2 + res := database.InitMockResources("") + res.Log.Level = log.DebugLevel + res.Config.S.Beacon.DefaultConnectionThresh = 2 fail := false for i, val := range testDataList { - beaconing := newBeacon(resources) + beaconing := newBeacon(res) //set first and last connection times beaconing.minTime = val.ts[0] beaconing.maxTime = val.ts[len(val.ts)-1] diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 49be1a47..19ef1265 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -29,17 +29,17 @@ func BuildBlacklistedCollections(res *database.Resources) { currentDB := res.DB.GetSelectedDB() var err error var blDatabase blDB.Handle - if res.System.MongoDBConfig.TLS.Enabled { + if res.Config.S.MongoDB.TLS.Enabled { blDatabase, err = blDB.NewSecureMongoDB( - res.System.MongoDBConfig.ConnectionString, - res.System.MongoDBConfig.AuthMechanismParsed, + res.Config.S.MongoDB.ConnectionString, + res.Config.R.MongoDB.AuthMechanismParsed, "rita-bl", - res.System.MongoDBConfig.TLS.CAFile, + 
res.Config.R.MongoDB.TLS.TLSConfig, ) } else { blDatabase, err = blDB.NewMongoDB( - res.System.MongoDBConfig.ConnectionString, - res.System.MongoDBConfig.AuthMechanismParsed, + res.Config.S.MongoDB.ConnectionString, + res.Config.R.MongoDB.AuthMechanismParsed, "rita-bl", ) } @@ -60,7 +60,7 @@ func BuildBlacklistedCollections(res *database.Resources) { ) //set up the lists to check against - ritaBL.SetLists(buildBlacklists(res.System)...) + ritaBL.SetLists(buildBlacklists(res.Config)...) //set up remote calls to check against ritaBL.SetRPCs(buildBlacklistRPCS(res)...) @@ -76,25 +76,25 @@ func BuildBlacklistedCollections(res *database.Resources) { uniqueDestAggregation := getUniqueIPFromUconnPipeline("dst") uniqueSourceIter := res.DB.AggregateCollection( - res.System.StructureConfig.UniqueConnTable, + res.Config.T.Structure.UniqueConnTable, ssn, uniqueSourcesAggregation, ) uniqueDestIter := res.DB.AggregateCollection( - res.System.StructureConfig.UniqueConnTable, + res.Config.T.Structure.UniqueConnTable, ssn, uniqueDestAggregation, ) - hostnamesIter := ssn.DB(currentDB).C(res.System.DNSConfig.HostnamesTable). + hostnamesIter := ssn.DB(currentDB).C(res.Config.T.DNS.HostnamesTable). Find(nil).Iter() - urlIter := ssn.DB(currentDB).C(res.System.UrlsConfig.UrlsTable). + urlIter := ssn.DB(currentDB).C(res.Config.T.Urls.UrlsTable). 
Find(nil).Iter() //create the collections - sourceIPs := res.System.BlacklistedConfig.SourceIPsTable - destIPs := res.System.BlacklistedConfig.DestIPsTable - hostnames := res.System.BlacklistedConfig.HostnamesTable - urls := res.System.BlacklistedConfig.UrlsTable + sourceIPs := res.Config.T.Blacklisted.SourceIPsTable + destIPs := res.Config.T.Blacklisted.DestIPsTable + hostnames := res.Config.T.Blacklisted.HostnamesTable + urls := res.Config.T.Blacklisted.UrlsTable collections := []string{sourceIPs, destIPs, hostnames, urls} for _, collection := range collections { @@ -148,33 +148,33 @@ func ensureBLIndexes(ssn *mgo.Session, currentDB, collName string) { } //buildBlacklists gathers the blacklists to check against -func buildBlacklists(system *config.SystemConfig) []list.List { +func buildBlacklists(conf *config.Config) []list.List { //build up the lists var blacklists []list.List //use prebuilt lists - if system.BlacklistedConfig.UseIPms { + if conf.S.Blacklisted.UseIPms { blacklists = append(blacklists, lists.NewMyIPmsList()) } - if system.BlacklistedConfig.UseDNSBH { + if conf.S.Blacklisted.UseDNSBH { blacklists = append(blacklists, lists.NewDNSBHList()) } - if system.BlacklistedConfig.UseMDL { + if conf.S.Blacklisted.UseMDL { blacklists = append(blacklists, lists.NewMdlList()) } //use custom lists ipLists := buildCustomBlacklists( list.BlacklistedIPType, - system.BlacklistedConfig.IPBlacklists, + conf.S.Blacklisted.IPBlacklists, ) hostLists := buildCustomBlacklists( list.BlacklistedHostnameType, - system.BlacklistedConfig.HostnameBlacklists, + conf.S.Blacklisted.HostnameBlacklists, ) urlLists := buildCustomBlacklists( list.BlacklistedURLType, - system.BlacklistedConfig.URLBlacklists, + conf.S.Blacklisted.URLBlacklists, ) blacklists = append(blacklists, ipLists...) blacklists = append(blacklists, hostLists...) 
@@ -220,11 +220,11 @@ func tryOpenFileThenURL(path string) func() (io.ReadCloser, error) { func buildBlacklistRPCS(res *database.Resources) []rpc.RPC { var rpcs []rpc.RPC //set up google url checker - if len(res.System.BlacklistedConfig.SafeBrowsing.APIKey) > 0 && - len(res.System.BlacklistedConfig.SafeBrowsing.Database) > 0 { + if len(res.Config.S.Blacklisted.SafeBrowsing.APIKey) > 0 && + len(res.Config.S.Blacklisted.SafeBrowsing.Database) > 0 { googleRPC, err := rpc.NewGoogleSafeBrowsingURLsRPC( - res.System.BlacklistedConfig.SafeBrowsing.APIKey, - res.System.BlacklistedConfig.SafeBrowsing.Database, + res.Config.S.Blacklisted.SafeBrowsing.APIKey, + res.Config.S.Blacklisted.SafeBrowsing.Database, res.Log.Writer(), ) if err == nil { diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go index 27d09d2f..db059552 100644 --- a/analysis/blacklist/hostnames.go +++ b/analysis/blacklist/hostnames.go @@ -29,7 +29,7 @@ func buildBlacklistedHostnames(hostnames *mgo.Iter, res *database.Resources, defer ssn.Close() outputCollection := ssn.DB(res.DB.GetSelectedDB()).C( - res.System.BlacklistedConfig.HostnamesTable, + res.Config.T.Blacklisted.HostnamesTable, ) //create type for communicating rita-bl results resultsChannel := make(resultsChan) @@ -49,8 +49,8 @@ func buildBlacklistedHostnames(hostnames *mgo.Iter, res *database.Resources, err := fillBlacklistedHostname( &blHostname, res.DB.GetSelectedDB(), - res.System.DNSConfig.HostnamesTable, - res.System.StructureConfig.UniqueConnTable, + res.Config.T.DNS.HostnamesTable, + res.Config.T.Structure.UniqueConnTable, ssn, ) if err != nil { diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index 63f41e13..396b2c3f 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -55,11 +55,11 @@ func buildBlacklistedIPs(ips *mgo.Iter, res *database.Resources, var outputCollection *mgo.Collection if source { outputCollection = ssn.DB(res.DB.GetSelectedDB()).C( - 
res.System.BlacklistedConfig.SourceIPsTable, + res.Config.T.Blacklisted.SourceIPsTable, ) } else { outputCollection = ssn.DB(res.DB.GetSelectedDB()).C( - res.System.BlacklistedConfig.DestIPsTable, + res.Config.T.Blacklisted.DestIPsTable, ) } @@ -82,7 +82,7 @@ func buildBlacklistedIPs(ips *mgo.Iter, res *database.Resources, err := fillBlacklistedIP( &blIP, res.DB.GetSelectedDB(), - res.System.StructureConfig.UniqueConnTable, + res.Config.T.Structure.UniqueConnTable, ssn, source, ) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 08250513..7a7757d8 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -32,7 +32,7 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, defer ssn.Close() outputCollection := ssn.DB(res.DB.GetSelectedDB()).C( - res.System.BlacklistedConfig.UrlsTable, + res.Config.T.Blacklisted.UrlsTable, ) //create type for communicating rita-bl results resultsChannel := make(resultsChan) @@ -53,8 +53,8 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, &blURL, url, res.DB.GetSelectedDB(), - res.System.UrlsConfig.UrlsTable, - res.System.StructureConfig.UniqueConnTable, + res.Config.T.Urls.UrlsTable, + res.Config.T.Structure.UniqueConnTable, ssn, prefix, ) diff --git a/analysis/crossref/beaconing.go b/analysis/crossref/beaconing.go index 45c45019..697c744c 100644 --- a/analysis/crossref/beaconing.go +++ b/analysis/crossref/beaconing.go @@ -25,7 +25,7 @@ func (s BeaconingSelector) Select(res *database.Resources) (<-chan string, <-cha go func() { ssn := res.DB.Session.Copy() defer ssn.Close() - iter := beacon.GetBeaconResultsView(res, ssn, res.System.CrossrefConfig.BeaconThreshold) + iter := beacon.GetBeaconResultsView(res, ssn, res.Config.S.Crossref.BeaconThreshold) //this will produce duplicates if multiple sources beaconed to the same dest //however, this is accounted for in the finalizing step of xref diff --git a/analysis/crossref/blacklist-dest-ips.go 
b/analysis/crossref/blacklist-dest-ips.go index d3f1c513..11e379aa 100644 --- a/analysis/crossref/blacklist-dest-ips.go +++ b/analysis/crossref/blacklist-dest-ips.go @@ -29,13 +29,13 @@ func (s BLDestIPSelector) Select(res *database.Resources) (<-chan string, <-chan var blIPs []blacklist.BlacklistedIP ssn.DB(res.DB.GetSelectedDB()). - C(res.System.BlacklistedConfig.DestIPsTable). + C(res.Config.T.Blacklisted.DestIPsTable). Find(nil).All(&blIPs) for _, ip := range blIPs { var connected []structure.UniqueConnection ssn.DB(res.DB.GetSelectedDB()). - C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip.IP}, ).All(&connected) for _, uconn := range connected { diff --git a/analysis/crossref/blacklist-source-ips.go b/analysis/crossref/blacklist-source-ips.go index 0a716872..71b60e21 100644 --- a/analysis/crossref/blacklist-source-ips.go +++ b/analysis/crossref/blacklist-source-ips.go @@ -29,13 +29,13 @@ func (s BLSourceIPSelector) Select(res *database.Resources) (<-chan string, <-ch var blIPs []blacklist.BlacklistedIP ssn.DB(res.DB.GetSelectedDB()). - C(res.System.BlacklistedConfig.SourceIPsTable). + C(res.Config.T.Blacklisted.SourceIPsTable). Find(nil).All(&blIPs) for _, ip := range blIPs { var connected []structure.UniqueConnection ssn.DB(res.DB.GetSelectedDB()). 
- C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"src": ip.IP}, ).All(&connected) for _, uconn := range connected { diff --git a/analysis/crossref/crossref.go b/analysis/crossref/crossref.go index 1a35e5fa..b46b6429 100644 --- a/analysis/crossref/crossref.go +++ b/analysis/crossref/crossref.go @@ -23,8 +23,8 @@ func getXRefSelectors() []dataXRef.XRefSelector { // BuildXRefCollection runs threaded crossref analysis func BuildXRefCollection(res *database.Resources) { indexes := []mgo.Index{{Key: []string{"host"}, Unique: true}} - res.DB.CreateCollection(res.System.CrossrefConfig.SourceTable, false, indexes) - res.DB.CreateCollection(res.System.CrossrefConfig.DestTable, false, indexes) + res.DB.CreateCollection(res.Config.T.Crossref.SourceTable, false, indexes) + res.DB.CreateCollection(res.Config.T.Crossref.DestTable, false, indexes) //maps from analysis types to channels of hosts found sources := make(map[string]<-chan string) @@ -40,13 +40,13 @@ func BuildXRefCollection(res *database.Resources) { xRefWG := new(sync.WaitGroup) xRefWG.Add(2) //kick off writes - go multiplexXRef(res, res.System.CrossrefConfig.SourceTable, sources, xRefWG) - go multiplexXRef(res, res.System.CrossrefConfig.DestTable, destinations, xRefWG) + go multiplexXRef(res, res.Config.T.Crossref.SourceTable, sources, xRefWG) + go multiplexXRef(res, res.Config.T.Crossref.DestTable, destinations, xRefWG) xRefWG.Wait() //group by host ip and put module findings into an array - finalizeXRef(res, res.System.CrossrefConfig.SourceTable) - finalizeXRef(res, res.System.CrossrefConfig.DestTable) + finalizeXRef(res, res.Config.T.Crossref.SourceTable) + finalizeXRef(res, res.Config.T.Crossref.DestTable) } //multiplexXRef takes a target colllection, and a map from diff --git a/analysis/crossref/scanning.go b/analysis/crossref/scanning.go index 384641e3..2fc0f6ba 100644 --- a/analysis/crossref/scanning.go +++ b/analysis/crossref/scanning.go @@ -25,7 
+25,7 @@ func (s ScanningSelector) Select(res *database.Resources) (<-chan string, <-chan ssn := res.DB.Session.Copy() defer ssn.Close() iter := ssn.DB(res.DB.GetSelectedDB()). - C(res.System.ScanningConfig.ScanTable).Find(nil).Iter() + C(res.Config.T.Scanning.ScanTable).Find(nil).Iter() var data scanning.Scan for iter.Next(&data) { diff --git a/analysis/dns/explodedDNS.go b/analysis/dns/explodedDNS.go index 446b8494..c5ad1347 100644 --- a/analysis/dns/explodedDNS.go +++ b/analysis/dns/explodedDNS.go @@ -25,7 +25,7 @@ func BuildExplodedDNSCollection(res *database.Resources) { // times each super domain was visited func buildExplodedDNSVistedCounts(res *database.Resources) { res.DB.MapReduceCollection( - res.System.StructureConfig.DNSTable, + res.Config.T.Structure.DNSTable, mgo.MapReduce{ Map: getExplodedDNSMapper("query"), Reduce: getExplodedDNSReducer(), @@ -56,7 +56,7 @@ func zipExplodedDNSResults(res *database.Resources) { {Key: []string{"domain"}, Unique: true}, {Key: []string{"subdomains"}}, } - res.DB.CreateCollection(res.System.DNSConfig.ExplodedDNSTable, false, indexes) + res.DB.CreateCollection(res.Config.T.DNS.ExplodedDNSTable, false, indexes) res.DB.AggregateCollection(tempVistedCountCollName, ssn, // nolint: vet []bson.D{ @@ -80,7 +80,7 @@ func zipExplodedDNSResults(res *database.Resources) { }}, }, { - {"$out", res.System.DNSConfig.ExplodedDNSTable}, + {"$out", res.Config.T.DNS.ExplodedDNSTable}, }, }, ) diff --git a/analysis/dns/hostnames.go b/analysis/dns/hostnames.go index 1e6d3d80..3925e96a 100644 --- a/analysis/dns/hostnames.go +++ b/analysis/dns/hostnames.go @@ -16,9 +16,9 @@ const tempHostnamesCollName string = "__temp_hostnames" func BuildHostnamesCollection(res *database.Resources) { sourceCollectionName, tempCollectionName, - pipeline := getHostnamesAggregationScript(res.System) + pipeline := getHostnamesAggregationScript(res.Config) - hostNamesCollection := res.System.DNSConfig.HostnamesTable + hostNamesCollection := 
res.Config.T.DNS.HostnamesTable ssn := res.DB.Session.Copy() defer ssn.Close() @@ -39,8 +39,8 @@ func BuildHostnamesCollection(res *database.Resources) { //getHostnamesAggregationScript maps dns a type queries to their answers //unfortunately, answers may be other hostnames -func getHostnamesAggregationScript(sysCfg *config.SystemConfig) (string, string, []bson.D) { - sourceCollectionName := sysCfg.StructureConfig.DNSTable +func getHostnamesAggregationScript(conf *config.Config) (string, string, []bson.D) { + sourceCollectionName := conf.T.Structure.DNSTable newCollectionName := tempHostnamesCollName @@ -118,7 +118,7 @@ func GetIPsFromHost(res *database.Resources, host string) []string { ssn := res.DB.Session.Copy() defer ssn.Close() - hostnames := ssn.DB(res.DB.GetSelectedDB()).C(res.System.DNSConfig.HostnamesTable) + hostnames := ssn.DB(res.DB.GetSelectedDB()).C(res.Config.T.DNS.HostnamesTable) var destIPs dnsTypes.Hostname hostnames.Find(bson.M{"host": host}).One(&destIPs) diff --git a/analysis/scanning/scan.go b/analysis/scanning/scan.go index 8843b28c..c36da153 100644 --- a/analysis/scanning/scan.go +++ b/analysis/scanning/scan.go @@ -14,7 +14,7 @@ func BuildScanningCollection(res *database.Resources) { sourceCollectionName, newCollectionName, newCollectionKeys, - pipeline := getScanningCollectionScript(res.System) + pipeline := getScanningCollectionScript(res.Config) // Create it err := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) @@ -28,15 +28,15 @@ func BuildScanningCollection(res *database.Resources) { res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getScanningCollectionScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { +func getScanningCollectionScript(conf *config.Config) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - sourceCollectionName := sysCfg.StructureConfig.ConnTable + sourceCollectionName := 
conf.T.Structure.ConnTable // Name of the new collection - newCollectionName := sysCfg.ScanningConfig.ScanTable + newCollectionName := conf.T.Scanning.ScanTable // Get scan threshold - scanThresh := sysCfg.ScanningConfig.ScanThreshold + scanThresh := conf.S.Scanning.ScanThreshold // Desired indeces keys := []mgo.Index{ diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index 9b794de2..27b9f57b 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -14,7 +14,7 @@ func BuildHostsCollection(res *database.Resources) { sourceCollectionName, newCollectionName, newCollectionKeys, - pipeline := getHosts(res.System) + pipeline := getHosts(res.Config) // Aggregate it! errorCheck := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) @@ -29,12 +29,12 @@ func BuildHostsCollection(res *database.Resources) { res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getHosts(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { +func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - sourceCollectionName := sysCfg.StructureConfig.ConnTable + sourceCollectionName := conf.T.Structure.ConnTable // Name of the new collection - newCollectionName := sysCfg.StructureConfig.HostTable + newCollectionName := conf.T.Structure.HostTable // Desired indeces keys := []mgo.Index{ diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index 629b64bb..1e2de276 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -14,7 +14,7 @@ func GetConnSourcesFromDest(res *database.Resources, ip string) []string { ssn := res.DB.Session.Copy() defer ssn.Close() - cons := ssn.DB(res.DB.GetSelectedDB()).C(res.System.StructureConfig.UniqueConnTable) + cons := ssn.DB(res.DB.GetSelectedDB()).C(res.Config.T.Structure.UniqueConnTable) srcIter := cons.Find(bson.M{"dst": ip}).Iter() var 
srcStruct struct { @@ -35,7 +35,7 @@ func BuildUniqueConnectionsCollection(res *database.Resources) { sourceCollectionName, newCollectionName, newCollectionKeys, - pipeline := getUniqueConnectionsScript(res.System) + pipeline := getUniqueConnectionsScript(res.Config) err := res.DB.CreateCollection(newCollectionName, true, newCollectionKeys) if err != nil { @@ -49,12 +49,12 @@ func BuildUniqueConnectionsCollection(res *database.Resources) { res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getUniqueConnectionsScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { +func getUniqueConnectionsScript(conf *config.Config) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - sourceCollectionName := sysCfg.StructureConfig.ConnTable + sourceCollectionName := conf.T.Structure.ConnTable // Name of the new collection - newCollectionName := sysCfg.StructureConfig.UniqueConnTable + newCollectionName := conf.T.Structure.UniqueConnTable // Desired Indeces keys := []mgo.Index{ diff --git a/analysis/urls/url.go b/analysis/urls/url.go index c3c110ce..76d5fdf1 100644 --- a/analysis/urls/url.go +++ b/analysis/urls/url.go @@ -15,7 +15,7 @@ func BuildUrlsCollection(res *database.Resources) { newCollectionName, newCollectionKeys, job, - pipeline := getURLCollectionScript(res.System) + pipeline := getURLCollectionScript(res.Config) // Create it err := res.DB.CreateCollection(newCollectionName, false, []mgo.Index{}) @@ -34,17 +34,17 @@ func BuildUrlsCollection(res *database.Resources) { // Aggregate it res.DB.AggregateCollection(newCollectionName, ssn, pipeline) for _, index := range newCollectionKeys { - ssn.DB(res.DB.GetSelectedDB()).C(res.System.UrlsConfig.UrlsTable). + ssn.DB(res.DB.GetSelectedDB()).C(res.Config.T.Urls.UrlsTable). 
EnsureIndex(index) } } -func getURLCollectionScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, mgo.MapReduce, []bson.D) { +func getURLCollectionScript(conf *config.Config) (string, string, []mgo.Index, mgo.MapReduce, []bson.D) { // Name of source collection which will be aggregated into the new collection - sourceCollectionName := sysCfg.StructureConfig.HTTPTable + sourceCollectionName := conf.T.Structure.HTTPTable // Name of the new collection - newCollectionName := sysCfg.UrlsConfig.UrlsTable + newCollectionName := conf.T.Urls.UrlsTable // Desired indeces keys := []mgo.Index{ diff --git a/analysis/useragent/useragent.go b/analysis/useragent/useragent.go index 7ae2dcaa..72d3d2b4 100644 --- a/analysis/useragent/useragent.go +++ b/analysis/useragent/useragent.go @@ -14,7 +14,7 @@ func BuildUserAgentCollection(res *database.Resources) { sourceCollectionName, newCollectionName, newCollectionKeys, - pipeline := getUserAgentCollectionScript(res.System) + pipeline := getUserAgentCollectionScript(res.Config) // Create it err := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) @@ -30,12 +30,12 @@ func BuildUserAgentCollection(res *database.Resources) { res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) } -func getUserAgentCollectionScript(sysCfg *config.SystemConfig) (string, string, []mgo.Index, []bson.D) { +func getUserAgentCollectionScript(conf *config.Config) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection - sourceCollectionName := sysCfg.StructureConfig.HTTPTable + sourceCollectionName := conf.T.Structure.HTTPTable // Name of the new collection - newCollectionName := sysCfg.UserAgentConfig.UserAgentTable + newCollectionName := conf.T.UserAgent.UserAgentTable // Desired indeces keys := []mgo.Index{ diff --git a/commands/import.go b/commands/import.go index 291d55d3..abb1db2d 100644 --- a/commands/import.go +++ b/commands/import.go @@ -50,19 +50,19 @@ 
func doImport(c *cli.Context) error { //both flags were set if importDir != "" && databaseName != "" { - res.System.BroConfig.LogPath = importDir - res.System.BroConfig.DBPrefix = "" + res.Config.S.Bro.LogPath = importDir + res.Config.S.Bro.DBPrefix = "" //Clear out the directory map and set the default database - res.System.BroConfig.DirectoryMap = make(map[string]string) - res.System.BroConfig.DefaultDatabase = databaseName + res.Config.S.Bro.DirectoryMap = make(map[string]string) + res.Config.S.Bro.DefaultDatabase = databaseName } - res.Log.Infof("Importing %s\n", res.System.BroConfig.LogPath) - fmt.Println("[+] Importing " + res.System.BroConfig.LogPath) + res.Log.Infof("Importing %s\n", res.Config.S.Bro.LogPath) + fmt.Println("[+] Importing " + res.Config.S.Bro.LogPath) importer := parser.NewFSImporter(res, threads, threads) datastore := parser.NewMongoDatastore(res.DB.Session, res.MetaDB, - res.System.BroConfig.ImportBuffer, res.Log) + res.Config.S.Bro.ImportBuffer, res.Log) importer.Run(datastore) - res.Log.Infof("Finished importing %s\n", res.System.BroConfig.LogPath) + res.Log.Infof("Finished importing %s\n", res.Config.S.Bro.LogPath) return nil } diff --git a/commands/reset-analysis.go b/commands/reset-analysis.go index 36c043ea..d52019e4 100644 --- a/commands/reset-analysis.go +++ b/commands/reset-analysis.go @@ -36,9 +36,9 @@ func init() { func cleanAnalysis(database string, res *database.Resources) error { //clean database - conn := res.System.StructureConfig.ConnTable - http := res.System.StructureConfig.HTTPTable - dns := res.System.StructureConfig.DNSTable + conn := res.Config.T.Structure.ConnTable + http := res.Config.T.Structure.HTTPTable + dns := res.Config.T.Structure.DNSTable names, err := res.DB.Session.DB(database).CollectionNames() if err != nil || len(names) == 0 { diff --git a/commands/show-bl-hostname.go b/commands/show-bl-hostname.go index 7f80c350..25190a06 100644 --- a/commands/show-bl-hostname.go +++ b/commands/show-bl-hostname.go @@ 
-45,7 +45,7 @@ func printBLHostnames(c *cli.Context) error { var blHosts []blacklist.BlacklistedHostname res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.HostnamesTable). + C(res.Config.T.Blacklisted.HostnamesTable). Find(nil).Sort("-" + sort).All(&blHosts) if len(blHosts) == 0 { @@ -62,7 +62,7 @@ func printBLHostnames(c *cli.Context) error { //then find all of the hosts which talked to the ip var connected []structure.UniqueConnection res.DB.Session.DB(db). - C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip}, ).All(&connected) //and aggregate the source ip addresses diff --git a/commands/show-bl-ip.go b/commands/show-bl-ip.go index 55deb89c..9275cfd6 100644 --- a/commands/show-bl-ip.go +++ b/commands/show-bl-ip.go @@ -72,7 +72,7 @@ func printBLSourceIPs(c *cli.Context) error { var blIPs []blacklist.BlacklistedIP res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.SourceIPsTable). + C(res.Config.T.Blacklisted.SourceIPsTable). Find(nil).Sort("-" + sort).All(&blIPs) if len(blIPs) == 0 { @@ -83,7 +83,7 @@ func printBLSourceIPs(c *cli.Context) error { for i, ip := range blIPs { var connected []structure.UniqueConnection res.DB.Session.DB(db). - C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"src": ip.IP}, ).All(&connected) for _, uconn := range connected { @@ -115,7 +115,7 @@ func printBLDestIPs(c *cli.Context) error { var blIPs []blacklist.BlacklistedIP res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.DestIPsTable). + C(res.Config.T.Blacklisted.DestIPsTable). Find(nil).Sort("-" + sort).All(&blIPs) if len(blIPs) == 0 { @@ -126,7 +126,7 @@ func printBLDestIPs(c *cli.Context) error { for i, ip := range blIPs { var connected []structure.UniqueConnection res.DB.Session.DB(db). 
- C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip.IP}, ).All(&connected) for _, uconn := range connected { diff --git a/commands/show-bl-url.go b/commands/show-bl-url.go index c3fc692e..9956823d 100644 --- a/commands/show-bl-url.go +++ b/commands/show-bl-url.go @@ -43,7 +43,7 @@ func printBLURLs(c *cli.Context) error { var blURLs []blacklist.BlacklistedURL res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.UrlsTable). + C(res.Config.T.Blacklisted.UrlsTable). Find(nil).Sort("-" + sort).All(&blURLs) if len(blURLs) == 0 { @@ -55,7 +55,7 @@ func printBLURLs(c *cli.Context) error { for i, blURL := range blURLs { //get the ips associated with the url var urlEntry urls.URL - res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable). + res.DB.Session.DB(db).C(res.Config.T.Urls.UrlsTable). Find(bson.M{"url": blURL.Host, "uri": blURL.Resource}).One(&urlEntry) ips := urlEntry.IPs //and loop over the ips @@ -63,7 +63,7 @@ func printBLURLs(c *cli.Context) error { //then find all of the hosts which talked to the ip var connected []structure.UniqueConnection res.DB.Session.DB(db). 
- C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip}, ).All(&connected) //and aggregate the source ip addresses diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 7fe2902e..542ad316 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -30,7 +30,7 @@ func init() { res := database.InitResources(c.String("config")) var explodedResults []dns.ExplodedDNS - iter := res.DB.Session.DB(c.String("database")).C(res.System.DNSConfig.ExplodedDNSTable).Find(nil) + iter := res.DB.Session.DB(c.String("database")).C(res.Config.T.DNS.ExplodedDNSTable).Find(nil) iter.Sort("-subdomains").All(&explodedResults) diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index 831c1c98..611085c3 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -30,7 +30,7 @@ func init() { res := database.InitResources(c.String("config")) var longConns []data.Conn - coll := res.DB.Session.DB(c.String("database")).C(res.System.StructureConfig.ConnTable) + coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Structure.ConnTable) sortStr := "-duration" diff --git a/commands/show-scans.go b/commands/show-scans.go index a9ef4a81..2be6f9d3 100644 --- a/commands/show-scans.go +++ b/commands/show-scans.go @@ -31,7 +31,7 @@ func init() { res := database.InitResources(c.String("config")) var scans []scanning.Scan - coll := res.DB.Session.DB(c.String("database")).C(res.System.ScanningConfig.ScanTable) + coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Scanning.ScanTable) coll.Find(nil).All(&scans) if len(scans) == 0 { diff --git a/commands/show-urls.go b/commands/show-urls.go index deb449f4..b6ba7fa3 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -30,7 +30,7 @@ func init() { res := database.InitResources(c.String("config")) var urls []urls.URL - coll := 
res.DB.Session.DB(c.String("database")).C(res.System.UrlsConfig.UrlsTable) + coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Urls.UrlsTable) coll.Find(nil).Sort("-length").All(&urls) @@ -67,7 +67,7 @@ func init() { res := database.InitResources("") var urls []urls.URL - coll := res.DB.Session.DB(c.String("database")).C(res.System.UrlsConfig.UrlsTable) + coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Urls.UrlsTable) coll.Find(nil).Sort("-count").All(&urls) diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index 1a478b09..a9d9c8bb 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -34,7 +34,7 @@ func init() { res := database.InitResources(c.String("config")) var agents []useragent.UserAgent - coll := res.DB.Session.DB(c.String("database")).C(res.System.UserAgentConfig.UserAgentTable) + coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.UserAgent.UserAgentTable) var sortStr string if c.Bool("least-used") { diff --git a/commands/test-config.go b/commands/test-config.go index b23bf48d..d6d9f9fb 100644 --- a/commands/test-config.go +++ b/commands/test-config.go @@ -30,11 +30,18 @@ func init() { func testConfiguration(c *cli.Context) error { res := database.InitResources(c.String("config")) - yml, err := yaml.Marshal(res.System) + staticConfig, err := yaml.Marshal(res.Config.S) if err != nil { return err } - fmt.Fprintf(os.Stdout, "\n%s\n", string(yml)) + tableConfig, err := yaml.Marshal(res.Config.T) + if err != nil { + return err + } + + fmt.Fprintf(os.Stdout, "\n%s\n", string(staticConfig)) + fmt.Fprintf(os.Stdout, "\n%s\n", string(tableConfig)) + return nil } diff --git a/config/config.go b/config/config.go index 62c62772..0a80d3bc 100644 --- a/config/config.go +++ b/config/config.go @@ -2,150 +2,25 @@ package config import ( "fmt" - "io/ioutil" "os" "os/user" "reflect" - "time" - - "github.com/ocmdev/mgosec" - - "gopkg.in/yaml.v2" ) //VERSION is filled at compile time with 
the git version of RITA var VERSION = "undefined" type ( - //SystemConfig is the container for other config sections - SystemConfig struct { - BatchSize int `yaml:"BatchSize"` - MongoDBConfig MongoDBCfg `yaml:"MongoDB"` - Prefetch float64 `yaml:"Prefetch"` - LogConfig LogCfg `yaml:"LogConfig"` - BlacklistedConfig BlacklistedCfg `yaml:"BlackListed"` - DNSConfig DNSCfg `yaml:"Dns"` - CrossrefConfig CrossrefCfg `yaml:"Crossref"` - ScanningConfig ScanningCfg `yaml:"Scanning"` - StructureConfig StructureCfg `yaml:"Structure"` - BeaconConfig BeaconCfg `yaml:"Beacon"` - UrlsConfig UrlsCfg `yaml:"Urls"` - UserAgentConfig UserAgentCfg `yaml:"UserAgent"` - BroConfig BroCfg `yaml:"Bro"` - MetaTables MetaCfg `yaml:"MetaTables"` - Version string - } - - //MongoDBCfg contains the means for connecting to MongoDB - MongoDBCfg struct { - ConnectionString string `yaml:"ConnectionString"` - SocketTimeout time.Duration `yaml:"SocketTimeout"` - AuthMechanism string `yaml:"AuthenticationMechanism"` - AuthMechanismParsed mgosec.AuthMechanism `yaml:"AuthenticationMechanismParsed,omitempty"` - TLS TLSCfg `yaml:"TLS"` - } - - //TLSCfg contains the means for connecting to MongoDB over TLS - TLSCfg struct { - Enabled bool `yaml:"Enable"` - CAFile string `yaml:"CAFile"` - } - - //LogCfg contains the configuration for logging - LogCfg struct { - LogLevel int `yaml:"LogLevel"` - RitaLogPath string `yaml:"RitaLogPath"` - LogToFile bool `yaml:"LogToFile"` - RitaLogTable string `yaml:"RitaLogTable"` - LogToDB bool `yaml:"LogToDB"` - } - - //StructureCfg contains the names of the base level collections - StructureCfg struct { - ConnTable string `yaml:"ConnectionTable"` - HTTPTable string `yaml:"HttpTable"` - DNSTable string `yaml:"DnsTable"` - UniqueConnTable string `yaml:"UniqueConnectionTable"` - HostTable string `yaml:"HostTable"` - } - - //BlacklistedCfg is used to control the blacklisted analysis module - BlacklistedCfg struct { - BlacklistDatabase string `yaml:"Database"` - UseIPms bool 
`yaml:"myIP.ms"` - UseDNSBH bool `yaml:"MalwareDomains.com"` - UseMDL bool `yaml:"MalwareDomainList.com"` - SafeBrowsing SafeBrowsingCfg `yaml:"SafeBrowsing"` - IPBlacklists []string `yaml:"CustomIPBlacklists"` - HostnameBlacklists []string `yaml:"CustomHostnameBlacklists"` - URLBlacklists []string `yaml:"CustomURLBlacklists"` - SourceIPsTable string `yaml:"SourceIPsTable"` - DestIPsTable string `yaml:"DestIPsTable"` - HostnamesTable string `yaml:"HostnamesTable"` - UrlsTable string `yaml:"UrlsTable"` - } - - //DNSCfg is used to control the dns analysis module - DNSCfg struct { - ExplodedDNSTable string `yaml:"ExplodedDnsTable"` - HostnamesTable string `yaml:"HostnamesTable"` - } - - //CrossrefCfg is used to control the crossref analysis module - CrossrefCfg struct { - SourceTable string `yaml:"SourceTable"` - DestTable string `yaml:"DestinationTable"` - BeaconThreshold float64 `yaml:"BeaconThreshold"` - } - - //SafeBrowsingCfg contains the details for contacting Google's safebrowsing api - SafeBrowsingCfg struct { - APIKey string `yaml:"APIKey"` - Database string `yaml:"Database"` - } - - //ScanningCfg is used to control the scanning analysis module - ScanningCfg struct { - ScanThreshold int `yaml:"ScanThreshold"` - ScanTable string `yaml:"ScanTable"` - } - - //BeaconCfg is used to control the beaconing analysis module - BeaconCfg struct { - DefaultConnectionThresh int `yaml:"DefaultConnectionThresh"` - BeaconTable string `yaml:"BeaconTable"` - } - - //UrlsCfg is used to control the urls analysis module - UrlsCfg struct { - UrlsTable string `yaml:"UrlsTable"` - } - - //UserAgentCfg is used to control the urls analysis module - UserAgentCfg struct { - UserAgentTable string `yaml:"UserAgentTable"` - } - - //BroCfg controls the file parser - BroCfg struct { - LogPath string `yaml:"LogPath"` - DBPrefix string `yaml:"DBPrefix"` - MetaDB string `yaml:"MetaDB"` - DirectoryMap map[string]string `yaml:"DirectoryMap"` - DefaultDatabase string `yaml:"DefaultDatabase"` - 
UseDates bool `yaml:"UseDates"` - ImportBuffer int `yaml:"ImportBuffer"` - } - - //MetaCfg contains the meta db collection names - MetaCfg struct { - FilesTable string `yaml:"FilesTable"` - DatabasesTable string `yaml:"DatabasesTable"` + //Config holds the configuration for the running system + Config struct { + R RunningCfg + S StaticCfg + T TableCfg } ) // GetConfig retrieves a configuration in order of precedence -func GetConfig(cfgPath string) (*SystemConfig, bool) { +func GetConfig(cfgPath string) (*Config, error) { if cfgPath != "" { return loadSystemConfig(cfgPath) } @@ -155,11 +30,7 @@ func GetConfig(cfgPath string) (*SystemConfig, bool) { if err != nil { fmt.Fprintf(os.Stderr, "Could not get user info: %s\n", err.Error()) } else { - - conf, ok := loadSystemConfig(user.HomeDir + "/.rita/config.yaml") - if ok { - return conf, ok - } + return loadSystemConfig(user.HomeDir + "/.rita/config.yaml") } // If none of the other configs have worked, go for the global config @@ -167,42 +38,27 @@ func GetConfig(cfgPath string) (*SystemConfig, bool) { } // loadSystemConfig attempts to parse a config file -func loadSystemConfig(cfgPath string) (*SystemConfig, bool) { - var config = new(SystemConfig) - - config.Version = VERSION - - if _, err := os.Stat(cfgPath); !os.IsNotExist(err) { - cfgFile, err := ioutil.ReadFile(cfgPath) - if err != nil { - return config, false - } - err = yaml.Unmarshal(cfgFile, config) - - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to read config: %s\n", err.Error()) - return config, false - } - - // expand env variables, config is a pointer - // so we have to call elem on the reflect value - expandConfig(reflect.ValueOf(config).Elem()) +func loadSystemConfig(cfgPath string) (*Config, error) { + var config = new(Config) + static, err := loadStaticConfig(cfgPath) + if err != nil { + return config, err + } + config.S = *static - //parse out the mongo authentication mechanism - authMechanism, err := mgosec.ParseAuthMechanism( - 
config.MongoDBConfig.AuthMechanism, - ) - if err != nil { - authMechanism = mgosec.None - fmt.Println("[!] Could not parse MongoDB authentication mechanism") - } - config.MongoDBConfig.AuthMechanismParsed = authMechanism + tables, err := loadTableConfig(cfgPath) + if err != nil { + return config, err + } + config.T = *tables - //set the timeout time in hours - config.MongoDBConfig.SocketTimeout *= time.Hour - return config, true + running, err := loadRunningConfig(static) + if err != nil { + return config, err } - return config, false + config.R = *running + + return config, err } // expandConfig expands environment variables in config strings diff --git a/config/running.go b/config/running.go new file mode 100644 index 00000000..b43908dd --- /dev/null +++ b/config/running.go @@ -0,0 +1,65 @@ +package config + +import ( + "crypto/tls" + "crypto/x509" + "fmt" + "io/ioutil" + + "github.com/ocmdev/mgosec" +) + +type ( + //RunningCfg holds configuration options that are parsed at run time + RunningCfg struct { + MongoDB MongoDBRunningCfg + Version string + } + + //MongoDBRunningCfg holds parsed information for connecting to MongoDB + MongoDBRunningCfg struct { + AuthMechanismParsed mgosec.AuthMechanism + TLS struct { + TLSConfig *tls.Config + } + } +) + +// loadRunningConfig attempts deserializes data in the static config +func loadRunningConfig(config *StaticCfg) (*RunningCfg, error) { + var outConfig = new(RunningCfg) + var err error + + outConfig.Version = VERSION + + //parse the tls configuration + if config.MongoDB.TLS.Enabled { + tlsConf := &tls.Config{} + if !config.MongoDB.TLS.VerifyCertificate { + tlsConf.InsecureSkipVerify = true + } + if len(config.MongoDB.TLS.CAFile) > 0 { + pem, err2 := ioutil.ReadFile(config.MongoDB.TLS.CAFile) + err = err2 + if err != nil { + fmt.Println("[!] 
Could not read MongoDB CA file") + } else { + tlsConf.RootCAs = x509.NewCertPool() + tlsConf.RootCAs.AppendCertsFromPEM(pem) + } + } + outConfig.MongoDB.TLS.TLSConfig = tlsConf + } + + //parse out the mongo authentication mechanism + authMechanism, err := mgosec.ParseAuthMechanism( + config.MongoDB.AuthMechanism, + ) + if err != nil { + authMechanism = mgosec.None + fmt.Println("[!] Could not parse MongoDB authentication mechanism") + } + outConfig.MongoDB.AuthMechanismParsed = authMechanism + + return outConfig, err +} diff --git a/config/static.go b/config/static.go new file mode 100644 index 00000000..e60627fd --- /dev/null +++ b/config/static.go @@ -0,0 +1,116 @@ +package config + +import ( + "fmt" + "io/ioutil" + "os" + "reflect" + + yaml "gopkg.in/yaml.v2" +) + +type ( + //StaticCfg is the container for other static config sections + StaticCfg struct { + MongoDB MongoDBStaticCfg `yaml:"MongoDB"` + Log LogStaticCfg `yaml:"LogConfig"` + Blacklisted BlacklistedStaticCfg `yaml:"BlackListed"` + Crossref CrossrefStaticCfg `yaml:"Crossref"` + Scanning ScanningStaticCfg `yaml:"Scanning"` + Beacon BeaconStaticCfg `yaml:"Beacon"` + Bro BroStaticCfg `yaml:"Bro"` + } + + //MongoDBStaticCfg contains the means for connecting to MongoDB + MongoDBStaticCfg struct { + ConnectionString string `yaml:"ConnectionString"` + AuthMechanism string `yaml:"AuthenticationMechanism"` + TLS TLSStaticCfg `yaml:"TLS"` + } + + //TLSStaticCfg contains the means for connecting to MongoDB over TLS + TLSStaticCfg struct { + Enabled bool `yaml:"Enable"` + VerifyCertificate bool `yaml:"VerifyCertificate"` + CAFile string `yaml:"CAFile"` + } + + //LogStaticCfg contains the configuration for logging + LogStaticCfg struct { + LogLevel int `yaml:"LogLevel"` + RitaLogPath string `yaml:"RitaLogPath"` + LogToFile bool `yaml:"LogToFile"` + LogToDB bool `yaml:"LogToDB"` + } + + //BlacklistedStaticCfg is used to control the blacklisted analysis module + BlacklistedStaticCfg struct { + BlacklistDatabase 
string `yaml:"Database"` + UseIPms bool `yaml:"myIP.ms"` + UseDNSBH bool `yaml:"MalwareDomains.com"` + UseMDL bool `yaml:"MalwareDomainList.com"` + SafeBrowsing SafeBrowsingStaticCfg `yaml:"SafeBrowsing"` + IPBlacklists []string `yaml:"CustomIPBlacklists"` + HostnameBlacklists []string `yaml:"CustomHostnameBlacklists"` + URLBlacklists []string `yaml:"CustomURLBlacklists"` + } + + //CrossrefStaticCfg is used to control the crossref analysis module + CrossrefStaticCfg struct { + BeaconThreshold float64 `yaml:"BeaconThreshold"` + } + + //SafeBrowsingStaticCfg contains the details for contacting Google's safebrowsing api + SafeBrowsingStaticCfg struct { + APIKey string `yaml:"APIKey"` + Database string `yaml:"Database"` + } + + //ScanningStaticCfg is used to control the scanning analysis module + ScanningStaticCfg struct { + ScanThreshold int `yaml:"ScanThreshold"` + } + + //BeaconStaticCfg is used to control the beaconing analysis module + BeaconStaticCfg struct { + DefaultConnectionThresh int `yaml:"DefaultConnectionThresh"` + } + + //BroStaticCfg controls the file parser + BroStaticCfg struct { + LogPath string `yaml:"LogPath"` + DBPrefix string `yaml:"DBPrefix"` + MetaDB string `yaml:"MetaDB"` + DirectoryMap map[string]string `yaml:"DirectoryMap"` + DefaultDatabase string `yaml:"DefaultDatabase"` + UseDates bool `yaml:"UseDates"` + ImportBuffer int `yaml:"ImportBuffer"` + } +) + +// loadStaticConfig attempts to parse a config file +func loadStaticConfig(cfgPath string) (*StaticCfg, error) { + var config = new(StaticCfg) + _, err := os.Stat(cfgPath) + + if os.IsNotExist(err) { + return config, err + } + + cfgFile, err := ioutil.ReadFile(cfgPath) + if err != nil { + return config, err + } + err = yaml.Unmarshal(cfgFile, config) + + if err != nil { + fmt.Fprintf(os.Stderr, "Failed to read config: %s\n", err.Error()) + return config, err + } + + // expand env variables, config is a pointer + // so we have to call elem on the reflect value + 
expandConfig(reflect.ValueOf(config).Elem()) + + return config, nil +} diff --git a/config/tables.go b/config/tables.go new file mode 100644 index 00000000..58e9ad41 --- /dev/null +++ b/config/tables.go @@ -0,0 +1,114 @@ +package config + +import ( + "fmt" + "io/ioutil" + "os" + "reflect" + + yaml "gopkg.in/yaml.v2" +) + +type ( + //TableCfg is the container for other table config sections + TableCfg struct { + Log LogTableCfg `yaml:"LogConfig"` + Blacklisted BlacklistedTableCfg `yaml:"BlackListed"` + DNS DNSTableCfg `yaml:"Dns"` + Crossref CrossrefTableCfg `yaml:"Crossref"` + Scanning ScanningTableCfg `yaml:"Scanning"` + Structure StructureTableCfg `yaml:"Structure"` + Beacon BeaconTableCfg `yaml:"Beacon"` + Urls UrlsTableCfg `yaml:"Urls"` + UserAgent UserAgentTableCfg `yaml:"UserAgent"` + Meta MetaTableCfg `yaml:"MetaTables"` + } + + //LogTableCfg contains the configuration for logging + LogTableCfg struct { + RitaLogTable string `yaml:"RitaLogTable"` + } + + //StructureTableCfg contains the names of the base level collections + StructureTableCfg struct { + ConnTable string `yaml:"ConnectionTable"` + HTTPTable string `yaml:"HttpTable"` + DNSTable string `yaml:"DnsTable"` + UniqueConnTable string `yaml:"UniqueConnectionTable"` + HostTable string `yaml:"HostTable"` + } + + //BlacklistedTableCfg is used to control the blacklisted analysis module + BlacklistedTableCfg struct { + BlacklistDatabase string `yaml:"Database"` + SourceIPsTable string `yaml:"SourceIPsTable"` + DestIPsTable string `yaml:"DestIPsTable"` + HostnamesTable string `yaml:"HostnamesTable"` + UrlsTable string `yaml:"UrlsTable"` + } + + //DNSTableCfg is used to control the dns analysis module + DNSTableCfg struct { + ExplodedDNSTable string `yaml:"ExplodedDnsTable"` + HostnamesTable string `yaml:"HostnamesTable"` + } + + //CrossrefTableCfg is used to control the crossref analysis module + CrossrefTableCfg struct { + SourceTable string `yaml:"SourceTable"` + DestTable string `yaml:"DestinationTable"` 
+ } + + //ScanningTableCfg is used to control the scanning analysis module + ScanningTableCfg struct { + ScanTable string `yaml:"ScanTable"` + } + + //BeaconTableCfg is used to control the beaconing analysis module + BeaconTableCfg struct { + BeaconTable string `yaml:"BeaconTable"` + } + + //UrlsTableCfg is used to control the urls analysis module + UrlsTableCfg struct { + UrlsTable string `yaml:"UrlsTable"` + } + + //UserAgentTableCfg is used to control the urls analysis module + UserAgentTableCfg struct { + UserAgentTable string `yaml:"UserAgentTable"` + } + + //MetaTableCfg contains the meta db collection names + MetaTableCfg struct { + FilesTable string `yaml:"FilesTable"` + DatabasesTable string `yaml:"DatabasesTable"` + } +) + +// loadTableConfig attempts to parse a config file +func loadTableConfig(cfgPath string) (*TableCfg, error) { + var config = new(TableCfg) + _, err := os.Stat(cfgPath) + + if os.IsNotExist(err) { + return config, err + } + + cfgFile, err := ioutil.ReadFile(cfgPath) + if err != nil { + return config, err + } + err = yaml.Unmarshal(cfgFile, config) + + if err != nil { + fmt.Fprintf(os.Stderr, "Failed to read config: %s\n", err.Error()) + return config, err + } + + // expand env variables, config is a pointer + // so we have to call elem on the reflect value + expandConfig(reflect.ValueOf(config).Elem()) + + return config, nil +} diff --git a/database/meta.go b/database/meta.go index 55a6de86..d75f44b8 100644 --- a/database/meta.go +++ b/database/meta.go @@ -38,12 +38,12 @@ func (m *MetaDBHandle) AddNewDB(name string) error { ssn := m.res.DB.Session.Copy() defer ssn.Close() - err := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Insert( + err := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Insert( DBMetaInfo{ Name: name, Analyzed: false, - UsingDates: m.res.System.BroConfig.UseDates, - Version: m.res.System.Version, + UsingDates: m.res.Config.S.Bro.UseDates, + Version: m.res.Config.R.Version, }, ) if err != nil { @@ -68,13 
+68,13 @@ func (m *MetaDBHandle) DeleteDB(name string) error { //get the record var db DBMetaInfo - err := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Find(bson.M{"name": name}).One(&db) + err := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Find(bson.M{"name": name}).One(&db) if err != nil { return err } //delete the record - err = ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Remove(bson.M{"name": name}) + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Remove(bson.M{"name": name}) if err != nil { return err } @@ -83,7 +83,7 @@ func (m *MetaDBHandle) DeleteDB(name string) error { ssn.DB(name).DropDatabase() //delete any parsed file records associated - _, err = ssn.DB(m.DB).C(m.res.System.MetaTables.FilesTable).RemoveAll(bson.M{"database": name}) + _, err = ssn.DB(m.DB).C(m.res.Config.T.Meta.FilesTable).RemoveAll(bson.M{"database": name}) if err != nil { return err } @@ -117,7 +117,7 @@ func (m *MetaDBHandle) MarkDBAnalyzed(name string, complete bool) error { defer ssn.Close() dbr := DBMetaInfo{} - err := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable). + err := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable). Find(bson.M{"name": name}).One(&dbr) if err != nil { @@ -128,7 +128,7 @@ func (m *MetaDBHandle) MarkDBAnalyzed(name string, complete bool) error { return err } - err = ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable). + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable). 
Update(bson.M{"_id": dbr.ID}, bson.M{"$set": bson.M{"analyzed": complete}}) if err != nil { @@ -151,7 +151,7 @@ func (m *MetaDBHandle) GetDBMetaInfo(name string) (DBMetaInfo, error) { ssn := m.res.DB.Session.Copy() defer ssn.Close() var result DBMetaInfo - err := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Find(bson.M{"name": name}).One(&result) + err := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Find(bson.M{"name": name}).One(&result) return result, err } @@ -164,7 +164,7 @@ func (m *MetaDBHandle) GetDatabases() []string { ssn := m.res.DB.Session.Copy() defer ssn.Close() - iter := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Find(nil).Iter() + iter := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Find(nil).Iter() var results []string var db DBMetaInfo @@ -185,7 +185,7 @@ func (m *MetaDBHandle) GetUnAnalyzedDatabases() []string { var results []string var cur DBMetaInfo - iter := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Find(bson.M{"analyzed": false}).Iter() + iter := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Find(bson.M{"analyzed": false}).Iter() for iter.Next(&cur) { results = append(results, cur.Name) } @@ -203,7 +203,7 @@ func (m *MetaDBHandle) GetAnalyzedDatabases() []string { var results []string var cur DBMetaInfo - iter := ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Find(bson.M{"analyzed": true}).Iter() + iter := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Find(bson.M{"analyzed": true}).Iter() for iter.Next(&cur) { results = append(results, cur.Name) } @@ -227,7 +227,7 @@ func (m *MetaDBHandle) GetFiles() ([]fpt.IndexedFile, error) { ssn := m.res.DB.Session.Copy() defer ssn.Close() - err := ssn.DB(m.DB).C(m.res.System.MetaTables.FilesTable). + err := ssn.DB(m.DB).C(m.res.Config.T.Meta.FilesTable). 
Find(nil).Iter().All(&toReturn) if err != nil { m.res.Log.WithFields(log.Fields{ @@ -250,7 +250,7 @@ func (m *MetaDBHandle) AddParsedFiles(files []*fpt.IndexedFile) error { ssn := m.res.DB.Session.Copy() defer ssn.Close() - bulk := ssn.DB(m.DB).C(m.res.System.MetaTables.FilesTable).Bulk() + bulk := ssn.DB(m.DB).C(m.res.Config.T.Meta.FilesTable).Bulk() bulk.Unordered() //construct the interface slice for bulk @@ -290,7 +290,7 @@ func (m *MetaDBHandle) isBuilt() bool { } for _, name := range coll { - if name == m.res.System.MetaTables.FilesTable { + if name == m.res.Config.T.Meta.FilesTable { return true } } @@ -325,10 +325,10 @@ func (m *MetaDBHandle) createMetaDB() { Capped: false, } - err := ssn.DB(m.DB).C(m.res.System.LogConfig.RitaLogTable).Create(&myCol) + err := ssn.DB(m.DB).C(m.res.Config.T.Log.RitaLogTable).Create(&myCol) errchk(err) - err = ssn.DB(m.DB).C(m.res.System.MetaTables.FilesTable).Create(&myCol) + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.FilesTable).Create(&myCol) errchk(err) idx := mgo.Index{ @@ -339,11 +339,11 @@ func (m *MetaDBHandle) createMetaDB() { Name: "hashindex", } - err = ssn.DB(m.DB).C(m.res.System.MetaTables.FilesTable).EnsureIndex(idx) + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.FilesTable).EnsureIndex(idx) errchk(err) // Create the database collection - err = ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).Create(&myCol) + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Create(&myCol) errchk(err) idx = mgo.Index{ @@ -354,7 +354,7 @@ func (m *MetaDBHandle) createMetaDB() { Name: "nameindex", } - err = ssn.DB(m.DB).C(m.res.System.MetaTables.DatabasesTable).EnsureIndex(idx) + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).EnsureIndex(idx) errchk(err) m.logDebug("newMetaDBHandle", "exiting") diff --git a/database/mock.go b/database/mock.go index 1697c428..7872f400 100644 --- a/database/mock.go +++ b/database/mock.go @@ -11,14 +11,14 @@ import ( // returning a *Resources object which has all of the necessary 
configuration information func InitMockResources(cfgPath string) *Resources { //TODO: hard code in a test config - conf, ok := config.GetConfig(cfgPath) - if !ok { + conf, err := config.GetConfig(cfgPath) + if err != nil { fmt.Fprintf(os.Stdout, "Failed to config, exiting") - os.Exit(-1) + panic(err) } // Fire up the logging system - log, err := initLog(conf.LogConfig.LogLevel) + log, err := initLog(conf.S.Log.LogLevel) if err != nil { fmt.Printf("Failed to prep logger: %s", err.Error()) os.Exit(-1) @@ -32,7 +32,7 @@ func InitMockResources(cfgPath string) *Resources { r := &Resources{ Log: log, - System: conf, + Config: conf, } // db and resources have cyclic pointers diff --git a/database/resources.go b/database/resources.go index 501e7800..07b669a6 100644 --- a/database/resources.go +++ b/database/resources.go @@ -1,8 +1,6 @@ package database import ( - "crypto/tls" - "crypto/x509" "fmt" "io/ioutil" "os" @@ -23,7 +21,7 @@ import ( type ( // Resources provides a data structure for passing system Resources Resources struct { - System *config.SystemConfig + Config *config.Config Log *log.Logger DB *DB MetaDB *MetaDBHandle @@ -33,24 +31,24 @@ type ( // InitResources grabs the configuration file and intitializes the configuration data // returning a *Resources object which has all of the necessary configuration information func InitResources(cfgPath string) *Resources { - conf, ok := config.GetConfig(cfgPath) - if !ok { + conf, err := config.GetConfig(cfgPath) + if err != nil { fmt.Fprintf(os.Stdout, "Failed to config, exiting") - os.Exit(-1) + panic(err) } // Fire up the logging system - log, err := initLog(conf.LogConfig.LogLevel) + log, err := initLog(conf.S.Log.LogLevel) if err != nil { fmt.Printf("Failed to prep logger: %s", err.Error()) os.Exit(-1) } - if conf.LogConfig.LogToFile { - addFileLogger(log, conf.LogConfig.RitaLogPath) + if conf.S.Log.LogToFile { + addFileLogger(log, conf.S.Log.RitaLogPath) } // Jump into the requested database - session, err := 
connectToMongoDB(&conf.MongoDBConfig, log) + session, err := connectToMongoDB(&conf.S.MongoDB, &conf.R.MongoDB, log) if err != nil { fmt.Printf("Failed to connect to database: %s", err.Error()) os.Exit(-1) @@ -66,14 +64,14 @@ func InitResources(cfgPath string) *Resources { // Allows code to create and remove tracked databases metaDB := &MetaDBHandle{ - DB: conf.BroConfig.MetaDB, + DB: conf.S.Bro.MetaDB, lock: new(sync.Mutex), } //bundle up the system resources r := &Resources{ Log: log, - System: conf, + Config: conf, } // db and resources have cyclic pointers @@ -90,10 +88,10 @@ func InitResources(cfgPath string) *Resources { } //Begin logging to the metadatabase - if conf.LogConfig.LogToDB { + if conf.S.Log.LogToDB { log.Hooks.Add( mgorus.NewHookerFromSession( - session, conf.BroConfig.MetaDB, conf.LogConfig.RitaLogTable, + session, conf.S.Bro.MetaDB, conf.T.Log.RitaLogTable, ), ) } @@ -101,24 +99,13 @@ func InitResources(cfgPath string) *Resources { } //connectToMongoDB connects to MongoDB possibly with authentication and TLS -func connectToMongoDB(conf *config.MongoDBCfg, logger *log.Logger) (*mgo.Session, error) { - if conf.TLS.Enabled { - tlsConf := &tls.Config{} - if len(conf.TLS.CAFile) > 0 { - pem, err := ioutil.ReadFile(conf.TLS.CAFile) - if err != nil { - logger.WithFields(log.Fields{ - "CAFile": conf.TLS.CAFile, - }).Error(err.Error()) - fmt.Println("[!] 
Could not read MongoDB CA file") - } else { - tlsConf.RootCAs = x509.NewCertPool() - tlsConf.RootCAs.AppendCertsFromPEM(pem) - } - } - return mgosec.Dial(conf.ConnectionString, conf.AuthMechanismParsed, tlsConf) +func connectToMongoDB(static *config.MongoDBStaticCfg, + running *config.MongoDBRunningCfg, + logger *log.Logger) (*mgo.Session, error) { + if static.TLS.Enabled { + return mgosec.Dial(static.ConnectionString, running.AuthMechanismParsed, running.TLS.TLSConfig) } - return mgosec.DialInsecure(conf.ConnectionString, conf.AuthMechanismParsed) + return mgosec.DialInsecure(static.ConnectionString, running.AuthMechanismParsed) } // initLog creates the logger for logging to stdout and file diff --git a/etc/rita.yaml b/etc/rita.yaml index 64ef9083..c0362897 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -9,6 +9,8 @@ MongoDB: # For encrypting data on the wire between RITA and MongoDB TLS: Enable: false + #If set, RITA will verify the MongoDB certificate's hostname and validity + VerifyCertificate: false #If set, RITA will use the provided CA file instead of the system's CA's CAFile: null diff --git a/parser/fsimporter.go b/parser/fsimporter.go index b5f2baf3..5333c228 100644 --- a/parser/fsimporter.go +++ b/parser/fsimporter.go @@ -13,8 +13,8 @@ import ( "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" fpt "github.com/ocmdev/rita/parser/fileparsetypes" - "github.com/ocmdev/rita/util" "github.com/ocmdev/rita/parser/parsetypes" + "github.com/ocmdev/rita/util" log "github.com/sirupsen/logrus" ) @@ -49,10 +49,10 @@ func (fs *FSImporter) Run(datastore *MongoDatastore) { fmt.Println("\t[-] Finding files to parse") //find all of the bro log paths - files := readDir(fs.res.System.BroConfig.LogPath, fs.res.Log) + files := readDir(fs.res.Config.S.Bro.LogPath, fs.res.Log) //hash the files and get their stats - indexedFiles := indexFiles(files, fs.indexingThreads, fs.res.System, fs.res.Log) + indexedFiles := indexFiles(files, fs.indexingThreads, 
fs.res.Config, fs.res.Log) progTime := time.Now() fs.res.Log.WithFields( @@ -65,7 +65,7 @@ func (fs *FSImporter) Run(datastore *MongoDatastore) { indexedFiles = removeOldFilesFromIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) parseFiles(indexedFiles, fs.parseThreads, - fs.res.System.BroConfig.UseDates, datastore, fs.res.Log) + fs.res.Config.S.Bro.UseDates, datastore, fs.res.Log) datastore.flush() updateFilesIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) @@ -115,7 +115,7 @@ func readDir(cpath string, logger *log.Logger) []string { //indexFiles takes in a list of bro files, a number of threads, and parses //some metadata out of the files func indexFiles(files []string, indexingThreads int, - cfg *config.SystemConfig, logger *log.Logger) []*fpt.IndexedFile { + cfg *config.Config, logger *log.Logger) []*fpt.IndexedFile { n := len(files) output := make([]*fpt.IndexedFile, n) indexingWG := new(sync.WaitGroup) @@ -124,7 +124,7 @@ func indexFiles(files []string, indexingThreads int, indexingWG.Add(1) go func(files []string, indexedFiles []*fpt.IndexedFile, - sysConf *config.SystemConfig, logger *log.Logger, + sysConf *config.Config, logger *log.Logger, wg *sync.WaitGroup, start int, jump int, length int) { for j := start; j < length; j += jump { @@ -268,6 +268,7 @@ func removeOldFilesFromIndex(indexedFiles []*fpt.IndexedFile, } return toReturn } + //updateFilesIndex updates the files collection in the metaDB with the newly parsed files func updateFilesIndex(indexedFiles []*fpt.IndexedFile, metaDatabase *database.MetaDBHandle, logger *log.Logger) { diff --git a/parser/indexedfile.go b/parser/indexedfile.go index c59f5049..28ef83d5 100644 --- a/parser/indexedfile.go +++ b/parser/indexedfile.go @@ -17,7 +17,7 @@ import ( //newIndexedFile takes in a file path and the bro config and opens up the //file path and parses out some metadata -func newIndexedFile(filePath string, config *config.SystemConfig, +func newIndexedFile(filePath string, config *config.Config, logger 
*log.Logger) (*fpt.IndexedFile, error) { toReturn := new(fpt.IndexedFile) toReturn.Path = filePath @@ -76,13 +76,13 @@ func newIndexedFile(filePath string, config *config.SystemConfig, return toReturn, errors.New("Could not parse first line of file for time") } - toReturn.TargetCollection = line.TargetCollection(&config.StructureConfig) + toReturn.TargetCollection = line.TargetCollection(&config.T.Structure) if toReturn.TargetCollection == "" { fileHandle.Close() return toReturn, errors.New("Could not find a target collection for file") } - toReturn.TargetDatabase = getTargetDatabase(filePath, &config.BroConfig) + toReturn.TargetDatabase = getTargetDatabase(filePath, &config.S.Bro) if toReturn.TargetDatabase == "" { fileHandle.Close() return toReturn, errors.New("Could not find a dataset for file") @@ -113,7 +113,7 @@ func getFileHash(fileHandle *os.File, fInfo os.FileInfo) (string, error) { //getTargetDatabase assigns a database to a log file based on the path, //and the bro config -func getTargetDatabase(path string, broConfig *config.BroCfg) string { +func getTargetDatabase(path string, broConfig *config.BroStaticCfg) string { // check the directory map for key, val := range broConfig.DirectoryMap { if strings.Contains(path, key) { diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index ff9e089c..644977a2 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -57,7 +57,7 @@ type ( //TargetCollection returns the mongo collection this entry should be inserted //into -func (in *Conn) TargetCollection(config *config.StructureCfg) string { +func (in *Conn) TargetCollection(config *config.StructureTableCfg) string { return config.ConnTable } diff --git a/parser/parsetypes/dns.go b/parser/parsetypes/dns.go index 3b845496..c71f6c92 100644 --- a/parser/parsetypes/dns.go +++ b/parser/parsetypes/dns.go @@ -62,7 +62,7 @@ type DNS struct { //TargetCollection returns the mongo collection this entry should be inserted //into -func (in *DNS) 
TargetCollection(config *config.StructureCfg) string { +func (in *DNS) TargetCollection(config *config.StructureTableCfg) string { return config.DNSTable } diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index 1190c18b..e5711af1 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -78,7 +78,7 @@ type HTTP struct { //TargetCollection returns the mongo collection this entry should be inserted //into -func (line *HTTP) TargetCollection(config *config.StructureCfg) string { +func (line *HTTP) TargetCollection(config *config.StructureTableCfg) string { return config.HTTPTable } diff --git a/parser/parsetypes/parsetypes.go b/parser/parsetypes/parsetypes.go index d74f1bd4..4ee61826 100644 --- a/parser/parsetypes/parsetypes.go +++ b/parser/parsetypes/parsetypes.go @@ -4,7 +4,7 @@ import "github.com/ocmdev/rita/config" //BroData holds a line of a bro log type BroData interface { - TargetCollection(*config.StructureCfg) string + TargetCollection(*config.StructureTableCfg) string Indices() []string Normalize() } diff --git a/reporting/report-bl-dest-ips.go b/reporting/report-bl-dest-ips.go index 2d28101e..d4136de3 100644 --- a/reporting/report-bl-dest-ips.go +++ b/reporting/report-bl-dest-ips.go @@ -21,13 +21,13 @@ func printBLDestIPs(db string, res *database.Resources) error { var blIPs []blacklist.BlacklistedIP res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.DestIPsTable). + C(res.Config.T.Blacklisted.DestIPsTable). Find(nil).Sort("-conn").All(&blIPs) for i, ip := range blIPs { var connected []structure.UniqueConnection res.DB.Session.DB(db). 
- C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip.IP}, ).All(&connected) for _, uconn := range connected { diff --git a/reporting/report-bl-hostnames.go b/reporting/report-bl-hostnames.go index 3680e1b7..f6ae0aad 100644 --- a/reporting/report-bl-hostnames.go +++ b/reporting/report-bl-hostnames.go @@ -24,7 +24,7 @@ func printBLHostnames(db string, res *database.Resources) error { var blHosts []blacklist.BlacklistedHostname res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.HostnamesTable). + C(res.Config.T.Blacklisted.HostnamesTable). Find(nil).Sort("-conn").All(&blHosts) //for each blacklisted host @@ -36,7 +36,7 @@ func printBLHostnames(db string, res *database.Resources) error { //then find all of the hosts which talked to the ip var connected []structure.UniqueConnection res.DB.Session.DB(db). - C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip}, ).All(&connected) //and aggregate the source ip addresses diff --git a/reporting/report-bl-source-ips.go b/reporting/report-bl-source-ips.go index 6aa79a16..2ebcd29c 100644 --- a/reporting/report-bl-source-ips.go +++ b/reporting/report-bl-source-ips.go @@ -23,13 +23,13 @@ func printBLSourceIPs(db string, res *database.Resources) error { var blIPs []blacklist.BlacklistedIP res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.SourceIPsTable). + C(res.Config.T.Blacklisted.SourceIPsTable). Find(nil).Sort("-conn").All(&blIPs) for i, ip := range blIPs { var connected []structure.UniqueConnection res.DB.Session.DB(db). 
- C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"src": ip.IP}, ).All(&connected) for _, uconn := range connected { diff --git a/reporting/report-bl-urls.go b/reporting/report-bl-urls.go index 532a41d6..b44ab175 100644 --- a/reporting/report-bl-urls.go +++ b/reporting/report-bl-urls.go @@ -24,14 +24,14 @@ func printBLURLs(db string, res *database.Resources) error { var blURLs []blacklist.BlacklistedURL res.DB.Session.DB(db). - C(res.System.BlacklistedConfig.UrlsTable). + C(res.Config.T.Blacklisted.UrlsTable). Find(nil).Sort("-conn").All(&blURLs) //for each blacklisted url for i, blURL := range blURLs { //get the ips associated with the url var urlEntry urls.URL - res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable). + res.DB.Session.DB(db).C(res.Config.T.Urls.UrlsTable). Find(bson.M{"url": blURL.Host, "uri": blURL.Resource}).One(&urlEntry) ips := urlEntry.IPs //and loop over the ips @@ -39,7 +39,7 @@ func printBLURLs(db string, res *database.Resources) error { //then find all of the hosts which talked to the ip var connected []structure.UniqueConnection res.DB.Session.DB(db). 
- C(res.System.StructureConfig.UniqueConnTable).Find( + C(res.Config.T.Structure.UniqueConnTable).Find( bson.M{"dst": ip}, ).All(&connected) //and aggregate the source ip addresses diff --git a/reporting/report-explodedDns.go b/reporting/report-explodedDns.go index d0ed95cb..2512e5c0 100644 --- a/reporting/report-explodedDns.go +++ b/reporting/report-explodedDns.go @@ -18,7 +18,7 @@ func printDNS(db string, res *database.Resources) error { defer f.Close() var results []dns.ExplodedDNS - iter := res.DB.Session.DB(db).C(res.System.DNSConfig.ExplodedDNSTable).Find(nil) + iter := res.DB.Session.DB(db).C(res.Config.T.DNS.ExplodedDNSTable).Find(nil) iter.Sort("-subdomains").Limit(1000).All(&results) out, err := template.New("dns.html").Parse(templates.DNStempl) diff --git a/reporting/report-long-connections.go b/reporting/report-long-connections.go index 73e65f39..dc261b60 100644 --- a/reporting/report-long-connections.go +++ b/reporting/report-long-connections.go @@ -22,7 +22,7 @@ func printLongConns(db string, res *database.Resources) error { } var conns []data.Conn - coll := res.DB.Session.DB(db).C(res.System.StructureConfig.ConnTable) + coll := res.DB.Session.DB(db).C(res.Config.T.Structure.ConnTable) coll.Find(nil).Sort("-duration").Limit(1000).All(&conns) w, err := getLongConnWriter(conns) diff --git a/reporting/report-scans.go b/reporting/report-scans.go index 4ac181b0..66f1b44d 100644 --- a/reporting/report-scans.go +++ b/reporting/report-scans.go @@ -24,7 +24,7 @@ func printScans(db string, res *database.Resources) error { } var scans []scanning.Scan - coll := res.DB.Session.DB(db).C(res.System.ScanningConfig.ScanTable) + coll := res.DB.Session.DB(db).C(res.Config.T.Scanning.ScanTable) coll.Find(nil).All(&scans) w, err := getScanWriter(scans) diff --git a/reporting/report-urls.go b/reporting/report-urls.go index 9f88d1e1..2c5f2bbe 100644 --- a/reporting/report-urls.go +++ b/reporting/report-urls.go @@ -22,7 +22,7 @@ func printLongURLs(db string, res 
*database.Resources) error { } var urls []urls.URL - coll := res.DB.Session.DB(db).C(res.System.UrlsConfig.UrlsTable) + coll := res.DB.Session.DB(db).C(res.Config.T.Urls.UrlsTable) coll.Find(nil).Sort("-length").Limit(1000).All(&urls) w, err := getLongURLWriter(urls) diff --git a/reporting/report-useragents.go b/reporting/report-useragents.go index 59553604..c0c6d872 100644 --- a/reporting/report-useragents.go +++ b/reporting/report-useragents.go @@ -22,7 +22,7 @@ func printUserAgents(db string, res *database.Resources) error { } var agents []useragent.UserAgent - coll := res.DB.Session.DB(db).C(res.System.UserAgentConfig.UserAgentTable) + coll := res.DB.Session.DB(db).C(res.Config.T.UserAgent.UserAgentTable) coll.Find(nil).Sort("times_used").Limit(1000).All(&agents) w, err := getUserAgentsWriter(agents) From 1d7f0897a1af8fabd27a6b13d1adb29ce3279a5e Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Wed, 12 Jul 2017 22:12:02 -0600 Subject: [PATCH 037/117] Fix socket timeout --- config/static.go | 12 ++++++++---- database/resources.go | 4 ++-- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/config/static.go b/config/static.go index e60627fd..db696c9f 100644 --- a/config/static.go +++ b/config/static.go @@ -5,7 +5,7 @@ import ( "io/ioutil" "os" "reflect" - + "time" yaml "gopkg.in/yaml.v2" ) @@ -23,9 +23,10 @@ type ( //MongoDBStaticCfg contains the means for connecting to MongoDB MongoDBStaticCfg struct { - ConnectionString string `yaml:"ConnectionString"` - AuthMechanism string `yaml:"AuthenticationMechanism"` - TLS TLSStaticCfg `yaml:"TLS"` + ConnectionString string `yaml:"ConnectionString"` + AuthMechanism string `yaml:"AuthenticationMechanism"` + SocketTimeout time.Duration `yaml:"SocketTimeout"` + TLS TLSStaticCfg `yaml:"TLS"` } //TLSStaticCfg contains the means for connecting to MongoDB over TLS @@ -112,5 +113,8 @@ func loadStaticConfig(cfgPath string) (*StaticCfg, error) { // so we have to call elem on the reflect value 
expandConfig(reflect.ValueOf(config).Elem()) + // set the socket time out in hours + config.MongoDB.SocketTimeout *= time.Hour + return config, nil } diff --git a/database/resources.go b/database/resources.go index 07b669a6..4bc0bbcb 100644 --- a/database/resources.go +++ b/database/resources.go @@ -53,8 +53,8 @@ func InitResources(cfgPath string) *Resources { fmt.Printf("Failed to connect to database: %s", err.Error()) os.Exit(-1) } - session.SetSocketTimeout(conf.MongoDBConfig.SocketTimeout) - session.SetSyncTimeout(conf.MongoDBConfig.SocketTimeout) + session.SetSocketTimeout(conf.S.MongoDB.SocketTimeout) + session.SetSyncTimeout(conf.S.MongoDB.SocketTimeout) session.SetCursorTimeout(0) // Allows code to interact with the database From 4123e0fba81d5d4caff7de3d9602195a33a6439c Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 10 Jul 2017 13:45:13 -0600 Subject: [PATCH 038/117] Output proper csv --- commands/commands.go | 9 +++++ commands/show-beacons.go | 43 ++++++++++---------- commands/show-bl-hostname.go | 32 +++++++-------- commands/show-bl-ip.go | 36 +++++++++-------- commands/show-bl-url.go | 32 +++++++-------- commands/show-explodedDns.go | 25 ++++-------- commands/show-long-connections.go | 28 ++++++------- commands/show-scans.go | 67 ++++++++++++++++++++++--------- commands/show-urls.go | 26 +++++------- commands/show-user-agents.go | 21 +++------- 10 files changed, 167 insertions(+), 152 deletions(-) diff --git a/commands/commands.go b/commands/commands.go index d8903b35..2ada596f 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -2,6 +2,7 @@ package commands import ( "runtime" + "strconv" "github.com/urfave/cli" ) @@ -62,3 +63,11 @@ func bootstrapCommands(commands ...cli.Command) { func Commands() []cli.Command { return allCommands } + +//helper functions for formatting floats and integers +func f(f float64) string { + return strconv.FormatFloat(f, 'g', 6, 64) +} +func i(i int64) string { + return strconv.FormatInt(i, 10) +} diff 
--git a/commands/show-beacons.go b/commands/show-beacons.go index 6477bcf2..ad483c61 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -1,10 +1,8 @@ package commands import ( - "fmt" + "encoding/csv" "os" - "strconv" - "text/template" "github.com/ocmdev/rita/analysis/beacon" "github.com/ocmdev/rita/database" @@ -66,39 +64,40 @@ func showBeaconReport(data []beaconData.BeaconAnalysisView) error { "Top Size", "Top Intvl Count", "Top Size Count", "Intvl Skew", "Size Skew", "Intvl Dispersion", "Size Dispersion", "Intvl Duration"}) - f := func(f float64) string { - return strconv.FormatFloat(f, 'g', 6, 64) - } - i := func(i int64) string { - return strconv.FormatInt(i, 10) - } for _, d := range data { table.Append( []string{ f(d.Score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), i(d.TS_iRange), i(d.DS_range), i(d.TS_iMode), i(d.DS_mode), i(d.TS_iModeCount), i(d.DS_modeCount), f(d.TS_iSkew), f(d.DS_skew), - i(d.TS_iDispersion), i(d.DS_dispersion), f(d.TS_duration)}) + i(d.TS_iDispersion), i(d.DS_dispersion), f(d.TS_duration), + }, + ) } table.Render() return nil } func showBeaconCsv(data []beaconData.BeaconAnalysisView) error { - tmpl := "{{.Score}},{{.Src}},{{.Dst}},{{.Connections}},{{.AvgBytes}}," - tmpl += "{{.TS_iRange}},{{.DS_range}},{{.TS_iMode}},{{.DS_mode}},{{.TS_iModeCount}}," - tmpl += "{{.DS_modeCount}},{{.TS_iSkew}},{{.DS_skew}},{{.TS_iDispersion}}," - tmpl += "{{.DS_dispersion}},{{.TS_duration}}\n" - - out, err := template.New("beacon").Parse(tmpl) - if err != nil { - return err + csvWriter := csv.NewWriter(os.Stdout) + headers := []string{ + "Score", "Source", "Destination", "Connections", + "Avg Bytes", "TS Range", "DS Range", "TS Mode", "DS Mode", "TS Mode Count", + "DS Mode Count", "TS Skew", "DS Skew", "TS Dispersion", "DS Dispersion", + "TS Duration", } + csvWriter.Write(headers) + for _, d := range data { - err := out.Execute(os.Stdout, d) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) 
- } + csvWriter.Write( + []string{ + f(d.Score), d.Src, d.Dst, i(d.Connections), f(d.AvgBytes), + i(d.TS_iRange), i(d.DS_range), i(d.TS_iMode), i(d.DS_mode), + i(d.TS_iModeCount), i(d.DS_modeCount), f(d.TS_iSkew), f(d.DS_skew), + i(d.TS_iDispersion), i(d.DS_dispersion), f(d.TS_duration), + }, + ) } + csvWriter.Flush() return nil } diff --git a/commands/show-bl-hostname.go b/commands/show-bl-hostname.go index 7f80c350..da611e6c 100644 --- a/commands/show-bl-hostname.go +++ b/commands/show-bl-hostname.go @@ -1,8 +1,7 @@ package commands import ( - "fmt" - "html/template" + "encoding/csv" "os" "sort" "strconv" @@ -89,28 +88,29 @@ func printBLHostnames(c *cli.Context) error { } func showBLHostnames(hostnames []blacklist.BlacklistedHostname, connectedHosts bool) error { - tmpl := "{{.Hostname}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," - tmpl += blacklistListsTemplate + csvWriter := csv.NewWriter(os.Stdout) + headers := []string{"Hostname", "Connections", "Unique Connections", "Total Bytes", "Lists"} if connectedHosts { - tmpl += ",{{range $idx, $host := .ConnectedHosts}}{{if $idx}} {{end}}{{ $host }}{{end}}" - } - tmpl += endl - - out, err := template.New("blhostname").Parse(tmpl) - if err != nil { - return err + headers = append(headers, "Sources") } - + csvWriter.Write(headers) for _, hostname := range hostnames { sort.Strings(hostname.Lists) + serialized := []string{ + hostname.Hostname, + strconv.Itoa(hostname.Connections), + strconv.Itoa(hostname.UniqueConnections), + strconv.Itoa(hostname.TotalBytes), + strings.Join(hostname.Lists, " "), + } if connectedHosts { sort.Strings(hostname.ConnectedHosts) + serialized = append(serialized, strings.Join(hostname.ConnectedHosts, " ")) } - err := out.Execute(os.Stdout, hostname) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + csvWriter.Write(serialized) } + csvWriter.Flush() + return nil } diff --git a/commands/show-bl-ip.go b/commands/show-bl-ip.go index 
55deb89c..af547cf8 100644 --- a/commands/show-bl-ip.go +++ b/commands/show-bl-ip.go @@ -1,8 +1,7 @@ package commands import ( - "fmt" - "html/template" + "encoding/csv" "os" "sort" "strconv" @@ -150,29 +149,32 @@ func printBLDestIPs(c *cli.Context) error { } func showBLIPs(ips []blacklist.BlacklistedIP, connectedHosts, source bool) error { - //source is unused until we add column headers - tmpl := "{{.IP}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," - tmpl += blacklistListsTemplate + csvWriter := csv.NewWriter(os.Stdout) + headers := []string{"IP", "Connections", "Unique Connections", "Total Bytes", "Lists"} if connectedHosts { - tmpl += ",{{range $idx, $host := .ConnectedHosts}}{{if $idx}} {{end}}{{ $host }}{{end}}" - } - tmpl += endl - - out, err := template.New("blip").Parse(tmpl) - if err != nil { - return err + if source { + headers = append(headers, "Destinations") + } else { + headers = append(headers, "Sources") + } } - + csvWriter.Write(headers) for _, ip := range ips { sort.Strings(ip.Lists) + serialized := []string{ + ip.IP, + strconv.Itoa(ip.Connections), + strconv.Itoa(ip.UniqueConnections), + strconv.Itoa(ip.TotalBytes), + strings.Join(ip.Lists, " "), + } if connectedHosts { sort.Strings(ip.ConnectedHosts) + serialized = append(serialized, strings.Join(ip.ConnectedHosts, " ")) } - err := out.Execute(os.Stdout, ip) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + csvWriter.Write(serialized) } + csvWriter.Flush() return nil } diff --git a/commands/show-bl-url.go b/commands/show-bl-url.go index c3fc692e..00a15181 100644 --- a/commands/show-bl-url.go +++ b/commands/show-bl-url.go @@ -1,8 +1,7 @@ package commands import ( - "fmt" - "html/template" + "encoding/csv" "os" "sort" "strconv" @@ -89,28 +88,29 @@ func printBLURLs(c *cli.Context) error { } func showBLURLs(urls []blacklist.BlacklistedURL, connectedHosts bool) error { - tmpl := 
"{{.Host}},{{.Resource}},{{.Connections}},{{.UniqueConnections}},{{.TotalBytes}}," - tmpl += blacklistListsTemplate + csvWriter := csv.NewWriter(os.Stdout) + headers := []string{"Host", "Resource", "Connections", "Unique Connections", "Total Bytes", "Lists"} if connectedHosts { - tmpl += ",{{range $idx, $url := .ConnectedHosts}}{{if $idx}} {{end}}{{ $url }}{{end}}" - } - tmpl += endl - - out, err := template.New("blurl").Parse(tmpl) - if err != nil { - return err + headers = append(headers, "Sources") } - + csvWriter.Write(headers) for _, url := range urls { sort.Strings(url.Lists) + serialized := []string{ + url.Host, + url.Resource, + strconv.Itoa(url.Connections), + strconv.Itoa(url.UniqueConnections), + strconv.Itoa(url.TotalBytes), + strings.Join(url.Lists, " "), + } if connectedHosts { sort.Strings(url.ConnectedHosts) + serialized = append(serialized, strings.Join(url.ConnectedHosts, " ")) } - err := out.Execute(os.Stdout, url) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + csvWriter.Write(serialized) } + csvWriter.Flush() return nil } diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 7fe2902e..9c13d958 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -1,10 +1,8 @@ package commands import ( - "fmt" + "encoding/csv" "os" - "strconv" - "text/template" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/datatypes/dns" @@ -55,19 +53,14 @@ func init() { } func showDNSResults(dnsResults []dns.ExplodedDNS) error { - tmpl := "{{.Domain}},{{.Subdomains}},{{.Visited}}\n" - - out, err := template.New("exploded-dns").Parse(tmpl) - if err != nil { - return err - } - + csvWriter := csv.NewWriter(os.Stdout) + csvWriter.Write([]string{"Domain", "Unique Subdomains", "Times Looked Up"}) for _, result := range dnsResults { - err := out.Execute(os.Stdout, result) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + 
csvWriter.Write([]string{ + result.Domain, i(result.Subdomains), i(result.Visited), + }) } + csvWriter.Flush() return nil } @@ -76,9 +69,7 @@ func showDNSResultsHuman(dnsResults []dns.ExplodedDNS) error { table.SetHeader([]string{"Domain", "Unique Subdomains", "Times Looked Up"}) for _, result := range dnsResults { table.Append([]string{ - result.Domain, - strconv.FormatInt(result.Subdomains, 10), - strconv.FormatInt(result.Visited, 10), + result.Domain, i(result.Subdomains), i(result.Visited), }) } table.Render() diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index 831c1c98..e319d7d9 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -1,10 +1,9 @@ package commands import ( - "fmt" + "encoding/csv" "os" "strconv" - "text/template" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/datatypes/data" @@ -57,19 +56,20 @@ func init() { } func showConns(connResults []data.Conn) error { - tmpl := "{{.Src}},{{.Spt}},{{.Dst}},{{.Dpt}},{{.Dur}},{{.Proto}}\n" - - out, err := template.New("Conn").Parse(tmpl) - if err != nil { - return err - } - + csvWriter := csv.NewWriter(os.Stdout) + csvWriter.Write([]string{"Source IP", "Source Port", "Destination IP", + "Destination Port", "Duration", "Protocol"}) for _, result := range connResults { - err := out.Execute(os.Stdout, result) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + csvWriter.Write([]string{ + result.Src, + strconv.Itoa(result.Spt), + result.Dst, + strconv.Itoa(result.Dpt), + f(result.Dur), + result.Proto, + }) } + csvWriter.Flush() return nil } @@ -83,7 +83,7 @@ func showConnsHuman(connResults []data.Conn) error { strconv.Itoa(result.Spt), result.Dst, strconv.Itoa(result.Dpt), - strconv.FormatFloat(result.Dur, 'f', 2, 64), + f(result.Dur), result.Proto, }) } diff --git a/commands/show-scans.go b/commands/show-scans.go index a9ef4a81..8d34951b 100644 --- a/commands/show-scans.go +++ 
b/commands/show-scans.go @@ -1,11 +1,9 @@ package commands import ( - "fmt" + "encoding/csv" "os" - "sort" "strconv" - "text/template" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/datatypes/scanning" @@ -22,11 +20,16 @@ func init() { humanFlag, databaseFlag, configFlag, + cli.BoolFlag{ + Name: "ports, P", + Usage: "show which individual ports were scanned", + }, }, Action: func(c *cli.Context) error { if c.String("database") == "" { return cli.NewExitError("Specify a database with -d", -1) } + showPorts := c.Bool("ports") res := database.InitResources(c.String("config")) @@ -39,12 +42,12 @@ func init() { } if c.Bool("human-readable") { - err := showScansHuman(scans) + err := showScansHuman(scans, showPorts) if err != nil { return cli.NewExitError(err.Error(), -1) } } - err := showScans(scans) + err := showScans(scans, showPorts) if err != nil { return cli.NewExitError(err.Error(), -1) } @@ -54,30 +57,56 @@ func init() { bootstrapCommands(command) } -func showScans(scans []scanning.Scan) error { - tmpl := "{{.Src}},{{.Dst}},{{.PortCount}},{{range $idx, $port := .PortSet}}{{if $idx}} {{end}}{{ $port }}{{end}}\r\n" - - out, err := template.New("scn").Parse(tmpl) - if err != nil { - return err +func showScans(scans []scanning.Scan, showPorts bool) error { + csvWriter := csv.NewWriter(os.Stdout) + header := []string{"Source", "Destination", "Ports Scanned"} + if showPorts { + header = append(header, "Ports") } - + csvWriter.Write(header) for _, scan := range scans { - sort.Ints(scan.PortSet) - err := out.Execute(os.Stdout, scan) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) + data := []string{scan.Src, scan.Dst, strconv.Itoa(scan.PortCount)} + + if showPorts { + portSet := make([]byte, scan.PortCount*3) + for i, port := range scan.PortSet { + if i != 0 { + strconv.AppendQuote(portSet, " ") + } + strconv.AppendInt(portSet, int64(port), 10) + } + data = append(data, string(portSet)) } + + csvWriter.Write(data) } + 
csvWriter.Flush() return nil } // showScans prints all scans for a given database -func showScansHuman(scans []scanning.Scan) error { +func showScansHuman(scans []scanning.Scan, showPorts bool) error { table := tablewriter.NewWriter(os.Stdout) - table.SetHeader([]string{"Source", "Destination", "Ports Scanned"}) + header := []string{"Source", "Destination", "Ports Scanned"} + if showPorts { + header = append(header, "Ports") + } + table.SetHeader(header) for _, scan := range scans { - table.Append([]string{scan.Src, scan.Dst, strconv.Itoa(scan.PortCount)}) + data := []string{scan.Src, scan.Dst, strconv.Itoa(scan.PortCount)} + + if showPorts { + portSet := make([]byte, scan.PortCount*3) + for i, port := range scan.PortSet { + if i != 0 { + strconv.AppendQuote(portSet, " ") + } + strconv.AppendInt(portSet, int64(port), 10) + } + data = append(data, string(portSet)) + } + + table.Append(data) } table.Render() return nil diff --git a/commands/show-urls.go b/commands/show-urls.go index deb449f4..5bb68186 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -1,10 +1,9 @@ package commands import ( - "fmt" + "encoding/csv" "os" - "strconv" - "text/template" + "strings" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/datatypes/urls" @@ -92,26 +91,21 @@ func init() { } func showURLs(urls []urls.URL) error { - tmpl := "{{.URL}},{{.URI}},{{.Length}},{{.Count}},{{range $idx, $ip := .IPs}}{{if $idx}} {{end}}{{ $ip }}{{end}}\n" - - out, err := template.New("urls").Parse(tmpl) - if err != nil { - return err - } - + csvWriter := csv.NewWriter(os.Stdout) + csvWriter.Write([]string{"URL", "URI", "Length", "Times Visted", "IPs"}) for _, url := range urls { - err := out.Execute(os.Stdout, url) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + csvWriter.Write([]string{ + url.URL, url.URI, i(url.Length), i(url.Count), strings.Join(url.IPs, " "), + }) } + csvWriter.Flush() return nil } func showURLsHuman(urls []urls.URL) 
error { table := tablewriter.NewWriter(os.Stdout) table.SetColWidth(50) - table.SetHeader([]string{"URL", "URI", "Length", "Times Visted"}) + table.SetHeader([]string{"URL", "URI", "Length", "Times Visted", "IPs"}) for _, url := range urls { if len(url.URL) > 50 { url.URL = url.URL[0:47] + "..." @@ -120,7 +114,7 @@ func showURLsHuman(urls []urls.URL) error { url.URI = url.URI[0:47] + "..." } table.Append([]string{ - url.URL, url.URI, strconv.FormatInt(url.Length, 10), strconv.FormatInt(url.Count, 10), + url.URL, url.URI, i(url.Length), i(url.Count), strings.Join(url.IPs, " "), }) } table.Render() diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index 1a478b09..917c1508 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -1,10 +1,8 @@ package commands import ( - "fmt" + "encoding/csv" "os" - "strconv" - "text/template" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/datatypes/useragent" @@ -66,19 +64,12 @@ func init() { } func showAgents(agents []useragent.UserAgent) error { - tmpl := "{{.UserAgent}},{{.TimesUsed}}\n" - - out, err := template.New("ua").Parse(tmpl) - if err != nil { - return err - } - + csvWriter := csv.NewWriter(os.Stdout) + csvWriter.Write([]string{"User Agent", "Times Used"}) for _, agent := range agents { - err := out.Execute(os.Stdout, agent) - if err != nil { - fmt.Fprintf(os.Stdout, "ERROR: Template failure: %s\n", err.Error()) - } + csvWriter.Write([]string{agent.UserAgent, i(agent.TimesUsed)}) } + csvWriter.Flush() return nil } @@ -87,7 +78,7 @@ func showAgentsHuman(agents []useragent.UserAgent) error { table.SetColWidth(100) table.SetHeader([]string{"User Agent", "Times Used"}) for _, agent := range agents { - table.Append([]string{agent.UserAgent, strconv.FormatInt(agent.TimesUsed, 10)}) + table.Append([]string{agent.UserAgent, i(agent.TimesUsed)}) } table.Render() return nil From 2e2c8f8b2ea7d8609a3cbf2f60edd95c9beafdcc Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Wed, 19 
Jul 2017 23:39:11 -0500 Subject: [PATCH 039/117] Revised comments around UseDates to reflect timezone behavior. --- etc/rita.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/etc/rita.yaml b/etc/rita.yaml index 64ef9083..2a8548f5 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -55,11 +55,11 @@ Bro: # separation is handled by the metadatabase. MetaDB: MetaDatabase - # If use dates is true the logs will be split into databases by date, this is - # best for if you have multiple days worth of log files in the logpath and wish - # to treat each day as a separate test. 24hours worth of data is the ideal for - # analysis, and using dates will ensure that tests are broken into 24hour periods - # around midnight of each day. + # If use dates is true the logs will be split into databases by date using the + # current system's timezone. This is best for if you have multiple days worth + # of log files in the logpath and wish to treat each day as a separate test. + # 24 hours worth of data is the ideal for analysis, and using dates will ensure + # that tests are broken into 24 hour periods on midnight in the current timezone. UseDates: false # The number of records shipped off to MongoDB at a time. Increasing From fbe2ce342c046b7ffc6b4d21293bd99beef5df54 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 24 Jul 2017 13:14:51 -0600 Subject: [PATCH 040/117] Point readme towards wiki for docker --- Readme.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Readme.md b/Readme.md index 9728899c..1a1336b9 100644 --- a/Readme.md +++ b/Readme.md @@ -27,6 +27,9 @@ Additional functionality is being developed and will be included soon. * Source your .bashrc (the installer added RITA to the PATH): `source ~/.bashrc` * Start MongoDB: `sudo service mongod start` +### Docker Installation +RITA is available as a Docker image at ocmdev/rita, [check out the instructions in the wiki](https://github.com/ocmdev/rita/wiki/Docker-Installation). 
+ ### Manual Installation To install each component of RITA by hand, [check out the instructions in the wiki](https://github.com/ocmdev/rita/wiki/Installation). From a361acef1911a034b7295fb4e4131256c433d8ff Mon Sep 17 00:00:00 2001 From: logan lembke Date: Wed, 26 Jul 2017 15:42:04 -0600 Subject: [PATCH 041/117] Split time and data scores in beaconing, rename some vars --- analysis/beacon/beacon.go | 154 +++++++++++++++------------- analysis/beacon/beacon_test.go | 2 +- analysis/beacon/beacon_test_data.go | 33 ++++-- datatypes/beacon/beacon.go | 56 +++++----- 4 files changed, 138 insertions(+), 107 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 1a0df8d5..5d2d3e74 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -227,121 +227,133 @@ func (t *Beacon) analyze() { //store the diff slice length since we use it a lot //for timestamps this is one less then the data slice length //since we are calculating the times in between readings - length := len(data.ts) - 1 - ds_length := len(data.orig_ip_bytes) + tsLength := len(data.ts) - 1 + dsLength := len(data.orig_ip_bytes) //find the duration of this connection //perfect beacons should fill the observation period - duration := float64(data.ts[length]-data.ts[0]) / + duration := float64(data.ts[tsLength]-data.ts[0]) / float64(t.maxTime-t.minTime) //find the delta times between the timestamps - diff := make([]int64, length) - for i := 0; i < length; i++ { + diff := make([]int64, tsLength) + for i := 0; i < tsLength; i++ { diff[i] = data.ts[i+1] - data.ts[i] } //perfect beacons should have symmetric delta time and size distributions //Bowley's measure of skew is used to check symmetry sort.Sort(util.SortableInt64(diff)) - bSkew := float64(0) - ds_skew := float64(0) - - //length -1 is used since diff is a zero based slice - low := diff[util.Round(.25*float64(length-1))] - mid := diff[util.Round(.5*float64(length-1))] - high := diff[util.Round(.75*float64(length-1))] - 
bNum := low + high - 2*mid - bDen := high - low - - ds_low := data.orig_ip_bytes[util.Round(.25*float64(ds_length-1))] - ds_mid := data.orig_ip_bytes[util.Round(.5*float64(ds_length-1))] - ds_high := data.orig_ip_bytes[util.Round(.75*float64(ds_length-1))] - ds_bNum := ds_low + ds_high - 2*ds_mid - ds_bDen := ds_high - ds_low - - //bSkew should equal zero if the denominator equals zero + tsSkew := float64(0) + dsSkew := float64(0) + + //tsLength -1 is used since diff is a zero based slice + tsLow := diff[util.Round(.25*float64(tsLength-1))] + tsMid := diff[util.Round(.5*float64(tsLength-1))] + tsHigh := diff[util.Round(.75*float64(tsLength-1))] + tsBowleyNum := tsLow + tsHigh - 2*tsMid + tsBowleyDen := tsHigh - tsLow + + //we do the same for datasizes + dsLow := data.orig_ip_bytes[util.Round(.25*float64(dsLength-1))] + dsMid := data.orig_ip_bytes[util.Round(.5*float64(dsLength-1))] + dsHigh := data.orig_ip_bytes[util.Round(.75*float64(dsLength-1))] + dsBowleyNum := dsLow + dsHigh - 2*dsMid + dsBowleyDen := dsHigh - dsLow + + //tsSkew should equal zero if the denominator equals zero //bowley skew is unreliable if Q2 = Q1 or Q2 = Q3 - if bDen != 0 && mid != low && mid != high { - bSkew = float64(bNum) / float64(bDen) + if tsBowleyDen != 0 && tsMid != tsLow && tsMid != tsHigh { + tsSkew = float64(tsBowleyNum) / float64(tsBowleyDen) } - if ds_bDen != 0 && ds_mid != ds_low && ds_mid != ds_high { - ds_skew = float64(ds_bNum) / float64(ds_bDen) + if dsBowleyDen != 0 && dsMid != dsLow && dsMid != dsHigh { + dsSkew = float64(dsBowleyNum) / float64(dsBowleyDen) } //perfect beacons should have very low dispersion around the //median of their delta times //Median Absolute Deviation About the Median //is used to check dispersion - devs := make([]int64, length) - for i := 0; i < length; i++ { - devs[i] = util.Abs(diff[i] - mid) + devs := make([]int64, tsLength) + for i := 0; i < tsLength; i++ { + devs[i] = util.Abs(diff[i] - tsMid) } - ds_devs := make([]int64, ds_length) - for i 
:= 0; i < ds_length; i++ { - ds_devs[i] = util.Abs(data.orig_ip_bytes[i] - ds_mid) + ds_devs := make([]int64, dsLength) + for i := 0; i < dsLength; i++ { + ds_devs[i] = util.Abs(data.orig_ip_bytes[i] - dsMid) } sort.Sort(util.SortableInt64(devs)) sort.Sort(util.SortableInt64(ds_devs)) - madm := devs[util.Round(.5*float64(length-1))] - ds_madm := ds_devs[util.Round(.5*float64(ds_length-1))] + tsMadm := devs[util.Round(.5*float64(tsLength-1))] + dsMadm := ds_devs[util.Round(.5*float64(dsLength-1))] //Store the range for human analysis - iRange := diff[length-1] - diff[0] - ds_range := data.orig_ip_bytes[ds_length-1] - data.orig_ip_bytes[0] + tsIntervalRange := diff[tsLength-1] - diff[0] + dsRange := data.orig_ip_bytes[dsLength-1] - data.orig_ip_bytes[0] //get a list of the intervals found in the data, //the number of times the interval was found, //and the most occurring interval - intervals, intervalCounts, mode, modeCount := createCountMap(diff) - ds_sizes, ds_counts, ds_mode, ds_modeCount := createCountMap(data.orig_ip_bytes) - - output := dataBeacon.BeaconAnalysisOutput{ - UconnID: data.uconnID, - TS_iSkew: bSkew, - TS_iDispersion: madm, - TS_duration: duration, - TS_iRange: iRange, - TS_iMode: mode, - TS_iModeCount: modeCount, - TS_intervals: intervals, - TS_intervalCounts: intervalCounts, - DS_skew: ds_skew, - DS_dispersion: ds_madm, - DS_range: ds_range, - DS_sizes: ds_sizes, - DS_counts: ds_counts, - DS_mode: ds_mode, - DS_modeCount: ds_modeCount, - } + intervals, intervalCounts, tsMode, tsModeCount := createCountMap(diff) + dsSizes, dsCounts, dsMode, dsModeCount := createCountMap(data.orig_ip_bytes) //more skewed distributions recieve a lower score //less skewed distributions recieve a higher score - alpha := 1.0 - math.Abs(bSkew) - delta := 1.0 - math.Abs(ds_skew) + tsSkewScore := 1.0 - math.Abs(tsSkew) //smush tsSkew + dsSkewScore := 1.0 - math.Abs(dsSkew) //smush dsSkew //lower dispersion is better, cutoff dispersion scores at 30 seconds - beta := 1.0 - 
float64(madm)/30.0 - if beta < 0 { - beta = 0 + tsMadmScore := 1.0 - float64(tsMadm)/30.0 + if tsMadmScore < 0 { + tsMadmScore = 0 } - //no cutoff dispersion for data size - epsilon := 1.0 - float64(ds_madm) - if epsilon < 0 { - epsilon = 0 + + //lower dispersion is better, cutoff dispersion scores at 32 bytes + dsMadmScore := 1.0 - float64(dsMadm)/32.0 + if dsMadmScore < 0 { + dsMadmScore = 0 } - gamma := duration + tsDurationScore := duration + //smaller data sizes receive a higher score - zeta := 1.0 - (float64(ds_mode) / 65535.0) + dsSmallnessScore := 1.0 - (float64(dsMode) / 65535.0) + if dsSmallnessScore < 0 { + dsSmallnessScore = 0 + } + + output := dataBeacon.BeaconAnalysisOutput{ + UconnID: data.uconnID, + TS_iSkew: tsSkew, + TS_iDispersion: tsMadm, + TS_duration: duration, + TS_iRange: tsIntervalRange, + TS_iMode: tsMode, + TS_iModeCount: tsModeCount, + TS_intervals: intervals, + TS_intervalCounts: intervalCounts, + DS_skew: dsSkew, + DS_dispersion: dsMadm, + DS_range: dsRange, + DS_sizes: dsSizes, + DS_sizeCounts: dsCounts, + DS_mode: dsMode, + DS_modeCount: dsModeCount, + } + + //score numerators + tsSum := (tsSkewScore + tsMadmScore + tsDurationScore) + dsSum := (dsSkewScore + dsMadmScore + dsSmallnessScore) + + //score averages + output.TS_score = tsSum / 3.0 + output.DS_score = dsSum / 3.0 + output.Score = (tsSum + dsSum) / 6.0 - //in order of ascending importance: timestamp skew, timestamp duration, - //timestamp dispersion, size skew, size duration, size weight - output.Score = (alpha + beta + gamma + delta + epsilon + zeta) / 6.0 t.writeChannel <- &output } t.analysisWg.Done() diff --git a/analysis/beacon/beacon_test.go b/analysis/beacon/beacon_test.go index 6251ddb3..563bcc2f 100644 --- a/analysis/beacon/beacon_test.go +++ b/analysis/beacon/beacon_test.go @@ -38,7 +38,7 @@ func TestAnalysis(t *testing.T) { src: "0.0.0.0", dst: "0.0.0.0", ts: val.ts, - orig_ip_bytes: []int64{5, 5, 5}, + orig_ip_bytes: val.ds, } beaconing.analysisWg.Add(1) diff 
--git a/analysis/beacon/beacon_test_data.go b/analysis/beacon/beacon_test_data.go index 1b90a11e..5450b2c9 100644 --- a/analysis/beacon/beacon_test_data.go +++ b/analysis/beacon/beacon_test_data.go @@ -2,6 +2,7 @@ package beacon type testData struct { ts []int64 + ds []int64 minScore float64 maxScore float64 description string @@ -9,49 +10,65 @@ type testData struct { var testDataList = []testData{ - {ts: []int64{0, 1800, 3600, 5400, 7200, 9000, 10800, 12600, 14400, 16200, 18000, 19800, 21600, 23400, 25200, 27000, 28800, 30600, 32400, 34200, 36000, 37800, 39600, 41400, 43200, 45000, 46800, 48600, 50400, 52200, 54000, 55800, 57600, 59400, 61200, 63000, 64800, 66600, 68400, 70200, 72000, 73800, 75600, 77400, 79200, 81000, 82800, 84600, 86400}, + { + ts: []int64{0, 1800, 3600, 5400, 7200, 9000, 10800, 12600, 14400, 16200, 18000, 19800, 21600, 23400, 25200, 27000, 28800, 30600, 32400, 34200, 36000, 37800, 39600, 41400, 43200, 45000, 46800, 48600, 50400, 52200, 54000, 55800, 57600, 59400, 61200, 63000, 64800, 66600, 68400, 70200, 72000, 73800, 75600, 77400, 79200, 81000, 82800, 84600, 86400}, + ds: []int64{4, 4, 4}, minScore: 0.99, maxScore: 1.0, description: "Perfect Beacon every 30 minutes... 
Starts at 0 (midnight) ends at 86400 (+24 hours)", }, - {ts: []int64{0, 1800, 3600, 5400, 7200, 9000, 10800, 12600, 14400, 16200, 18000, 19800, 21600, 23400, 25200, 27000, 28800, 30600, 32400, 34200, 36000, 37800, 39600, 41400, 43200, 45000, 46800, 48600, 50400, 52200, 54000, 55800, 57600, 59400, 61200, 63000, 64800, 66600, 68400, 70200, 72000, 73800, 75600, 77400, 79200, 81000, 82800, 84600}, + { + ts: []int64{0, 1800, 3600, 5400, 7200, 9000, 10800, 12600, 14400, 16200, 18000, 19800, 21600, 23400, 25200, 27000, 28800, 30600, 32400, 34200, 36000, 37800, 39600, 41400, 43200, 45000, 46800, 48600, 50400, 52200, 54000, 55800, 57600, 59400, 61200, 63000, 64800, 66600, 68400, 70200, 72000, 73800, 75600, 77400, 79200, 81000, 82800, 84600}, + ds: []int64{4, 4, 4}, minScore: 0.95, maxScore: 1.0, description: "Beacon every 30 minutes... Starts at 0 (midnight) ends 60 minutes before next midnight", }, - {ts: []int64{28800, 30600, 32400, 34200, 36000, 37800, 39600, 41400, 43200, 45000, 46800, 48600, 50400, 52200, 54000, 55800, 57600}, + { + ts: []int64{28800, 30600, 32400, 34200, 36000, 37800, 39600, 41400, 43200, 45000, 46800, 48600, 50400, 52200, 54000, 55800, 57600}, + ds: []int64{4, 4, 4}, minScore: 0.95, maxScore: 1.0, description: "Beacon every 30 minutes... 
Starts at 8:00 ends at 16:00", }, - {ts: []int64{32400, 32410, 32420, 32430, 32440, 32450, 32460, 32470, 32480, 32490, 32500, 32510, 32520, 32530, 32540, 32550, 32560, 32570, 32580, 32590, 32600, 32610, 32620, 32630, 32640, 32650, 32660, 32670, 32680, 32690, 32700, 32710, 32720, 32730, 32740, 32750, 32760, 32770, 32780, 32790, 32800, 32810, 32820, 32830, 32840, 32850, 32860, 32870, 32880, 32890, 32900, 32910, 32920, 32930, 32940, 32950, 32960, 32970, 32980, 32990, 33000, 33010, 33020, 33030, 33040, 33050, 33060, 33070, 33080, 33090, 33100, 33110, 33120, 33130, 33140, 33150, 33160, 33170, 33180, 33190, 33200, 33210, 33220, 33230, 33240, 33250, 33260, 33270, 33280, 33290, 33300, 33310, 33320, 33330, 33340, 33350, 33360, 33370, 33380, 33390, 33400, 33410, 33420, 33430, 33440, 33450, 33460, 33470, 33480, 33490, 33500, 33510, 33520, 33530, 33540, 33550, 33560, 33570, 33580, 33590, 33600, 33610, 33620, 33630, 33640, 33650, 33660, 33670, 33680, 33690, 33700, 33710, 33720, 33730, 33740, 33750, 33760, 33770, 33780, 33790, 33800, 33810, 33820, 33830, 33840, 33850, 33860, 33870, 33880, 33890, 33900, 33910, 33920, 33930, 33940, 33950, 33960, 33970, 33980, 33990, 34000, 34010, 34020, 34030, 34040, 34050, 34060, 34070, 34080, 34090, 34100, 34110, 34120, 34130, 34140, 34150, 34160, 34170, 34180, 34190, 34200, 34210, 34220, 34230, 34240, 34250, 34260, 34270, 34280, 34290, 34300, 34310, 34320, 34330, 34340, 34350, 34360, 34370, 34380, 34390, 34400, 34410, 34420, 34430, 34440, 34450, 34460, 34470, 34480, 34490, 34500, 34510, 34520, 34530, 34540, 34550, 34560, 34570, 34580, 34590, 34600, 34610, 34620, 34630, 34640, 34650, 34660, 34670, 34680, 34690, 34700, 34710, 34720, 34730, 34740, 34750, 34760, 34770, 34780, 34790, 34800, 34810, 34820, 34830, 34840, 34850, 34860, 34870, 34880, 34890, 34900, 34910, 34920, 34930, 34940, 34950, 34960, 34970, 34980, 34990, 35000, 35010, 35020, 35030, 35040, 35050, 35060, 35070, 35080, 35090, 35100, 35110, 35120, 35130, 35140, 35150, 35160, 35170, 
35180, 35190, 35200, 35210, 35220, 35230, 35240, 35250, 35260, 35270, 35280, 35290, 35300, 35310, 35320, 35330, 35340, 35350, 35360, 35370, 35380, 35390, 35400, 35410, 35420, 35430, 35440, 35450, 35460, 35470, 35480, 35490, 35500, 35510, 35520, 35530, 35540, 35550, 35560, 35570, 35580, 35590, 35600, 35610, 35620, 35630, 35640, 35650, 35660, 35670, 35680, 35690, 35700, 35710, 35720, 35730, 35740, 35750, 35760, 35770, 35780, 35790, 35800, 35810, 35820, 35830, 35840, 35850, 35860, 35870, 35880, 35890, 35900, 35910, 35920, 35930, 35940, 35950, 35960, 35970, 35980, 35990, 36000}, + { + ts: []int64{32400, 32410, 32420, 32430, 32440, 32450, 32460, 32470, 32480, 32490, 32500, 32510, 32520, 32530, 32540, 32550, 32560, 32570, 32580, 32590, 32600, 32610, 32620, 32630, 32640, 32650, 32660, 32670, 32680, 32690, 32700, 32710, 32720, 32730, 32740, 32750, 32760, 32770, 32780, 32790, 32800, 32810, 32820, 32830, 32840, 32850, 32860, 32870, 32880, 32890, 32900, 32910, 32920, 32930, 32940, 32950, 32960, 32970, 32980, 32990, 33000, 33010, 33020, 33030, 33040, 33050, 33060, 33070, 33080, 33090, 33100, 33110, 33120, 33130, 33140, 33150, 33160, 33170, 33180, 33190, 33200, 33210, 33220, 33230, 33240, 33250, 33260, 33270, 33280, 33290, 33300, 33310, 33320, 33330, 33340, 33350, 33360, 33370, 33380, 33390, 33400, 33410, 33420, 33430, 33440, 33450, 33460, 33470, 33480, 33490, 33500, 33510, 33520, 33530, 33540, 33550, 33560, 33570, 33580, 33590, 33600, 33610, 33620, 33630, 33640, 33650, 33660, 33670, 33680, 33690, 33700, 33710, 33720, 33730, 33740, 33750, 33760, 33770, 33780, 33790, 33800, 33810, 33820, 33830, 33840, 33850, 33860, 33870, 33880, 33890, 33900, 33910, 33920, 33930, 33940, 33950, 33960, 33970, 33980, 33990, 34000, 34010, 34020, 34030, 34040, 34050, 34060, 34070, 34080, 34090, 34100, 34110, 34120, 34130, 34140, 34150, 34160, 34170, 34180, 34190, 34200, 34210, 34220, 34230, 34240, 34250, 34260, 34270, 34280, 34290, 34300, 34310, 34320, 34330, 34340, 34350, 34360, 34370, 34380, 34390, 
34400, 34410, 34420, 34430, 34440, 34450, 34460, 34470, 34480, 34490, 34500, 34510, 34520, 34530, 34540, 34550, 34560, 34570, 34580, 34590, 34600, 34610, 34620, 34630, 34640, 34650, 34660, 34670, 34680, 34690, 34700, 34710, 34720, 34730, 34740, 34750, 34760, 34770, 34780, 34790, 34800, 34810, 34820, 34830, 34840, 34850, 34860, 34870, 34880, 34890, 34900, 34910, 34920, 34930, 34940, 34950, 34960, 34970, 34980, 34990, 35000, 35010, 35020, 35030, 35040, 35050, 35060, 35070, 35080, 35090, 35100, 35110, 35120, 35130, 35140, 35150, 35160, 35170, 35180, 35190, 35200, 35210, 35220, 35230, 35240, 35250, 35260, 35270, 35280, 35290, 35300, 35310, 35320, 35330, 35340, 35350, 35360, 35370, 35380, 35390, 35400, 35410, 35420, 35430, 35440, 35450, 35460, 35470, 35480, 35490, 35500, 35510, 35520, 35530, 35540, 35550, 35560, 35570, 35580, 35590, 35600, 35610, 35620, 35630, 35640, 35650, 35660, 35670, 35680, 35690, 35700, 35710, 35720, 35730, 35740, 35750, 35760, 35770, 35780, 35790, 35800, 35810, 35820, 35830, 35840, 35850, 35860, 35870, 35880, 35890, 35900, 35910, 35920, 35930, 35940, 35950, 35960, 35970, 35980, 35990, 36000}, + ds: []int64{4, 4, 4}, minScore: 0.95, maxScore: 1.0, description: "Beacon every 10 seconds... Starts at 9:00 ends at 10:00", }, - {ts: []int64{181, 3644, 7104, 10741, 14406, 17867, 21589, 25263, 28954, 32633, 36026, 39460, 43114, 46766, 50476, 54078, 57504, 61127, 64850, 68408, 71829, 75698, 79208, 82702, 84500}, + { + ts: []int64{181, 3644, 7104, 10741, 14406, 17867, 21589, 25263, 28954, 32633, 36026, 39460, 43114, 46766, 50476, 54078, 57504, 61127, 64850, 68408, 71829, 75698, 79208, 82702, 84500}, + ds: []int64{4, 4, 4, 6, 6, 6, 4, 4, 4}, minScore: 0.90, maxScore: 1.0, description: "Beacon every 1 hour... Starts at 0 (midnight) ends at 86400 (+24 hours)... 
Noise added to each timestamp: Gaussian Mu=0 Sigma=100", }, - {ts: []int64{0, 2, 3600, 3602, 7200, 7202, 10800, 10802, 14400, 14402, 18000, 18002, 21600, 21602, 25200, 25202, 28800, 28802, 32400, 32402, 36000, 36002, 39600, 39602, 43200, 43202, 46800, 46802, 50400, 50402, 54000, 54002, 57600, 57602, 61200, 61202, 64800, 64802, 68400, 68402, 72000, 72002, 75600, 75602, 79200, 79202, 82800, 82802, 86400, 86402}, + { + ts: []int64{0, 2, 3600, 3602, 7200, 7202, 10800, 10802, 14400, 14402, 18000, 18002, 21600, 21602, 25200, 25202, 28800, 28802, 32400, 32402, 36000, 36002, 39600, 39602, 43200, 43202, 46800, 46802, 50400, 50402, 54000, 54002, 57600, 57602, 61200, 61202, 64800, 64802, 68400, 68402, 72000, 72002, 75600, 75602, 79200, 79202, 82800, 82802, 86400, 86402}, + ds: []int64{4, 4, 4}, minScore: 0.9, maxScore: 1.0, description: "Perfect Beacon every 60 minutes... Connects twice every time", }, - {ts: []int64{0, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 5, 6, 7, 9, 10, 10, 10, 11, 13}, + { + ts: []int64{0, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 5, 6, 7, 9, 10, 10, 10, 11, 13}, + ds: []int64{4, 100, 2, 43, 3}, minScore: 0.0, maxScore: 0.7, description: "Connection happens a lot... 
but not a beacon", }, - {ts: []int64{10, 15, 23, 70, 50400, 50470, 50472}, + { + ts: []int64{10, 15, 23, 70, 50400, 50470, 50472}, + ds: []int64{10, 15, 23, 70, 50400, 50470, 50472}, minScore: 0.0, maxScore: 0.5, description: "Not a beacon.", diff --git a/datatypes/beacon/beacon.go b/datatypes/beacon/beacon.go index 7c6fd554..3f9a0699 100644 --- a/datatypes/beacon/beacon.go +++ b/datatypes/beacon/beacon.go @@ -5,46 +5,48 @@ import ( ) type ( - //straight output from the beacon analysis + //BeaconAnalysisOutput contains the summary statistics of a unique connection BeaconAnalysisOutput struct { UconnID bson.ObjectId `bson:"uconn_id"` TS_iRange int64 `bson:"ts_iRange"` TS_iMode int64 `bson:"ts_iMode"` TS_iModeCount int64 `bson:"ts_iMode_count"` - TS_iSkew float64 `bson:"ts_iSkew"` - TS_iDispersion int64 `bson:"ts_iDispersion"` - TS_duration float64 `bson:"ts_duration"` - Score float64 `bson:"score"` TS_intervals []int64 `bson:"ts_intervals"` TS_intervalCounts []int64 `bson:"ts_interval_counts"` - DS_skew float64 `bson:"ds_skew"` - DS_dispersion int64 `bson:"ds_dispersion"` + TS_iDispersion int64 `bson:"ts_iDispersion"` + TS_iSkew float64 `bson:"ts_iSkew"` + TS_duration float64 `bson:"ts_duration"` + TS_score float64 `bson:"ts_score"` DS_range int64 `bson:"ds_range"` - DS_sizes []int64 `bson:"ds_sizes"` - DS_counts []int64 `bson:"ds_counts"` DS_mode int64 `bson:"ds_mode"` DS_modeCount int64 `bson:"ds_mode_count"` + DS_sizes []int64 `bson:"ds_sizes"` + DS_sizeCounts []int64 `bson:"ds_counts"` + DS_dispersion int64 `bson:"ds_dispersion"` + DS_skew float64 `bson:"ds_skew"` + DS_score float64 `bson:"ds_score"` + Score float64 `bson:"score"` } //Used in order to join the uconn and beacon tables BeaconAnalysisView struct { - Src string `bson:"src"` - Dst string `bson:"dst"` - LocalSrc bool `bson:"local_src"` - LocalDst bool `bson:"local_dst"` - Connections int64 `bson:"connection_count"` - AvgBytes float64 `bson:"avg_bytes"` - TS_iRange int64 `bson:"ts_iRange"` - TS_iMode 
int64 `bson:"ts_iMode"` - TS_iModeCount int64 `bson:"ts_iMode_count"` - TS_iSkew float64 `bson:"ts_iSkew"` - TS_iDispersion int64 `bson:"ts_iDispersion"` - TS_duration float64 `bson:"ts_duration"` - Score float64 `bson:"score"` - DS_skew float64 `bson:"ds_skew"` - DS_dispersion int64 `bson:"ds_dispersion"` - DS_range int64 `bson:"ds_range"` - DS_mode int64 `bson:"ds_mode"` - DS_modeCount int64 `bson:"ds_mode_count"` + Src string `bson:"src"` + Dst string `bson:"dst"` + LocalSrc bool `bson:"local_src"` + LocalDst bool `bson:"local_dst"` + Connections int64 `bson:"connection_count"` + AvgBytes float64 `bson:"avg_bytes"` + TS_iRange int64 `bson:"ts_iRange"` + TS_iMode int64 `bson:"ts_iMode"` + TS_iModeCount int64 `bson:"ts_iMode_count"` + TS_iSkew float64 `bson:"ts_iSkew"` + TS_iDispersion int64 `bson:"ts_iDispersion"` + TS_duration float64 `bson:"ts_duration"` + Score float64 `bson:"score"` + DS_skew float64 `bson:"ds_skew"` + DS_dispersion int64 `bson:"ds_dispersion"` + DS_range int64 `bson:"ds_range"` + DS_mode int64 `bson:"ds_mode"` + DS_modeCount int64 `bson:"ds_mode_count"` } ) From 6731f883aea6b9b15049c1c235331fdb95f08c76 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Wed, 26 Jul 2017 15:52:56 -0600 Subject: [PATCH 042/117] Prevent fall through on RITA show-X commands --- commands/show-beacons.go | 1 + commands/show-explodedDns.go | 1 + commands/show-long-connections.go | 1 + commands/show-scans.go | 1 + commands/show-urls.go | 4 ++++ commands/show-user-agents.go | 1 + 6 files changed, 9 insertions(+) diff --git a/commands/show-beacons.go b/commands/show-beacons.go index ad483c61..a6adc46f 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -48,6 +48,7 @@ func showBeacons(c *cli.Context) error { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } err := showBeaconCsv(data) diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 2b0df219..ba4aac0f 100644 --- a/commands/show-explodedDns.go +++ 
b/commands/show-explodedDns.go @@ -41,6 +41,7 @@ func init() { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } err := showDNSResults(explodedResults) if err != nil { diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index 8fc45553..71ac7067 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -44,6 +44,7 @@ func init() { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } err := showConns(longConns) if err != nil { diff --git a/commands/show-scans.go b/commands/show-scans.go index 714a593f..940b6239 100644 --- a/commands/show-scans.go +++ b/commands/show-scans.go @@ -46,6 +46,7 @@ func init() { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } err := showScans(scans, showPorts) if err != nil { diff --git a/commands/show-urls.go b/commands/show-urls.go index da511d34..ecd9c26f 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -42,7 +42,9 @@ func init() { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } + err := showURLs(urls) if err != nil { return cli.NewExitError(err.Error(), -1) @@ -50,6 +52,7 @@ func init() { return nil }, } + vistedURLs := cli.Command{ Name: "show-most-visited-urls", @@ -79,6 +82,7 @@ func init() { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } err := showURLs(urls) if err != nil { diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index ccdca4b1..f7650923 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -52,6 +52,7 @@ func init() { if err != nil { return cli.NewExitError(err.Error(), -1) } + return nil } err := showAgents(agents) if err != nil { From 8e2d377646230803987b4059ecaf7937e659ff81 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Tue, 8 Aug 2017 10:49:29 -0600 Subject: [PATCH 043/117] Prevent RITA from following symlinks --- parser/fsimporter.go | 5 ++++- 1 file changed, 4 
insertions(+), 1 deletion(-) diff --git a/parser/fsimporter.go b/parser/fsimporter.go index 5333c228..5555ecf5 100644 --- a/parser/fsimporter.go +++ b/parser/fsimporter.go @@ -101,7 +101,10 @@ func readDir(cpath string, logger *log.Logger) []string { } for _, file := range files { - if file.IsDir() { + // Stop RITA from following symlinks + // In the case that RITA is pointed directly at Bro, it should not + // parse the "current" symlink which points to the spool. + if file.IsDir() && file.Mode() != os.ModeSymlink { toReturn = append(toReturn, readDir(path.Join(cpath, file.Name()), logger)...) } if strings.HasSuffix(file.Name(), "gz") || From 0c8755ba71a7173e891a69e833d17fbf2932e803 Mon Sep 17 00:00:00 2001 From: logan lembke Date: Mon, 14 Aug 2017 16:51:02 -0600 Subject: [PATCH 044/117] Added more granular locking to parser, prevent panic when importing data into an already analyzed dataset --- parser/datastore.go | 17 +++ parser/fsimporter.go | 17 +-- parser/mongodatastore.go | 284 ++++++++++++++++++++++----------------- 3 files changed, 184 insertions(+), 134 deletions(-) create mode 100644 parser/datastore.go diff --git a/parser/datastore.go b/parser/datastore.go new file mode 100644 index 00000000..d949db30 --- /dev/null +++ b/parser/datastore.go @@ -0,0 +1,17 @@ +package parser + +import "github.com/ocmdev/rita/parser/parsetypes" + +//Datastore allows RITA to store bro data in a database +type Datastore interface { + Store(*ImportedData) + Flush() + Index() +} + +//ImportedData directs BroData to a specific database and collection +type ImportedData struct { + BroData parsetypes.BroData + TargetDatabase string + TargetCollection string +} diff --git a/parser/fsimporter.go b/parser/fsimporter.go index 5555ecf5..f69cfbe1 100644 --- a/parser/fsimporter.go +++ b/parser/fsimporter.go @@ -38,7 +38,7 @@ func NewFSImporter(resources *database.Resources, } //Run starts importing a given path into a datastore -func (fs *FSImporter) Run(datastore *MongoDatastore) { 
+func (fs *FSImporter) Run(datastore Datastore) { // track the time spent parsing start := time.Now() fs.res.Log.WithFields( @@ -67,7 +67,7 @@ func (fs *FSImporter) Run(datastore *MongoDatastore) { parseFiles(indexedFiles, fs.parseThreads, fs.res.Config.S.Bro.UseDates, datastore, fs.res.Log) - datastore.flush() + datastore.Flush() updateFilesIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) progTime = time.Now() @@ -78,7 +78,7 @@ func (fs *FSImporter) Run(datastore *MongoDatastore) { }, ).Info("Finished upload. Starting indexing") fmt.Println("\t[-] Indexing log entries. This may take a while.") - datastore.finalize() + datastore.Index() progTime = time.Now() fs.res.Log.WithFields( @@ -156,7 +156,7 @@ func indexFiles(files []string, indexingThreads int, //errors and parses the bro files line by line into the database. //NOTE: side effect: this sets the dates field on the indexedFiles func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, - useDates bool, datastore *MongoDatastore, logger *log.Logger) { + useDates bool, datastore Datastore, logger *log.Logger) { //set up parallel parsing n := len(indexedFiles) parsingWG := new(sync.WaitGroup) @@ -213,15 +213,16 @@ func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, } //figure out what database this line is heading for + targetCollection := indexedFiles[j].TargetCollection targetDB := indexedFiles[j].TargetDatabase if useDates { targetDB += "-" + date } - datastore.store(importedData{ - broData: data, - targetDatabase: targetDB, - file: indexedFiles[j], + datastore.Store(&ImportedData{ + BroData: data, + TargetDatabase: targetDB, + TargetCollection: targetCollection, }) } } diff --git a/parser/mongodatastore.go b/parser/mongodatastore.go index ba236fbb..28e820b7 100644 --- a/parser/mongodatastore.go +++ b/parser/mongodatastore.go @@ -1,132 +1,111 @@ package parser import ( - log "github.com/sirupsen/logrus" - + "errors" "sync" - mgo "gopkg.in/mgo.v2" + log "github.com/sirupsen/logrus" 
"github.com/ocmdev/rita/database" - fpt "github.com/ocmdev/rita/parser/fileparsetypes" - pt "github.com/ocmdev/rita/parser/parsetypes" + mgo "gopkg.in/mgo.v2" ) -//importedData is sent to a datastore to be stored away -type importedData struct { - broData pt.BroData - targetDatabase string - file *fpt.IndexedFile +//storeMap maps database names to collection maps and provides a mutex +//to sync around +type storeMap struct { + databases map[string]*collectionMap + rwLock *sync.Mutex +} + +//collectionMap maps collection names to collection writers and provides a +//mutex to sync around +type collectionMap struct { + collections map[string]*collectionWriter + rwLock *sync.Mutex } -//collectionStore binds a collection write channel with the target collection -//and the indices to be applied to it -type collectionStore struct { - writeChannel chan importedData - database string - collection string - indices []string +//collectionWriter reads a channel and inserts the data into MongoDB +type collectionWriter struct { + writeChannel chan *ImportedData + writerWG *sync.WaitGroup + session *mgo.Session + logger *log.Logger + bufferSize int + targetDatabase string + targetCollection string + indices []string } -//MongoDatastore is a datastore which stores bro data in MongoDB +//MongoDatastore provides a backend for storing bro data in MongoDB type MongoDatastore struct { - dbMap map[string]map[string]collectionStore - existingDBs []string - metaDB *database.MetaDBHandle - bufferSize int - session *mgo.Session - logger *log.Logger - waitgroup *sync.WaitGroup - mutex1 *sync.Mutex - mutex2 *sync.Mutex + session *mgo.Session + metaDB *database.MetaDBHandle + bufferSize int + logger *log.Logger + writerWG *sync.WaitGroup + writeMap storeMap + analyzedDBs []string + unanalyzedDBs []string } -//NewMongoDatastore creates a datastore which stores bro data in MongoDB +//NewMongoDatastore returns a new MongoDatastore and caches the existing +//db names func NewMongoDatastore(session 
*mgo.Session, metaDB *database.MetaDBHandle, bufferSize int, logger *log.Logger) *MongoDatastore { return &MongoDatastore{ - dbMap: make(map[string]map[string]collectionStore), - existingDBs: metaDB.GetDatabases(), - metaDB: metaDB, - bufferSize: bufferSize, - session: session, - logger: logger, - waitgroup: new(sync.WaitGroup), - mutex1: new(sync.Mutex), //mutex1 syncs the first level of map access - mutex2: new(sync.Mutex), //mutex2 syncs the second level of map access - //NOTE: Mutex2 may be replaced with a map of mutexes for better performance + session: session, + metaDB: metaDB, + bufferSize: bufferSize, + logger: logger, + writerWG: new(sync.WaitGroup), + writeMap: storeMap{ + databases: make(map[string]*collectionMap), + rwLock: new(sync.Mutex), + }, + analyzedDBs: metaDB.GetAnalyzedDatabases(), + unanalyzedDBs: metaDB.GetUnAnalyzedDatabases(), } } -//store a line of imported data in MongoDB -func (mongo *MongoDatastore) store(data importedData) { - //get the map representing the target database - mongo.mutex1.Lock() - collectionMap, ok := mongo.dbMap[data.targetDatabase] - if !ok { - mongo.registerDatabase(data.targetDatabase) - collectionMap = make(map[string]collectionStore) - mongo.dbMap[data.targetDatabase] = collectionMap - } - mongo.mutex1.Unlock() - - //get the collectionStore for the target collection - mongo.mutex2.Lock() - coll, ok := collectionMap[data.file.TargetCollection] - if !ok { - coll = collectionStore{ - writeChannel: make(chan importedData), - database: data.targetDatabase, - collection: data.file.TargetCollection, - indices: data.broData.Indices(), - } - mongo.session.DB(coll.database).C(coll.collection). - Create(&mgo.CollectionInfo{DisableIdIndex: true}) - collectionMap[data.file.TargetCollection] = coll - //start the goroutine for this writer - mongo.waitgroup.Add(1) - go bulkInsertImportedData( - coll, mongo.bufferSize, mongo.session.Copy(), - mongo.waitgroup, mongo.logger, - ) +//Store saves parsed Bro data to MongoDB. 
+//Additionally, it caches some information to create indices later on +func (mongo *MongoDatastore) Store(data *ImportedData) { + collMap, err := mongo.getCollectionMap(data) + if err != nil { + mongo.logger.Error(err) + return } - mongo.mutex2.Unlock() - //queue up the line to be written - coll.writeChannel <- data + collWriter := mongo.getCollectionWriter(data, collMap) + collWriter.writeChannel <- data } -//flush flushes the datastore -func (mongo *MongoDatastore) flush() { - //wait for any changes to the collection maps to finish - mongo.mutex1.Lock() - mongo.mutex2.Lock() - //close out the write channels, allowing them to flush - for _, db := range mongo.dbMap { - for _, collStore := range db { - close(collStore.writeChannel) +//Flush waits for all writing to finish +func (mongo *MongoDatastore) Flush() { + mongo.writeMap.rwLock.Lock() + for _, collMap := range mongo.writeMap.databases { + collMap.rwLock.Lock() + for _, collWriter := range collMap.collections { + close(collWriter.writeChannel) } + collMap.rwLock.Unlock() } - mongo.mutex2.Unlock() - mongo.mutex1.Unlock() - //wait for the channels to flush - mongo.waitgroup.Wait() + mongo.writeMap.rwLock.Unlock() + mongo.writerWG.Wait() } -//finalize ensures the indexes are applied to the mongo collections -func (mongo *MongoDatastore) finalize() { - //ensure indices +//Index ensures that the data is searchable +func (mongo *MongoDatastore) Index() { //NOTE: We do this one by one in order to prevent individual indexing //operations from taking too long ssn := mongo.session.Copy() defer ssn.Close() - //wait for any changes to the collection maps to finish - //this shouldn't be an issue but it doesn't hurt - mongo.mutex1.Lock() - mongo.mutex2.Lock() - for _, collMap := range mongo.dbMap { - for _, collStore := range collMap { - collection := ssn.DB(collStore.database).C(collStore.collection) - for _, index := range collStore.indices { + + mongo.writeMap.rwLock.Lock() + for _, collMap := range 
mongo.writeMap.databases { + collMap.rwLock.Lock() + for _, collWriter := range collMap.collections { + collection := ssn.DB(collWriter.targetDatabase).C(collWriter.targetCollection) + for _, index := range collWriter.indices { err := collection.EnsureIndex(mgo.Index{ Key: []string{index}, }) @@ -137,51 +116,106 @@ func (mongo *MongoDatastore) finalize() { } } } + collMap.rwLock.Unlock() } - mongo.mutex2.Unlock() - mongo.mutex1.Unlock() + mongo.writeMap.rwLock.Unlock() } -func (mongo *MongoDatastore) registerDatabase(db string) { - found := false - for _, existingDB := range mongo.existingDBs { - if db == existingDB { - found = true - break +//getCollectionMap returns a map from collection names to collection writers +//given a bro entry's target database. If the database does not exist, +//getCollectionMap will create the database. If the database does exist +//and the database has been analyzed, getCollectionMap will return an error. +func (mongo *MongoDatastore) getCollectionMap(data *ImportedData) (*collectionMap, error) { + mongo.writeMap.rwLock.Lock() + defer mongo.writeMap.rwLock.Unlock() + + //check the cache for the collection map + collMap, ok := mongo.writeMap.databases[data.TargetDatabase] + if ok { + return collMap, nil + } + + //check if the database is already analyzed + for _, analyzedDB := range mongo.analyzedDBs { + if analyzedDB == data.TargetDatabase { + return nil, errors.New("cannot import bro data into already analyzed database") + } + } + + //check if the database was created in an earlier parse + targetDBExists := false + for _, unanalyzedDB := range mongo.unanalyzedDBs { + if unanalyzedDB == data.TargetDatabase { + targetDBExists = true + } + } + + //create the database if it doesn't exist + if !targetDBExists { + err := mongo.metaDB.AddNewDB(data.TargetDatabase) + if err != nil { + return nil, err } + mongo.unanalyzedDBs = append(mongo.unanalyzedDBs, data.TargetDatabase) + } + + mongo.writeMap.databases[data.TargetDatabase] = 
&collectionMap{ + collections: make(map[string]*collectionWriter), + rwLock: new(sync.Mutex), + } + return mongo.writeMap.databases[data.TargetDatabase], nil +} + +//getCollectionWriter returns a collection writer which can be used to send +//data to a specific MongoDB collection. If a collection writer does not exist +//in the cache, it is created and a new thread is spun up for it. +func (mongo *MongoDatastore) getCollectionWriter(data *ImportedData, collMap *collectionMap) *collectionWriter { + collMap.rwLock.Lock() + defer collMap.rwLock.Unlock() + collWriter, ok := collMap.collections[data.TargetCollection] + if ok { + return collWriter } - if !found { - mongo.metaDB.AddNewDB(db) - } else { - mongo.logger.Error("Attempted to insert data into existing database.") - panic("[!] Attempted to insert data into existing database.") + collMap.collections[data.TargetCollection] = &collectionWriter{ + writeChannel: make(chan *ImportedData), + writerWG: mongo.writerWG, + session: mongo.session.Copy(), + logger: mongo.logger, + bufferSize: mongo.bufferSize, + targetDatabase: data.TargetDatabase, + targetCollection: data.TargetCollection, + indices: data.BroData.Indices(), } + go collMap.collections[data.TargetCollection].bulkInsert() + return collMap.collections[data.TargetCollection] } -func bulkInsertImportedData(coll collectionStore, bufferSize int, - session *mgo.Session, wg *sync.WaitGroup, logger *log.Logger) { +//bulkInsert is a goroutine which reads a channel and inserts the data in bulk +//into MongoDB +func (writer *collectionWriter) bulkInsert() { + writer.writerWG.Add(1) + defer writer.writerWG.Done() + defer writer.session.Close() - //buffer the writes to MongoDB - buffer := make([]interface{}, 0, bufferSize) - collection := session.DB(coll.database).C(coll.collection) + buffer := make([]interface{}, 0, writer.bufferSize) + collection := writer.session.DB(writer.targetDatabase).C(writer.targetCollection) - //append data to the buffer until it is full, then 
insert them - for data := range coll.writeChannel { - if len(buffer) == bufferSize { + for data := range writer.writeChannel { + if len(buffer) == writer.bufferSize { bulk := collection.Bulk() bulk.Unordered() bulk.Insert(buffer...) _, err := bulk.Run() if err != nil { - logger.WithFields(log.Fields{ - "target_database": coll.database, - "target_collection": coll.collection, + writer.logger.WithFields(log.Fields{ + "target_database": writer.targetDatabase, + "target_collection": writer.targetCollection, "error": err.Error(), }).Error("Unable to insert bulk data in MongoDB") } buffer = buffer[:0] } - buffer = append(buffer, data.broData) + buffer = append(buffer, data.BroData) } //guaranteed to be at least 1 line in the buffer @@ -190,12 +224,10 @@ func bulkInsertImportedData(coll collectionStore, bufferSize int, bulk.Insert(buffer...) _, err := bulk.Run() if err != nil { - logger.WithFields(log.Fields{ - "target_database": coll.database, - "target_collection": coll.collection, + writer.logger.WithFields(log.Fields{ + "target_database": writer.targetDatabase, + "target_collection": writer.targetCollection, "error": err.Error(), }).Error("Unable to insert bulk data in MongoDB") } - session.Close() - wg.Done() } From b1e8b8d94b00e20a5f28a7695db77e5920a5925a Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Wed, 16 Aug 2017 21:21:49 -0600 Subject: [PATCH 045/117] Split config into two files, updated install script to be coooool --- config/config.go | 39 +++--- config/static.go | 2 +- database/mock.go | 6 +- database/resources.go | 6 +- etc/rita.yaml | 37 +----- etc/tables.yaml | 42 +++++++ install.sh | 272 +++++++++++++++++++++++------------------- 7 files changed, 223 insertions(+), 181 deletions(-) create mode 100644 etc/tables.yaml mode change 100644 => 100755 install.sh diff --git a/config/config.go b/config/config.go index 0a80d3bc..f8e65a94 100644 --- a/config/config.go +++ b/config/config.go @@ -19,34 +19,45 @@ type ( } ) +const userConfigPath = 
"/.rita/config.yaml" +const tableConfigPath = "/.rita/tables.yaml" + +//NOTE: If go ever gets default parameters, default the config options to "" + // GetConfig retrieves a configuration in order of precedence -func GetConfig(cfgPath string) (*Config, error) { - if cfgPath != "" { - return loadSystemConfig(cfgPath) +func GetConfig(userConfig string, tableConfig string) (*Config, error) { + //var user string + var currUser *user.User + if userConfig == "" || tableConfig == "" { + // Get the user's homedir + var err error + currUser, err = user.Current() + if err != nil { + fmt.Fprintf(os.Stderr, "Could not get user info: %s\n", err.Error()) + return nil, err + } } - // Get the user's homedir - user, err := user.Current() - if err != nil { - fmt.Fprintf(os.Stderr, "Could not get user info: %s\n", err.Error()) - } else { - return loadSystemConfig(user.HomeDir + "/.rita/config.yaml") + if userConfig == "" { + userConfig = currUser.HomeDir + userConfigPath + } + if tableConfig == "" { + tableConfig = currUser.HomeDir + tableConfigPath } - // If none of the other configs have worked, go for the global config - return loadSystemConfig("/etc/rita/config.yaml") + return loadSystemConfig(userConfig, tableConfig) } // loadSystemConfig attempts to parse a config file -func loadSystemConfig(cfgPath string) (*Config, error) { +func loadSystemConfig(userConfig string, tableConfig string) (*Config, error) { var config = new(Config) - static, err := loadStaticConfig(cfgPath) + static, err := loadStaticConfig(userConfig) if err != nil { return config, err } config.S = *static - tables, err := loadTableConfig(cfgPath) + tables, err := loadTableConfig(tableConfig) if err != nil { return config, err } diff --git a/config/static.go b/config/static.go index db696c9f..cb615afa 100644 --- a/config/static.go +++ b/config/static.go @@ -6,6 +6,7 @@ import ( "os" "reflect" "time" + yaml "gopkg.in/yaml.v2" ) @@ -46,7 +47,6 @@ type ( //BlacklistedStaticCfg is used to control the blacklisted 
analysis module BlacklistedStaticCfg struct { - BlacklistDatabase string `yaml:"Database"` UseIPms bool `yaml:"myIP.ms"` UseDNSBH bool `yaml:"MalwareDomains.com"` UseMDL bool `yaml:"MalwareDomainList.com"` diff --git a/database/mock.go b/database/mock.go index 7872f400..e67b3806 100644 --- a/database/mock.go +++ b/database/mock.go @@ -9,9 +9,11 @@ import ( ) // InitMockResources grabs the configuration file and initializes the configuration data // returning a *Resources object which has all of the necessary configuration information -func InitMockResources(cfgPath string) *Resources { +func InitMockResources(userConfig string) *Resources { //TODO: hard code in a test config - conf, err := config.GetConfig(cfgPath) + //GetConfig requires a table config. "" tells the configuration manager + //to use the default table config. + conf, err := config.GetConfig(userConfig, "") if err != nil { fmt.Fprintf(os.Stdout, "Failed to config, exiting") panic(err) } diff --git a/database/resources.go b/database/resources.go index 4bc0bbcb..3d15088d 100644 --- a/database/resources.go +++ b/database/resources.go @@ -30,8 +30,10 @@ type ( // InitResources grabs the configuration file and initializes the configuration data // returning a *Resources object which has all of the necessary configuration information -func InitResources(cfgPath string) *Resources { - conf, err := config.GetConfig(cfgPath) +func InitResources(userConfig string) *Resources { + //GetConfig requires a table config. "" tells the configuration manager + //to use the default table config. + conf, err := config.GetConfig(userConfig, "") if err != nil { fmt.Fprintf(os.Stdout, "Failed to config, exiting") panic(err) } diff --git a/etc/rita.yaml b/etc/rita.yaml index b566d4ed..e6442220 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -25,9 +25,8 @@ LogConfig: # LogPath is the path for Rita's logs, generally this just defaults to # $HOME/.rita/logs.
Logs will only be written here if LogToFile is true RitaLogPath: $HOME/.rita/logs - LogToFile: true - RitaLogTable: logs + LogToFile: true LogToDB: true # The section Bro configures the bro ingestor @@ -70,7 +69,6 @@ Bro: ImportBuffer: 100000 BlackListed: - Database: rita-blacklist # These are blacklists built into rita-blacklist. Set these to false # to disable checks against them. myIP.ms: true @@ -102,47 +100,14 @@ BlackListed: # Ex: ftp://myftpserver.com/a/file/over/here.txt CustomURLBlacklists: [] - # Table names - SourceIPsTable: blSourceIPs - DestIPsTable: blDestIPs - HostnamesTable: blHostnames - UrlsTable: blUrls - -# NOTE: DO NOT CHANGE THE SETTINGS BELOW UNLESS YOU ARE FAMILIAR WITH THE CODE # -Structure: - ConnectionTable: conn - HttpTable: http - DnsTable: dns - UniqueConnectionTable: uconn - HostTable: host - -Dns: - ExplodedDnsTable: explodedDns - HostnamesTable: hostnames - Crossref: - SourceTable: sourceXREF - DestinationTable: destXREF BeaconThreshold: .7 Scanning: ScanThreshold: 50 - ScanTable: scan Beacon: DefaultConnectionThresh: 24 - BeaconTable: beacon - -Urls: - UrlsTable: urls - -UserAgent: - UserAgentTable: useragent - -MetaTables: - FilesTable: files - DatabasesTable: databases - # Adjusting batchsize and prefetch may help speed up certain database queries BatchSize: 300 diff --git a/etc/tables.yaml b/etc/tables.yaml new file mode 100644 index 00000000..ab9eb077 --- /dev/null +++ b/etc/tables.yaml @@ -0,0 +1,42 @@ +# NOTE: DO NOT CHANGE THE SETTINGS BELOW UNLESS YOU ARE FAMILIAR WITH THE CODE # +LogConfig: + RitaLogTable: logs + +Structure: + ConnectionTable: conn + HttpTable: http + DnsTable: dns + UniqueConnectionTable: uconn + HostTable: host + +BlackListed: + Database: rita-blacklist + SourceIPsTable: blSourceIPs + DestIPsTable: blDestIPs + HostnamesTable: blHostnames + UrlsTable: blUrls + +Dns: + ExplodedDnsTable: explodedDns + HostnamesTable: hostnames + +Crossref: + SourceTable: sourceXREF + DestinationTable: destXREF + +Scanning: 
+ ScanTable: scan + +Beacon: + BeaconTable: beacon + +Urls: + UrlsTable: urls + +UserAgent: + UserAgentTable: useragent + +MetaTables: + FilesTable: files + DatabasesTable: databases + diff --git a/install.sh b/install.sh old mode 100644 new mode 100755 index 948fda90..725979d4 --- a/install.sh +++ b/install.sh @@ -3,24 +3,41 @@ # RITA is brought to you by Offensive CounterMeasures. # offensivecountermeasures.com +_NAME=$(basename "${0}") +_FAILED="\e[91mFAILED\e[0m" +_SUCCESS="\e[92mSUCCESS\e[0m" + +#Error handling +#Kill 0 to kill subshells as well +trap "printf '\n[!] Installation $_FAILED!\n'; kill 0" ERR INT set -o errexit +set -o errtrace set -o pipefail -_NAME=$(basename "${0}") -_INSDIR="/usr/local" +# Make sure to source the latest .bashrc +# Hack the PS1 variable to get around ubuntu .bashrc +OLD_PS1=$PS1 +PS1=" " +# Hack the interactive flag to get around other .bashrc's +set -i + +source $HOME/.bashrc + +# Clean up our hacks +set +i +PS1=$OLD_PS1 +unset OLD_PS1 -__help() { - cat <] - ${_NAME} -h | --help Options: -h --help Show this help message. - -i --install-dir Directory to install to. -u --uninstall Remove RITA. HEREDOC @@ -28,48 +45,74 @@ HEREDOC __prep() { cat </dev/null; do + echo -ne "$loadingText.\r" + sleep 0.5 + echo -ne "$loadingText..\r" + sleep 0.5 + echo -ne "$loadingText...\r" + sleep 0.5 + echo -ne "\r\033[K" + echo -ne "$loadingText\r" + sleep 0.5 + done + wait $pid + echo -e "$loadingText... $_SUCCESS" +} + +__checkPermissions() { + [ `id -u` -eq 0 ] } __uninstall() { - printf "Removing $_RITADIR \n" - rm -rf $_RITADIR - printf "Removing $GOPATH/bin/rita \n" + printf "\t[!] Removing $GOPATH/bin/rita \n" rm -rf $GOPATH/bin/rita - printf "Removing $GOPATH/src/github.com/ocmdev \n" + printf "\t[!] Removing $GOPATH/src/github.com/ocmdev \n" rm -rf $GOPATH/src/github.com/ocmdev - printf "Removing $HOME/.rita \n" + printf "\t[!] 
Removing $HOME/.rita \n" rm -rf $HOME/.rita } __install() { # Check if RITA is already installed, if so ask if this is a re-install - if [ -e $_RITADIR ] + if [ ! -z $(command -v rita) ] || + [ -f $GOPATH/bin/rita ] then - printf "[+] $_RITADIR already exists.\n" - read -p "[-] Would you like to erase it and re-install? [y/n] " -n 1 -r + printf "[+] RITA is already installed.\n" + read -p "[-] Would you like to erase it and re-install? [y/n] " -r if [[ $REPLY =~ ^[Yy]$ ]] then __uninstall @@ -78,151 +121,129 @@ __install() { fi fi - echo "[+] Updating apt... -" + __prep - apt update -qq + # Install installation dependencies + apt-get update > /dev/null & __load "[+] Updating apt" + + apt-get install -y git wget make lsb-release > /dev/null & \ + __load "[+] Installing git, wget, make, and lsb-release" + + # Install Bro IDS + printf "[+] Checking if Bro IDS is installed... " - echo " -[+] Ensuring git is installed... -" - apt install -y git - echo " -[+] Ensuring bro is installed... -" if [ $(dpkg-query -W -f='${Status}' bro 2>/dev/null | grep -c "ok installed") -eq 0 ] && [ $(dpkg-query -W -f='${Status}' securityonion-bro 2>/dev/null | grep -c "ok installed") -eq 0 ] then - apt install -y bro - apt install -y broctl + printf "\n" + apt-get install -y bro broctl bro-aux > /dev/null & \ + __load "\t[+] Installing Bro IDS" + else + printf "$_SUCCESS\n" fi - echo " -[+] Ensuring go is installed... -" - + # Install Go + printf "[+] Checking if Go is installed...\n" # Check if go is not available in the path - if [ ! $(command -v go) ] + if [ -z $(command -v go) ] then # Check if go is available in the standard location if [ ! 
-e "/usr/local/go" ] then - # golang most recent update - wget https://storage.googleapis.com/golang/go1.7.1.linux-amd64.tar.gz - tar -zxf go1.7.1.linux-amd64.tar.gz -C /usr/local/ - echo 'export PATH=$PATH:/usr/local/go/bin' >> $HOME/.bashrc - rm go1.7.1.linux-amd64.tar.gz + ( # golang most recent update + wget -q https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz + tar -zxf go1.8.3.linux-amd64.tar.gz -C /usr/local/ + rm go1.8.3.linux-amd64.tar.gz + echo 'export PATH=$PATH:/usr/local/go/bin' >> $HOME/.bashrc + ) & __load "\t[+] Installing Go" fi + # Add go to the path export PATH="$PATH:/usr/local/go/bin" else - echo -e "\e[31m[-] WARNING: Go has been detected on this system,\e[37m if you -installed with apt, RITA has only been tested with golang 1.7 which is currently not the -version in the Ubuntu apt repositories, make sure your golang is up to date -with 'go version'. Otherwise you can remove with 'sudo apt remove golang' and let this script -install the correct version for you! + echo -e "\e[93m\t[!] WARNING: Go has been detected on this system.\e[0m +\tIf you installed Go with apt, make sure your Go installation is up +\tto date with 'go version'. RITA has only been tested with golang +\t1.7 and 1.8 which are currently not the versions in the Ubuntu +\tapt repositories. You may remove the old version with +\t'sudo apt remove golang' and let this script install the correct +\tversion for you! " - sleep 10s fi - - echo -e "[+] Configuring Go dev environment... 
-\e[0m" - - sleep 3s - # Check if the GOPATH isn't set - if [ -z "${GOPATH}" ] + if [ -z ${GOPATH+x} ] then - mkdir -p $HOME/go/{src,pkg,bin} - echo 'export GOPATH=$HOME/go' >> $HOME/.bashrc + ( # Set up the GOPATH + mkdir -p $HOME/go/{src,pkg,bin} + echo 'export GOPATH=$HOME/go' >> $HOME/.bashrc + echo 'export PATH=$PATH:$GOPATH/bin' >> $HOME/.bashrc + ) & __load "[+] Configuring Go dev environment" export GOPATH=$HOME/go - echo 'export PATH=$PATH:$GOPATH/bin' >> $HOME/.bashrc export PATH=$PATH:$GOPATH/bin - else - echo -e "[-] GOPATH seems to be set, we'll skip this part then for now - " fi - echo -e "[+] Getting the package key and install package for MongoDB... -" - - sleep 3s - - apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6 - - echo "deb [ arch=amd64 ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.4.list - - apt update -qq - apt install -y mongodb-org - - printf "\n[+] Running 'go get github.com/ocmdev/rita...'\n\n" - - # Build RITA + # Install MongoDB + apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 \ + --recv 0C49F3730359A14518585931BC711F9BA15703C6 > /dev/null 2>&1 & \ + __load "[+] Obtaining the package key for MongoDB" - apt install -y build-essential - go get github.com/ocmdev/rita - printf "[+] Installing RITA...\n\n" - cd $GOPATH/src/github.com/ocmdev/rita - make install + echo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.4.list - printf "[+] Transferring files...\n\n" - mkdir $_RITADIR + apt-get update > /dev/null & __load "[+] Updating apt" + apt-get install -y mongodb-org > /dev/null & __load "[+] Installing MongoDB" - cp -r etc $_RITADIR/etc - cp LICENSE $_RITADIR/LICENSE + ( # Build RITA + go get github.com/ocmdev/rita + cd $GOPATH/src/github.com/ocmdev/rita + make install > /dev/null + ) & 
__load "[+] Installing RITA" - # Install the base configuration file - printf "[+] Installing config to $HOME/.rita/config.yaml\n\n" - mkdir -p $HOME/.rita/logs - cp etc/rita.yaml $HOME/.rita/config.yaml + ( # Install the base configuration files + mkdir $HOME/.rita + mkdir $HOME/.rita/logs + cd $GOPATH/src/github.com/ocmdev/rita + cp ./LICENSE $HOME/.rita/LICENSE + cp ./etc/rita.yaml $HOME/.rita/config.yaml + cp ./etc/tables.yaml $HOME/.rita/tables.yaml + ) & __load "[+] Installing config files to $HOME/.rita" - # Give ownership of ~/go to the user - sudo chown -R $SUDO_USER:$SUDO_USER $HOME/go - sudo chown -R $SUDO_USER:$SUDO_USER $HOME/.rita - echo "[+] Make sure you also configure Bro and run with 'sudo broctl deploy' and make sure MongoDB is running with the command 'mongo' or 'sudo mongo'. -" - - echo -e "[+] If you need to stop Mongo at any time, run 'sudo service mongod stop' -[+] In order to finish the installation, reload bash config with 'source ~/.bashrc'. -[+] Also make sure to start the mongoDB service with 'sudo service mongod start' before running RITA. -[+] You can access the mongo shell with 'sudo mongo' -" + # If the user is using sudo, give ownership to the sudo user + if [ -z ${SUDO_USER+x} ] + then + chown -R $SUDO_USER:$SUDO_USER $HOME/go + chown -R $SUDO_USER:$SUDO_USER $HOME/.rita + fi - echo -e "[+] You may need to source your .bashrc before you call RITA! -" + echo -e " +In order to finish the installation, reload your bash config +with 'source ~/.bashrc'. Make sure to configure Bro and run +'sudo broctl deploy'. Also, make sure to start the MongoDB +service with 'sudo service mongod start'. You can access +the MongoDB shell with 'mongo'. If, at any time, you need +to stop MongoDB, run 'sudo service mongod stop'." - printf "Thank you for installing RITA!\n" - printf "OCMDev Group projects IRC #ocmdev on OFTC\n" + __title + printf "Thank you for installing RITA! 
" printf "Happy hunting\n" - } # start point for installer __entry() { - # Check for help or other install dir + # Check for help if [[ "${1:-}" =~ ^-h|--help$ ]] then __help exit 0 fi - if [[ "${1:-}" =~ ^-i|--install-dir ]] - then - _INSDIR=$( echo "${@}" | cut -d' ' -f2 ) - fi - - # Set the rita directory - _RITADIR="$_INSDIR/rita" - - - # Check to see if the user has permission to install to this directory - if [ -w $_INSDIR ] + # Check to see if the user has permission to install RITA + if __checkPermissions then # Check if we are uninstalling if [[ "${1:-}" =~ ^-u|--uninstall ]] @@ -232,8 +253,7 @@ __entry() { __install fi else - printf "You do NOT have permission to write to $_INSDIR\n\n" - __help + printf "You do NOT have permission install RITA\n\n" fi } From 28fd4f41b56becb278a73facd66a2b0985d44831 Mon Sep 17 00:00:00 2001 From: Hannah Date: Thu, 17 Aug 2017 15:06:13 -0600 Subject: [PATCH 046/117] Changed blacklisted urls to get more accurate connection information --- analysis/blacklist/urls.go | 69 +++++++++++++++++++++++++++++++++++--- 1 file changed, 65 insertions(+), 4 deletions(-) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 7a7757d8..dc89bb9d 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -3,6 +3,7 @@ package blacklist import ( "errors" "strings" + "fmt" "github.com/ocmdev/rita-bl/list" @@ -55,6 +56,7 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, res.DB.GetSelectedDB(), res.Config.T.Urls.UrlsTable, res.Config.T.Structure.UniqueConnTable, + res.Config.T.Structure.HTTPTable, ssn, prefix, ) @@ -97,7 +99,8 @@ func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, } func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, - urlCollection, uconnCollection string, ssn *mgo.Session, prefix string) error { + urlCollection, uconnCollection string, httpCollection string, + ssn *mgo.Session, prefix string) error { var urlQuery bson.M urlTrimmed := 
strings.TrimPrefix(longURL, prefix) resourceIdx := strings.Index(urlTrimmed, "/") @@ -115,19 +118,77 @@ func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, } blURL.Host = host blURL.Resource = resource - - connQuery := bson.M{"dst": bson.M{"$in": blURLFull.IPs}} + httpPipeline := []bson.D{ + { + { "$match", bson.M{"host": host, "uri": resource} }, + }, + { + { "$group", bson.M{ + "_id": bson.D{ + { "h", "$host" }, + { "u", "$uri" }, + { "s", "$id_origin_h" }, + { "d", "$id_resp_h" }, + }, + "host": bson.M{ "$first": "$host" }, + "uri": bson.M{ "$first": "$uri" }, + "src": bson.M{ "$first": "$id_origin_h" }, + "dst": bson.M{ "$first": "$id_resp_h" }, + }}, + }, + { + { "$lookup", bson.M{ + "from": uconnCollection, + "localField": "dst", + "foreignField": "dst", + "as": "uconn", + }}, + }, + { + { "$unwind", "$uconn" }, + }, + { + { "$redact", bson.M{ + "$cond": bson.M{ + "if": bson.M{ + "$eq": []interface{}{ + "$src", + "$uconn.src", + }, + }, + "then": "$$KEEP", + "else": "$$PRUNE", + }, + }}, + }, + { + { "$project", bson.M{ + "id": 1, + "connection_count": "$uconn.connection_count", + "src": 1, + "dst": 1, + "local_src": "$uconn.local_src", + "local_dst": "$uconn.local_dst", + "total_bytes": "$uconn.total_bytes", + "average_bytes": "$uconn.average_bytes", + "total_duration": "$uconn.total_duration", + }}, + }, + } var totalBytes int var totalConnections int var uniqueConnCount int - uniqueConnections := ssn.DB(db).C(uconnCollection).Find(connQuery).Iter() + uniqueConnections := ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() var uconn structure.UniqueConnection for uniqueConnections.Next(&uconn) { totalBytes += uconn.TotalBytes totalConnections += uconn.ConnectionCount uniqueConnCount++ } + if uniqueConnections.Err() != nil { + fmt.Println(uniqueConnections.Err()) + } blURL.Connections = totalConnections blURL.UniqueConnections = uniqueConnCount blURL.TotalBytes = totalBytes From 6c921b9c90313ed6f2f47ec1daf331d39614e19a Mon Sep 17 00:00:00 
2001 From: Hannah Date: Mon, 28 Aug 2017 10:37:28 -0600 Subject: [PATCH 047/117] Implemented a new solution for false positives that should actually eliminate them, however, this solution is very slow --- analysis/blacklist/urls.go | 103 +++++++++++++++---------------- datatypes/structure/structure.go | 13 +++- 2 files changed, 62 insertions(+), 54 deletions(-) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index dc89bb9d..30bda0b9 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -3,7 +3,6 @@ package blacklist import ( "errors" "strings" - "fmt" "github.com/ocmdev/rita-bl/list" @@ -55,7 +54,7 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, url, res.DB.GetSelectedDB(), res.Config.T.Urls.UrlsTable, - res.Config.T.Structure.UniqueConnTable, + res.Config.T.Structure.ConnTable, res.Config.T.Structure.HTTPTable, ssn, prefix, @@ -99,98 +98,98 @@ func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, } func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, - urlCollection, uconnCollection string, httpCollection string, + urlCollection, connCollection string, httpCollection string, ssn *mgo.Session, prefix string) error { var urlQuery bson.M urlTrimmed := strings.TrimPrefix(longURL, prefix) resourceIdx := strings.Index(urlTrimmed, "/") + if resourceIdx == -1 { return errors.New("url does not specify a resource") } + host := urlTrimmed[:resourceIdx] resource := urlTrimmed[resourceIdx:] urlQuery = bson.M{"url": host, "uri": resource} var blURLFull urls.URL err := ssn.DB(db).C(urlCollection).Find(urlQuery).One(&blURLFull) + if err != nil { return err } + blURL.Host = host blURL.Resource = resource + + // Find source ips that connected to this full url, and join with the conn table + // on the uid httpPipeline := []bson.D{ { - { "$match", bson.M{"host": host, "uri": resource} }, + {"$match", bson.M{"host": host, "uri": resource}}, }, { - { "$group", bson.M{ - "_id": bson.D{ - { "h", 
"$host" }, - { "u", "$uri" }, - { "s", "$id_origin_h" }, - { "d", "$id_resp_h" }, - }, - "host": bson.M{ "$first": "$host" }, - "uri": bson.M{ "$first": "$uri" }, - "src": bson.M{ "$first": "$id_origin_h" }, - "dst": bson.M{ "$first": "$id_resp_h" }, + {"$project", bson.M{ + "_id": 0, + "uid": 1, }}, }, { - { "$lookup", bson.M{ - "from": uconnCollection, - "localField": "dst", - "foreignField": "dst", - "as": "uconn", + {"$lookup", bson.M{ + "from": connCollection, + "localField": "uid", + "foreignField": "uid", + "as": "conn", }}, }, { - { "$unwind", "$uconn" }, + {"$unwind", "$conn"}, }, { - { "$redact", bson.M{ - "$cond": bson.M{ - "if": bson.M{ - "$eq": []interface{}{ - "$src", - "$uconn.src", - }, - }, - "then": "$$KEEP", - "else": "$$PRUNE", - }, + {"$project", bson.M{ + "orig_bytes": "$conn.orig_bytes", + "resp_bytes": "$conn.resp_bytes", + "src": "$conn.id_origin_h", }}, }, { - { "$project", bson.M{ - "id": 1, - "connection_count": "$uconn.connection_count", - "src": 1, - "dst": 1, - "local_src": "$uconn.local_src", - "local_dst": "$uconn.local_dst", - "total_bytes": "$uconn.total_bytes", - "average_bytes": "$uconn.average_bytes", - "total_duration": "$uconn.total_duration", + {"$group", bson.M{ + "_id": "src", + "total_bytes": bson.D{ + {"$sum", bson.D{ + {"$add", []interface{}{ + "$orig_bytes", + "$resp_bytes", + }}, + }}, + }, + "total_conn": bson.D{ + {"$sum", bson.M{ + "$add": 1, + }}, + }, }}, }, } var totalBytes int var totalConnections int - var uniqueConnCount int - uniqueConnections := ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() - var uconn structure.UniqueConnection - for uniqueConnections.Next(&uconn) { - totalBytes += uconn.TotalBytes - totalConnections += uconn.ConnectionCount - uniqueConnCount++ + var uConnCount int + connIter := ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() + var srcGroup structure.SrcIPGroup + + for connIter.Next(&srcGroup) { + totalBytes += srcGroup.TotalBytes + totalConnections += srcGroup.TotalConns + 
uConnCount++ } - if uniqueConnections.Err() != nil { - fmt.Println(uniqueConnections.Err()) + + if connIter.Err() != nil { + return connIter.Err() } + blURL.Connections = totalConnections - blURL.UniqueConnections = uniqueConnCount + blURL.UniqueConnections = uConnCount blURL.TotalBytes = totalBytes return nil diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 137adeaf..7aa8c5f6 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -6,8 +6,8 @@ import ( type ( Host struct { - Ip string `bson:"ip"` - Local bool `bson:"local"` + Ip string `bson:"ip"` + Local bool `bson:"local"` } UniqueConnection struct { @@ -21,4 +21,13 @@ type ( AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } + + //srcIPGroup holds information used to find the number of unique connections, + //total connections, and total bytes for a blacklisted url, but are grouped by + //the ip that connected to the blacklisted url + SrcIPGroup struct { + ID bson.ObjectId `bson:"_id,omitempty"` + TotalBytes int `bson:"total_bytes"` + TotalConns int `bson:"total_conn"` + } ) From 964e9aeb20e0c36602fc1db9e6495490f275a921 Mon Sep 17 00:00:00 2001 From: Hannah Date: Tue, 5 Sep 2017 17:21:14 -0600 Subject: [PATCH 048/117] Indexed uid to improve speed of blacklisted url false positive solution --- parser/parsetypes/conn.go | 2 +- parser/parsetypes/http.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index 644977a2..901f2886 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -63,7 +63,7 @@ func (in *Conn) TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection func (in *Conn) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "-duration", "ts"} + return []string{"$hashed:id_origin_h", 
"$hashed:id_resp_h", "-duration", "ts", "uid"} } //Normalize pre processes this type of entry before it is imported by rita diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index e5711af1..a4e017ee 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -84,7 +84,7 @@ func (line *HTTP) TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection func (line *HTTP) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "$hashed:user_agent"} + return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "$hashed:user_agent", "uid"} } // Normalize fixes up absolute uri's as read by bro to be relative From 9bf847ce92da44752b5e54ec45586bfab5a04ef1 Mon Sep 17 00:00:00 2001 From: Hannah Date: Thu, 21 Sep 2017 14:06:00 -0600 Subject: [PATCH 049/117] Fixed some simple style errors e.g. Uid -> UID --- analysis/beacon/beacon.go | 2 +- parser/parsetypes/conn.go | 2 +- parser/parsetypes/dns.go | 2 +- parser/parsetypes/http.go | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 5d2d3e74..5113f45f 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -150,7 +150,7 @@ func (t *Beacon) run() { } for localIter.Next(&host) { - t.collectChannel <- host.Ip + t.collectChannel <- host.IP } t.log.Debug("Finding all source / destination pairs for analysis") close(t.collectChannel) diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index 901f2886..75a3ad77 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -12,7 +12,7 @@ type ( ID bson.ObjectId `bson:"_id,omitempty"` // TimeStamp of this connection TimeStamp int64 `bson:"ts" bro:"ts" brotype:"time"` - // Uid is the Unique Id for this connection (generated by Bro) + // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` 
// Source is the source address for this connection Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` diff --git a/parser/parsetypes/dns.go b/parser/parsetypes/dns.go index c71f6c92..919decfa 100644 --- a/parser/parsetypes/dns.go +++ b/parser/parsetypes/dns.go @@ -11,7 +11,7 @@ type DNS struct { ID bson.ObjectId `bson:"_id,omitempty"` // TimeStamp of this connection TimeStamp int64 `bson:"ts" bro:"ts" brotype:"time"` - // Uid is the Unique Id for this connection (generated by Bro) + // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index a4e017ee..28d373ae 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -15,7 +15,7 @@ type HTTP struct { ID bson.ObjectId `bson:"_id,omitempty"` // TimeStamp of this connection TimeStamp int64 `bson:"ts" bro:"ts" brotype:"time"` - // Uid is the Unique Id for this connection (generated by Bro) + // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` From b1f34982861ff141f9184122ec029fe863504f78 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Tue, 3 Oct 2017 20:59:51 -0600 Subject: [PATCH 050/117] Added integer representations of the ip addresses in the hosts table --- analysis/structure/hosts.go | 148 +++++++++++++++++++++++++++++++ datatypes/structure/structure.go | 6 ++ 2 files changed, 154 insertions(+) diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index 27b9f57b..0645af98 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -1,8 +1,13 @@ package structure import ( + "encoding/binary" + "net" + 
"github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" + structureTypes "github.com/ocmdev/rita/datatypes/structure" + log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) @@ -27,8 +32,13 @@ func BuildHostsCollection(res *database.Resources) { defer ssn.Close() res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) + setIPv4Binary(res.DB.GetSelectedDB(), newCollectionName, ssn, res.Log) + setIPv6Binary(res.DB.GetSelectedDB(), newCollectionName, ssn, res.Log) } +//getHosts aggregates the individual hosts from the conn collection and +//labels them as private or public as well as ipv4 or ipv6. The aggregation +//includes padding for a binary encoding of the ip address. func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { // Name of source collection which will be aggregated into the new collection sourceCollectionName := conf.T.Structure.ConnTable @@ -40,6 +50,7 @@ func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { keys := []mgo.Index{ {Key: []string{"ip"}, Unique: true}, {Key: []string{"local"}}, + {Key: []string{"ipv4"}}, } // Aggregation script @@ -75,6 +86,26 @@ func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { {"_id", 0}, {"ip", "$_id"}, {"local", 1}, + {"ipv4", bson.D{ + {"$cond", bson.D{ + {"if", bson.D{ + {"$eq", []interface{}{ + bson.D{ + {"$indexOfCP", []interface{}{ + "$_id", ":", + }}, + }, + -1, + }}, + }}, + {"then", bson.D{ + {"$literal", true}, + }}, + {"else", bson.D{ + {"$literal", false}, + }}, + }}, + }}, }}, }, { @@ -84,3 +115,120 @@ func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { return sourceCollectionName, newCollectionName, keys, pipeline } + +//setIPv4Binary sets the binary data for the ipv4 addresses in the dataset +func setIPv4Binary(selectedDB string, collectionName string, + session *mgo.Session, logger *log.Logger) { + coll := session.DB(selectedDB).C(collectionName) + + i := 0 + + var 
host structureTypes.Host + iter := coll.Find(bson.D{{"ipv4", true}}).Snapshot().Iter() //nolint: vet + + bulkUpdate := coll.Bulk() + + for iter.Next(&host) { + //1000 is the most a MongoDB bulk update operation can handle + if i == 1000 { + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } + + bulkUpdate = coll.Bulk() + i = 0 + } + + ipv4 := net.ParseIP(host.Ip) + ipv4Binary := uint64(binary.BigEndian.Uint32(ipv4[12:16])) + + //nolint: vet + bulkUpdate.Update( + bson.D{ + {"_id", host.ID}, + }, + bson.D{ + {"$set", bson.D{ + {"ipv4_binary", ipv4Binary}, + }}, + }, + ) + i++ + } + + //guaranteed to be at least one in the array + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } +} + +//setIPv6Binary sets the binary data for the ipv6 addresses in the dataset +func setIPv6Binary(selectedDB string, collectionName string, + session *mgo.Session, logger *log.Logger) { + coll := session.DB(selectedDB).C(collectionName) + + i := 0 + + var host structureTypes.Host + iter := coll.Find(bson.D{{"ipv4", false}}).Snapshot().Iter() //nolint: vet + + bulkUpdate := coll.Bulk() + + for iter.Next(&host) { + //1000 is the most a MongoDB bulk update operation can handle + if i == 1000 { + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } + + bulkUpdate = coll.Bulk() + i = 0 + } + + ipv6 := net.ParseIP(host.Ip) + ipv6Binary1 := uint64(binary.BigEndian.Uint32(ipv6[0:4])) + ipv6Binary2 := uint64(binary.BigEndian.Uint32(ipv6[4:8])) + ipv6Binary3 := uint64(binary.BigEndian.Uint32(ipv6[8:12])) + ipv6Binary4 := uint64(binary.BigEndian.Uint32(ipv6[12:16])) + 
+ //nolint: vet + bulkUpdate.Update( + bson.D{ + {"_id", host.ID}, + }, + bson.D{ + {"$set", bson.D{ + {"ipv6_binary", bson.D{ + {"1", ipv6Binary1}, + {"2", ipv6Binary2}, + {"3", ipv6Binary3}, + {"4", ipv6Binary4}, + }}, + }}, + }, + ) + + i++ + } + + //guaranteed to be at least one in the array + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } +} diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 137adeaf..44f6caec 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -5,11 +5,17 @@ import ( ) type ( + //Host describes a computer interface found in the + //network traffic being analyzed Host struct { + ID bson.ObjectId `bson:"_id,omitempty"` Ip string `bson:"ip"` Local bool `bson:"local"` + IPv4 bool `bson:"ipv6"` } + //UniqueConnection describes a pair of computer interfaces which contacted + //each other over the observation period UniqueConnection struct { ID bson.ObjectId `bson:"_id,omitempty"` ConnectionCount int `bson:"connection_count"` From b371275459b2a7007dabbe890c4ae73363d06257 Mon Sep 17 00:00:00 2001 From: Hannah Date: Thu, 17 Aug 2017 15:06:13 -0600 Subject: [PATCH 051/117] Changed blacklisted urls to get more accurate connection information --- analysis/blacklist/urls.go | 69 +++++++++++++++++++++++++++++++++++--- 1 file changed, 65 insertions(+), 4 deletions(-) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 7a7757d8..dc89bb9d 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -3,6 +3,7 @@ package blacklist import ( "errors" "strings" + "fmt" "github.com/ocmdev/rita-bl/list" @@ -55,6 +56,7 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, res.DB.GetSelectedDB(), res.Config.T.Urls.UrlsTable, res.Config.T.Structure.UniqueConnTable, + 
res.Config.T.Structure.HTTPTable, ssn, prefix, ) @@ -97,7 +99,8 @@ func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, } func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, - urlCollection, uconnCollection string, ssn *mgo.Session, prefix string) error { + urlCollection, uconnCollection string, httpCollection string, + ssn *mgo.Session, prefix string) error { var urlQuery bson.M urlTrimmed := strings.TrimPrefix(longURL, prefix) resourceIdx := strings.Index(urlTrimmed, "/") @@ -115,19 +118,77 @@ func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, } blURL.Host = host blURL.Resource = resource - - connQuery := bson.M{"dst": bson.M{"$in": blURLFull.IPs}} + httpPipeline := []bson.D{ + { + { "$match", bson.M{"host": host, "uri": resource} }, + }, + { + { "$group", bson.M{ + "_id": bson.D{ + { "h", "$host" }, + { "u", "$uri" }, + { "s", "$id_origin_h" }, + { "d", "$id_resp_h" }, + }, + "host": bson.M{ "$first": "$host" }, + "uri": bson.M{ "$first": "$uri" }, + "src": bson.M{ "$first": "$id_origin_h" }, + "dst": bson.M{ "$first": "$id_resp_h" }, + }}, + }, + { + { "$lookup", bson.M{ + "from": uconnCollection, + "localField": "dst", + "foreignField": "dst", + "as": "uconn", + }}, + }, + { + { "$unwind", "$uconn" }, + }, + { + { "$redact", bson.M{ + "$cond": bson.M{ + "if": bson.M{ + "$eq": []interface{}{ + "$src", + "$uconn.src", + }, + }, + "then": "$$KEEP", + "else": "$$PRUNE", + }, + }}, + }, + { + { "$project", bson.M{ + "id": 1, + "connection_count": "$uconn.connection_count", + "src": 1, + "dst": 1, + "local_src": "$uconn.local_src", + "local_dst": "$uconn.local_dst", + "total_bytes": "$uconn.total_bytes", + "average_bytes": "$uconn.average_bytes", + "total_duration": "$uconn.total_duration", + }}, + }, + } var totalBytes int var totalConnections int var uniqueConnCount int - uniqueConnections := ssn.DB(db).C(uconnCollection).Find(connQuery).Iter() + uniqueConnections := ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() 
var uconn structure.UniqueConnection for uniqueConnections.Next(&uconn) { totalBytes += uconn.TotalBytes totalConnections += uconn.ConnectionCount uniqueConnCount++ } + if uniqueConnections.Err() != nil { + fmt.Println(uniqueConnections.Err()) + } blURL.Connections = totalConnections blURL.UniqueConnections = uniqueConnCount blURL.TotalBytes = totalBytes From 88252e528d2f2c59d78d17bd996666df25001436 Mon Sep 17 00:00:00 2001 From: Hannah Date: Mon, 28 Aug 2017 10:37:28 -0600 Subject: [PATCH 052/117] Implemented a new solution for false positives that should actually eliminate them, however, this solution is very slow --- analysis/blacklist/urls.go | 103 +++++++++++++++---------------- datatypes/structure/structure.go | 9 +++ 2 files changed, 60 insertions(+), 52 deletions(-) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index dc89bb9d..30bda0b9 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -3,7 +3,6 @@ package blacklist import ( "errors" "strings" - "fmt" "github.com/ocmdev/rita-bl/list" @@ -55,7 +54,7 @@ func buildBlacklistedURLs(urls *mgo.Iter, res *database.Resources, url, res.DB.GetSelectedDB(), res.Config.T.Urls.UrlsTable, - res.Config.T.Structure.UniqueConnTable, + res.Config.T.Structure.ConnTable, res.Config.T.Structure.HTTPTable, ssn, prefix, @@ -99,98 +98,98 @@ func checkRitaBlacklistURLs(urls *mgo.Iter, blHandle *bl.Blacklist, } func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, - urlCollection, uconnCollection string, httpCollection string, + urlCollection, connCollection string, httpCollection string, ssn *mgo.Session, prefix string) error { var urlQuery bson.M urlTrimmed := strings.TrimPrefix(longURL, prefix) resourceIdx := strings.Index(urlTrimmed, "/") + if resourceIdx == -1 { return errors.New("url does not specify a resource") } + host := urlTrimmed[:resourceIdx] resource := urlTrimmed[resourceIdx:] urlQuery = bson.M{"url": host, "uri": resource} var blURLFull urls.URL err := 
ssn.DB(db).C(urlCollection).Find(urlQuery).One(&blURLFull) + if err != nil { return err } + blURL.Host = host blURL.Resource = resource + + // Find source ips that connected to this full url, and join with the conn table + // on the uid httpPipeline := []bson.D{ { - { "$match", bson.M{"host": host, "uri": resource} }, + {"$match", bson.M{"host": host, "uri": resource}}, }, { - { "$group", bson.M{ - "_id": bson.D{ - { "h", "$host" }, - { "u", "$uri" }, - { "s", "$id_origin_h" }, - { "d", "$id_resp_h" }, - }, - "host": bson.M{ "$first": "$host" }, - "uri": bson.M{ "$first": "$uri" }, - "src": bson.M{ "$first": "$id_origin_h" }, - "dst": bson.M{ "$first": "$id_resp_h" }, + {"$project", bson.M{ + "_id": 0, + "uid": 1, }}, }, { - { "$lookup", bson.M{ - "from": uconnCollection, - "localField": "dst", - "foreignField": "dst", - "as": "uconn", + {"$lookup", bson.M{ + "from": connCollection, + "localField": "uid", + "foreignField": "uid", + "as": "conn", }}, }, { - { "$unwind", "$uconn" }, + {"$unwind", "$conn"}, }, { - { "$redact", bson.M{ - "$cond": bson.M{ - "if": bson.M{ - "$eq": []interface{}{ - "$src", - "$uconn.src", - }, - }, - "then": "$$KEEP", - "else": "$$PRUNE", - }, + {"$project", bson.M{ + "orig_bytes": "$conn.orig_bytes", + "resp_bytes": "$conn.resp_bytes", + "src": "$conn.id_origin_h", }}, }, { - { "$project", bson.M{ - "id": 1, - "connection_count": "$uconn.connection_count", - "src": 1, - "dst": 1, - "local_src": "$uconn.local_src", - "local_dst": "$uconn.local_dst", - "total_bytes": "$uconn.total_bytes", - "average_bytes": "$uconn.average_bytes", - "total_duration": "$uconn.total_duration", + {"$group", bson.M{ + "_id": "src", + "total_bytes": bson.D{ + {"$sum", bson.D{ + {"$add", []interface{}{ + "$orig_bytes", + "$resp_bytes", + }}, + }}, + }, + "total_conn": bson.D{ + {"$sum", bson.M{ + "$add": 1, + }}, + }, }}, }, } var totalBytes int var totalConnections int - var uniqueConnCount int - uniqueConnections := 
ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() - var uconn structure.UniqueConnection - for uniqueConnections.Next(&uconn) { - totalBytes += uconn.TotalBytes - totalConnections += uconn.ConnectionCount - uniqueConnCount++ + var uConnCount int + connIter := ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() + var srcGroup structure.SrcIPGroup + + for connIter.Next(&srcGroup) { + totalBytes += srcGroup.TotalBytes + totalConnections += srcGroup.TotalConns + uConnCount++ } - if uniqueConnections.Err() != nil { - fmt.Println(uniqueConnections.Err()) + + if connIter.Err() != nil { + return connIter.Err() } + blURL.Connections = totalConnections - blURL.UniqueConnections = uniqueConnCount + blURL.UniqueConnections = uConnCount blURL.TotalBytes = totalBytes return nil diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index 44f6caec..a21cd6b3 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -27,4 +27,13 @@ type ( AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } + + //srcIPGroup holds information used to find the number of unique connections, + //total connections, and total bytes for a blacklisted url, but are grouped by + //the ip that connected to the blacklisted url + SrcIPGroup struct { + ID bson.ObjectId `bson:"_id,omitempty"` + TotalBytes int `bson:"total_bytes"` + TotalConns int `bson:"total_conn"` + } ) From af2c7f22b2293feabd8a1134ad104fb227e35f00 Mon Sep 17 00:00:00 2001 From: Hannah Date: Tue, 5 Sep 2017 17:21:14 -0600 Subject: [PATCH 053/117] Indexed uid to improve speed of blacklisted url false positive solution --- parser/parsetypes/conn.go | 2 +- parser/parsetypes/http.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index 644977a2..901f2886 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -63,7 +63,7 @@ func (in *Conn) 
TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection func (in *Conn) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "-duration", "ts"} + return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "-duration", "ts", "uid"} } //Normalize pre processes this type of entry before it is imported by rita diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index e5711af1..a4e017ee 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -84,7 +84,7 @@ func (line *HTTP) TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection func (line *HTTP) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "$hashed:user_agent"} + return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "$hashed:user_agent", "uid"} } // Normalize fixes up absolute uri's as read by bro to be relative From cda4562cf70857f266c729240b8175c3dd261b4f Mon Sep 17 00:00:00 2001 From: Hannah Date: Thu, 21 Sep 2017 14:06:00 -0600 Subject: [PATCH 054/117] Fixed some simple style errors e.g. 
Uid -> UID --- analysis/beacon/beacon.go | 2 +- parser/parsetypes/conn.go | 2 +- parser/parsetypes/dns.go | 2 +- parser/parsetypes/http.go | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 5d2d3e74..5113f45f 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -150,7 +150,7 @@ func (t *Beacon) run() { } for localIter.Next(&host) { - t.collectChannel <- host.Ip + t.collectChannel <- host.IP } t.log.Debug("Finding all source / destination pairs for analysis") close(t.collectChannel) diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index 901f2886..75a3ad77 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -12,7 +12,7 @@ type ( ID bson.ObjectId `bson:"_id,omitempty"` // TimeStamp of this connection TimeStamp int64 `bson:"ts" bro:"ts" brotype:"time"` - // Uid is the Unique Id for this connection (generated by Bro) + // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` diff --git a/parser/parsetypes/dns.go b/parser/parsetypes/dns.go index c71f6c92..919decfa 100644 --- a/parser/parsetypes/dns.go +++ b/parser/parsetypes/dns.go @@ -11,7 +11,7 @@ type DNS struct { ID bson.ObjectId `bson:"_id,omitempty"` // TimeStamp of this connection TimeStamp int64 `bson:"ts" bro:"ts" brotype:"time"` - // Uid is the Unique Id for this connection (generated by Bro) + // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index a4e017ee..28d373ae 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -15,7 +15,7 @@ 
type HTTP struct { ID bson.ObjectId `bson:"_id,omitempty"` // TimeStamp of this connection TimeStamp int64 `bson:"ts" bro:"ts" brotype:"time"` - // Uid is the Unique Id for this connection (generated by Bro) + // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` From 596eafe3ed5c9d0a450b1b792a6691932aee72db Mon Sep 17 00:00:00 2001 From: Hannah Date: Tue, 7 Nov 2017 16:44:26 -0700 Subject: [PATCH 055/117] Moved SrcIPGroup struct out of structure datatypes and into file it was used in --- analysis/blacklist/urls.go | 22 ++++++++++++++++------ datatypes/structure/structure.go | 9 --------- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 30bda0b9..21a1e84d 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -9,17 +9,27 @@ import ( bl "github.com/ocmdev/rita-bl" "github.com/ocmdev/rita/database" data "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" "github.com/ocmdev/rita/datatypes/urls" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) -type urlShort struct { - URL string `bson:"url"` - URI string `bson:"uri"` -} +type ( + urlShort struct { + URL string `bson:"url"` + URI string `bson:"uri"` + } + + //srcIPGroup holds information used to find the number of unique connections, + //total connections, and total bytes for a blacklisted url, but are grouped by + //the ip that connected to the blacklisted url + SrcIPGroup struct { + ID bson.ObjectId `bson:"_id,omitempty"` + TotalBytes int `bson:"total_bytes"` + TotalConns int `bson:"total_conn"` + } +) //buildBlacklistedURLs builds a set of blacklsited urls from the //iterator provided, the system config, a handle to rita-blacklist, @@ -176,7 +186,7 @@ func 
fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, var totalConnections int var uConnCount int connIter := ssn.DB(db).C(httpCollection).Pipe(httpPipeline).Iter() - var srcGroup structure.SrcIPGroup + var srcGroup SrcIPGroup for connIter.Next(&srcGroup) { totalBytes += srcGroup.TotalBytes diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index a21cd6b3..44f6caec 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -27,13 +27,4 @@ type ( AverageBytes float32 `bson:"average_bytes"` TotalDuration float32 `bson:"total_duration"` } - - //srcIPGroup holds information used to find the number of unique connections, - //total connections, and total bytes for a blacklisted url, but are grouped by - //the ip that connected to the blacklisted url - SrcIPGroup struct { - ID bson.ObjectId `bson:"_id,omitempty"` - TotalBytes int `bson:"total_bytes"` - TotalConns int `bson:"total_conn"` - } ) From 5d566d61f53695374355a3b296aa1681d96059b3 Mon Sep 17 00:00:00 2001 From: Hannah Date: Tue, 7 Nov 2017 17:21:04 -0700 Subject: [PATCH 056/117] Changed Ip to IP --- analysis/structure/hosts.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index 0645af98..ca641a5a 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -143,7 +143,7 @@ func setIPv4Binary(selectedDB string, collectionName string, i = 0 } - ipv4 := net.ParseIP(host.Ip) + ipv4 := net.ParseIP(host.IP) ipv4Binary := uint64(binary.BigEndian.Uint32(ipv4[12:16])) //nolint: vet @@ -197,7 +197,7 @@ func setIPv6Binary(selectedDB string, collectionName string, i = 0 } - ipv6 := net.ParseIP(host.Ip) + ipv6 := net.ParseIP(host.IP) ipv6Binary1 := uint64(binary.BigEndian.Uint32(ipv6[0:4])) ipv6Binary2 := uint64(binary.BigEndian.Uint32(ipv6[4:8])) ipv6Binary3 := uint64(binary.BigEndian.Uint32(ipv6[8:12])) From e3ba0e57a2745e67816dc7a18efcafb9dbf82875 Mon Sep 17 
00:00:00 2001 From: Ethan Robish Date: Tue, 14 Nov 2017 14:01:43 -0600 Subject: [PATCH 057/117] Updating Dockerfile to include tables.yaml --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index 01f0d030..84337995 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,5 +18,8 @@ COPY . . RUN make FROM alpine:latest + +RUN mkdir /root/.rita +COPY --from=rita-builder /go/src/github.com/ocmdev/rita/etc/tables.yaml /root/.rita/ COPY --from=rita-builder /go/src/github.com/ocmdev/rita/rita . ENTRYPOINT ["./rita"] From df03bbbc652be82e5601e9e7b1066d74a8db293b Mon Sep 17 00:00:00 2001 From: logan Date: Wed, 15 Nov 2017 18:27:19 -0700 Subject: [PATCH 058/117] Fix interactive flag hack, fix flipped condition --- install.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/install.sh b/install.sh index 725979d4..930479a6 100755 --- a/install.sh +++ b/install.sh @@ -19,12 +19,12 @@ set -o pipefail OLD_PS1=$PS1 PS1=" " # Hack the interactive flag to get around other .bashrc's -set -i +set +i source $HOME/.bashrc # Clean up our hacks -set +i +set -i PS1=$OLD_PS1 unset OLD_PS1 @@ -109,7 +109,7 @@ __install() { # Check if RITA is already installed, if so ask if this is a re-install if [ ! -z $(command -v rita) ] || - [ -f $GOPATH/bin/rita ] + [ -d $HOME/.rita ] then printf "[+] RITA is already installed.\n" read -p "[-] Would you like to erase it and re-install? [y/n] " -r @@ -213,7 +213,7 @@ __install() { # If the user is using sudo, give ownership to the sudo user - if [ -z ${SUDO_USER+x} ] + if [ ! 
-z ${SUDO_USER+x} ] then chown -R $SUDO_USER:$SUDO_USER $HOME/go chown -R $SUDO_USER:$SUDO_USER $HOME/.rita From ddabf44154689d46f09adbc94001e1e0a9bab687 Mon Sep 17 00:00:00 2001 From: samuel carroll Date: Mon, 11 Dec 2017 15:44:15 -0700 Subject: [PATCH 059/117] Adding ,nil to fix the version issue with the newest changes to lfshook --- database/resources.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database/resources.go b/database/resources.go index 3d15088d..07597b78 100644 --- a/database/resources.go +++ b/database/resources.go @@ -154,5 +154,5 @@ func addFileLogger(logger *log.Logger, logPath string) { log.ErrorLevel: path.Join(logPath, "error.log"), log.FatalLevel: path.Join(logPath, "fatal.log"), log.PanicLevel: path.Join(logPath, "panic.log"), - })) + }, nil)) } From 04e9de0025556359f7fcd524f5ec8d9c4266dc3a Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 14 Dec 2017 18:09:17 -0700 Subject: [PATCH 060/117] Fix interactive shell hacks, implemented per branch installer --- install.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index 930479a6..a6fc9fe9 100755 --- a/install.sh +++ b/install.sh @@ -19,12 +19,12 @@ set -o pipefail OLD_PS1=$PS1 PS1=" " # Hack the interactive flag to get around other .bashrc's -set +i +set -i source $HOME/.bashrc # Clean up our hacks -set -i +set +i PS1=$OLD_PS1 unset OLD_PS1 @@ -196,7 +196,10 @@ __install() { apt-get install -y mongodb-org > /dev/null & __load "[+] Installing MongoDB" ( # Build RITA - go get github.com/ocmdev/rita + mkdir -p $GOPATH/src/github.com/ocmdev/rita + # Get the install script's directory in case it's run from elsewhere + RITASRC="$(dirname "$(realpath ${0})")" + cp -R $RITASRC/. 
$GOPATH/src/github.com/ocmdev/rita/ cd $GOPATH/src/github.com/ocmdev/rita make install > /dev/null ) & __load "[+] Installing RITA" From 58b229c244af24681f8ff84bb5cba225ecce7f64 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 14 Dec 2017 19:30:36 -0700 Subject: [PATCH 061/117] Fixed bro installation on ubuntu 14.04 --- install.sh | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/install.sh b/install.sh index a6fc9fe9..42e24899 100755 --- a/install.sh +++ b/install.sh @@ -124,10 +124,11 @@ __install() { __prep # Install installation dependencies - apt-get update > /dev/null & __load "[+] Updating apt" + apt-get update > /dev/null 2>&1 & __load "[+] Updating apt" - apt-get install -y git wget make lsb-release > /dev/null & \ - __load "[+] Installing git, wget, make, and lsb-release" + #Extracting templates from packages made it through... + apt-get install -y git wget make realpath lsb-release > /dev/null 2>&1 & \ + __load "[+] Installing git, wget, make, realpath, and lsb-release" # Install Bro IDS printf "[+] Checking if Bro IDS is installed... 
" @@ -136,8 +137,14 @@ __install() { [ $(dpkg-query -W -f='${Status}' securityonion-bro 2>/dev/null | grep -c "ok installed") -eq 0 ] then printf "\n" - apt-get install -y bro broctl bro-aux > /dev/null & \ - __load "\t[+] Installing Bro IDS" + ( + echo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" >> /etc/apt/sources.list.d/bro.list + wget -nv --quiet "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" -O Release.key + apt-key add - < Release.key > /dev/null 2>&1 + rm Release.key + apt-get update > /dev/null 2>&1 + apt-get install -y bro broctl> /dev/null 2>&1 + ) & __load "\t[+] Installing Bro IDS" else printf "$_SUCCESS\n" fi @@ -192,8 +199,8 @@ __install() { echo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.4.list - apt-get update > /dev/null & __load "[+] Updating apt" - apt-get install -y mongodb-org > /dev/null & __load "[+] Installing MongoDB" + apt-get update > /dev/null 2>&1 & __load "[+] Updating apt" + apt-get install -y mongodb-org > /dev/null 2>&1 & __load "[+] Installing MongoDB" ( # Build RITA mkdir -p $GOPATH/src/github.com/ocmdev/rita From febab4aa9fd4361bb308ed24a4c1cf2663baa49d Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 18 Dec 2017 14:22:29 -0700 Subject: [PATCH 062/117] Support CentOS --- install.sh | 261 ++++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 187 insertions(+), 74 deletions(-) diff --git a/install.sh b/install.sh index 42e24899..8d589c19 100755 --- a/install.sh +++ b/install.sh @@ -9,14 +9,23 @@ _SUCCESS="\e[92mSUCCESS\e[0m" #Error handling #Kill 0 to kill subshells as well -trap "printf '\n[!] Installation $_FAILED!\n'; kill 0" ERR INT +__err() { + printf "\n[!] 
Installation $_FAILED!\n" + kill 0 +} +trap __err ERR INT set -o errexit set -o errtrace set -o pipefail +# Fix $HOME for users under standard sudo +if [ ! -z ${SUDO_USER+x} ]; then + HOME="$( getent passwd $SUDO_USER | cut -d: -f6 )" +fi + # Make sure to source the latest .bashrc # Hack the PS1 variable to get around ubuntu .bashrc -OLD_PS1=$PS1 +_OLD_PS1=$PS1 PS1=" " # Hack the interactive flag to get around other .bashrc's set -i @@ -25,9 +34,8 @@ source $HOME/.bashrc # Clean up our hacks set +i -PS1=$OLD_PS1 -unset OLD_PS1 - +PS1=$_OLD_PS1 +unset _OLD_PS1 __help() { __title @@ -43,13 +51,13 @@ Options: HEREDOC } -__prep() { +__explain() { cat < /dev/null 2>&1 & __load "[+] Updating apt" - - #Extracting templates from packages made it through... - apt-get install -y git wget make realpath lsb-release > /dev/null 2>&1 & \ - __load "[+] Installing git, wget, make, realpath, and lsb-release" +__setOS() { + _OS="$(lsb_release -is)" + if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then + echo "Unsupported operating system" + _err + fi +} - # Install Bro IDS - printf "[+] Checking if Bro IDS is installed... " +__install_packages() { + while [ ! 
-z "$1" ]; do + local pkg="$1" + # Translation layer + # apt -> yum + if [ $_PKG_MGR -eq 2 ]; then + case "$pkg" in + "lsb-release") + pkg="redhat-lsb-core" + ;; + realpath) + pkg="coreutils" + ;; + esac + fi + eval $_PKG_INSTALL $pkg >/dev/null 2>&1 + shift + done +} - if [ $(dpkg-query -W -f='${Status}' bro 2>/dev/null | grep -c "ok installed") -eq 0 ] && - [ $(dpkg-query -W -f='${Status}' securityonion-bro 2>/dev/null | grep -c "ok installed") -eq 0 ] - then - printf "\n" - ( - echo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" >> /etc/apt/sources.list.d/bro.list - wget -nv --quiet "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" -O Release.key - apt-key add - < Release.key > /dev/null 2>&1 - rm Release.key - apt-get update > /dev/null 2>&1 - apt-get install -y bro broctl> /dev/null 2>&1 - ) & __load "\t[+] Installing Bro IDS" - else - printf "$_SUCCESS\n" +__freshen_packages() { + if [ $_PKG_MGR -eq 1 ]; then #apt + apt-get -qq update > /dev/null 2>&1 + elif [ $_PKG_MGR -eq 2 ]; then #yum + yum -q makecache > /dev/null 2>&1 fi +} - # Install Go - printf "[+] Checking if Go is installed...\n" +__package_installed() { + #Returns true if the package is installed, false otherwise + if [ $_PKG_MGR -eq 1 ]; then # apt + dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -q "ok installed" + elif [ $_PKG_MGR -eq 2 ]; then # yum and dnf + rpm -q "$1" >/dev/null + fi +} - # Check if go is not available in the path - if [ -z $(command -v go) ] - then - # Check if go is available in the standard location - if [ ! -e "/usr/local/go" ] - then - ( # golang most recent update - wget -q https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz - tar -zxf go1.8.3.linux-amd64.tar.gz -C /usr/local/ - rm go1.8.3.linux-amd64.tar.gz - echo 'export PATH=$PATH:/usr/local/go/bin' >> $HOME/.bashrc - ) & __load "\t[+] Installing Go" +__add_deb_repo() { + if [ ! 
-s "/etc/apt/sources.list.d/$2.list" ]; then + if [ ! -z "$3" ]; then + curl -s -L "$3" | apt-key add - > /dev/null 2>&1 fi + echo "$1" > "/etc/apt/sources.list.d/$2.list" + __freshen_packages + fi +} - # Add go to the path - export PATH="$PATH:/usr/local/go/bin" - else +__add_rpm_repo() { + yum-config-manager -q --add-repo=$1 > /dev/null 2>&1 +} + +__check_go_version() { + case `go version | awk '{print $3}'` in + go1|go1.2*|go1.3*|go1.4*|go1.5*|go1.6*|"") echo -e "\e[93m\t[!] WARNING: Go has been detected on this system.\e[0m \tIf you installed Go with apt, make sure your Go installation is up \tto date with 'go version'. RITA has only been tested with golang @@ -178,35 +203,123 @@ __install() { \tversion for you! " sleep 10s + ;; + esac +} + +__install_go() { + # Check if go isn't available in the path + printf "[+] Checking if Go is installed...\n" + if [ ! $(command -v go) ]; then + if [ ! -x "/usr/local/go/bin/go" ]; then + ( + curl -s -o /tmp/golang.tar.gz https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz + tar -zxf /tmp/golang.tar.gz -C /usr/local/ + rm /tmp/golang.tar.gz + ) & __load "\t[+] Installing Go" + fi + printf "\t[+] Adding Go to the PATH...\n" + export PATH="$PATH:/usr/local/go/bin" + echo 'export PATH=$PATH:/usr/local/go/bin' >> $HOME/.bashrc + else + printf "\t[+] Go is installed...\n" fi # Check if the GOPATH isn't set - if [ -z ${GOPATH+x} ] - then + if [ -z ${GOPATH+x} ]; then ( # Set up the GOPATH mkdir -p $HOME/go/{src,pkg,bin} echo 'export GOPATH=$HOME/go' >> $HOME/.bashrc echo 'export PATH=$PATH:$GOPATH/bin' >> $HOME/.bashrc - ) & __load "[+] Configuring Go dev environment" + ) & __load "\t[+] Configuring Go dev environment" export GOPATH=$HOME/go export PATH=$PATH:$GOPATH/bin fi +} + +__install_bro() { + ( + # security onion packages bro on their own + if ! __package_installed bro && ! 
__package_installed securityonion-bro; then + case "$_OS" in + Ubuntu) + __add_deb_repo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" \ + "Bro" \ + "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" + ;; + CentOS) + __add_rpm_repo http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo + ;; + esac + __install_packages bro broctl + fi + ) & __load "[+] Ensuring Bro IDS is installed" + + if [ ! $(command -v bro) ]; then + printf "\t[+] Adding Bro to the PATH...\n" + echo 'export PATH=$PATH:/opt/bro/bin' >> $HOME/.bashrc + PATH=$PATH:/opt/bro/bin + fi +} + +__install_mongodb() { + case "$_OS" in + Ubuntu) + __add_deb_repo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" \ + "MongoDB" \ + "https://www.mongodb.org/static/pgp/server-3.4.asc" + ;; + CentOS) + if [ ! -s /etc/yum.repos.d/mongodb-org-3.4.repo ]; then + echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' > /etc/yum.repos.d/mongodb-org-3.4.repo + fi + ;; + esac + __install_packages mongodb-org +} + +__install() { + + # Check if RITA is already installed, if so ask if this is a re-install + if [ ! -z $(command -v rita) ] || [ -d $HOME/.rita ]; then + printf "[+] RITA is already installed.\n" + read -p "[-] Would you like to erase it and re-install? 
[y/n] " -r + if [[ $REPLY =~ ^[Yy]$ ]] + then + __uninstall + echo "" + else + exit 1 + fi + fi + + # Explain the scripts actions + __explain + + # Figure out which package manager to use + __setPkgMgr + + # Update package sources + __freshen_packages + + # Install "the basics" + __install_packages git wget curl make coreutils realpath lsb-release & \ + __load "[+] Ensuring git, wget, curl, make, coreutils, and lsb-release are installed" + + # Determine the OS, needs lsb-release + __setOS - # Install MongoDB - apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 \ - --recv 0C49F3730359A14518585931BC711F9BA15703C6 > /dev/null 2>&1 & \ - __load "[+] Obtaining the package key for MongoDB" + __install_bro - echo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.4.list + __install_go + __check_go_version - apt-get update > /dev/null 2>&1 & __load "[+] Updating apt" - apt-get install -y mongodb-org > /dev/null 2>&1 & __load "[+] Installing MongoDB" + __install_mongodb & __load "[+] Installing MongoDB" ( # Build RITA mkdir -p $GOPATH/src/github.com/ocmdev/rita # Get the install script's directory in case it's run from elsewhere - RITASRC="$(dirname "$(realpath ${0})")" - cp -R $RITASRC/. $GOPATH/src/github.com/ocmdev/rita/ + cp -R "$(dirname "$(realpath ${0})")/." 
$GOPATH/src/github.com/ocmdev/rita/ cd $GOPATH/src/github.com/ocmdev/rita make install > /dev/null ) & __load "[+] Installing RITA" From eb34ad5f69f4935f26f31a3b3ebbc6b52af03d50 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 18 Dec 2017 17:18:24 -0700 Subject: [PATCH 063/117] Fix sourcing the bashrc on security onion --- install.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/install.sh b/install.sh index 8d589c19..52268190 100755 --- a/install.sh +++ b/install.sh @@ -29,10 +29,13 @@ _OLD_PS1=$PS1 PS1=" " # Hack the interactive flag to get around other .bashrc's set -i +# Make sure weirdness doesn't happen with autocomplete/ etc +set -o posix source $HOME/.bashrc # Clean up our hacks +set +o posix set +i PS1=$_OLD_PS1 unset _OLD_PS1 From 2d1edc45eb009857a1519cc21b50792082cc9809 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Tue, 2 Jan 2018 15:05:37 -0600 Subject: [PATCH 064/117] Adding mongo config documentation --- docs/Mongo Configuration.md | 362 ++++++++++++++++++++++++++++++++++++ etc/rita.yaml | 33 ++-- 2 files changed, 381 insertions(+), 14 deletions(-) create mode 100644 docs/Mongo Configuration.md diff --git a/docs/Mongo Configuration.md b/docs/Mongo Configuration.md new file mode 100644 index 00000000..94713135 --- /dev/null +++ b/docs/Mongo Configuration.md @@ -0,0 +1,362 @@ +# Mongo Configuration for RITA + +This is the default MongoDB configuration section in RITA's configuration file `~/.rita/config.yaml`. + +```yaml +MongoDB: + # See https://docs.mongodb.com/manual/reference/connection-string/ + ConnectionString: mongodb://localhost:27017 + # Example with authentication. Be sure to change the AuthenticationMechanism as well. 
+ # ConnectionString: mongodb://username:password@localhost:27017 + + # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" + # Since Mongo version 3.0 the default authentication mechanism is SCRAM-SHA-1 + AuthenticationMechanism: null + + # The time in hours before RITA's connection to MongoDB times out. 0 waits indefinitely. + SocketTimeout: 2 + + # For encrypting data on the wire between RITA and MongoDB + TLS: + Enable: false + #If set, RITA will verify the MongoDB certificate's hostname and validity + VerifyCertificate: false + #If set, RITA will use the provided CA file instead of the system's CA's + CAFile: null +``` + + +## Connection String + +The basic connection string format for RITA is: + +``` +mongodb://[username:password@]host1[:port1] +``` + +References: +- https://docs.mongodb.com/manual/reference/connection-string/ + + +## Authentication + +Possible Values: +* null (default) +* SCRAM-SHA-1 (preferred) +* MONGODB-CR (not tested) +* PLAIN (not tested) +* x509 (not supported) + +To configure MongoDB with authentication, you need to create a user and configure Mongod to require authentication for connections. Then configure RITA to authenticate with the user you created. + + +### Create a Mongo User + +Mongo allows you to enable authentication before or after creating a user. But for simplicity it is easier to create the user first. + +Connect to your mongo instance by running the `mongo` client. By default it will connect to `127.0.0.1` on port `27017` so if you haven't change the defaults you can just run: + +``` +mongo +``` + +And you should be greeted with: + +``` +MongoDB shell version v3.4.10 +connecting to: mongodb://127.0.0.1:27017 +MongoDB server version: 3.4.10 +> +``` + +Next, enter the following command to create a user, replacing `user` and `pwd` values with your desired values. The following example creates a user with the username "rita" and the password "assumebreach". 
The "userAdminAnyDatabase" role used here is a built-in Superuser level role. To read more about the different built-in roles available visit the link in the references below. + +``` +db.getSiblingDB('admin').createUser( + { + user: "rita", + pwd: "assumebreach", + roles: [ { role: "userAdminAnyDatabase", db: "admin" } ] + } +) +``` + +And finally, exit the mongo shell. + +``` +exit +``` + +References +- https://docs.mongodb.com/manual/tutorial/enable-authentication/ +- https://docs.mongodb.com/manual/reference/built-in-roles/ + + +### Mongo Config + +In a default Mongo installation, authentication is disabled. In older versions of Mongo, this was a serious security vulnerability because Mongo also defaulted to listening on all network interfaces. This meant that remote systems could access the databases without authentication. Since version 3.6, Mongo will only listen on localhost by default. This means that only clients connecting from the same system will be able to connect. + +The version of Mongo that RITA installs has authentication disabled and listens on the localhost interface only. If you wish to use RITA and Mongo on separate systems we recommend you enable both authentication and encryption. + +To enable authentication, edit the Mongo config file. Ubuntu's default location for this file is `/etc/mongod.conf`. Since version 3.0 of Mongo, the default authentication mechanism (when enabled) is "SCRAM-SHA-1". + +Add or modify your security section of the config file to include authorization. + +```yaml +security: + authorization: enabled +``` + +Restart your Mongod service for the changes to take effect. 
+
+```
+service mongod restart
+```
+
+
+References:
+- https://docs.mongodb.com/manual/core/authentication/
+- https://docs.mongodb.com/manual/reference/configuration-options/#security-options
+
+
+### RITA Config
+
+To enable authentication and provide a username and password in RITA, modify the connection string in RITA's config file (`~/.rita/config.yaml`).
+
+Of the possible values for `AuthenticationMechanism`, the only officially supported values are `null` or `SCRAM-SHA-1`.
+
+This example configures RITA to authenticate with a username of "rita" and a password of "assumebreach" and to use "SCRAM-SHA-1" for the authentication protocol.
+
+```yaml
+MongoDB:
+    ConnectionString: mongodb://rita:assumebreach@localhost:27017
+    AuthenticationMechanism: SCRAM-SHA-1
+```
+
+You can test that RITA is configured correctly by running `rita show-databases`.
+
+If the connection is successful, RITA will show the list of databases (or no output if you do not have any databases imported yet).
+
+If authentication is configured incorrectly, RITA will give the following output:
+
+```
+rita show-databases
+Failed to connect to database: server returned error on SASL authentication step: Authentication failed.
+```
+
+
+## Encryption
+
+Possible Values:
+* None (default)
+* TLS
+  * Self-Signed Certificate
+  * Trusted Certificate
+  * Certificate Verification
+
+Mongo's method of encrypting connections is to use TLS/SSL. But by default Mongo does not have encryption enabled on client connections.
+
+To quote the Mongo documentation:
+
+> Before you can use SSL, you must have a .pem file containing a public key certificate and its associated private key.
+
+> MongoDB can use any valid SSL certificate issued by a certificate authority or a self-signed certificate. If you use a self-signed certificate, although the communications channel will be encrypted, there will be no validation of server identity.
Although such a situation will prevent eavesdropping on the connection, it leaves you vulnerable to a man-in-the-middle attack. Using a certificate signed by a trusted certificate authority will permit MongoDB drivers to verify the server’s identity. + +Source: https://docs.mongodb.com/manual/core/security-transport-encryption/ + + +### Mongo Config + +The basic Mongo config file to enable encryption is shown below (`/etc/mongod.conf` on Ubuntu). This `net` configuration will listen on port `27017` (default) and only listen on the local interface `127.0.0.1` (default). If you want to allow remote connections you will need to change the `bindIp` to either `0.0.0.0` for all interfaces or the IP of the specific interface you want to listen on. + +The `ssl` portion of the configuration tells Mongo to _only_ accept encrypted connections. The `requireSSL` setting will refuse any unencrypted connections. The `PEMKeyFile` is the path to the file mentioned above in the quote from Mongo docs. Generating or obtaining this file will be covered below. + +```yaml +net: + port: 27017 + bindIp: 127.0.0.1 + ssl: + mode: requireSSL + PEMKeyFile: /etc/ssl/mongodb-cert.pem +``` + +Restart your Mongod service for the changes to take effect. + +``` +service mongod restart +``` + +References: +- https://docs.mongodb.com/manual/reference/configuration-options/#net-options + + +### RITA Config + +The following RITA configuration (`~/.rita/config.yaml`) snippet is sufficient to enable encrypted communication. Please note that while encryption and authentication are often used together, they are independent settings. The authentication settings aren't shown here but can be added. + +Note: RITA does not support the common `?ssl=true` option on Mongo's connection string to enable encryption. You must use the `TLS` section of RITA's config file. 
+
+```yaml
+MongoDB:
+    TLS:
+        Enable: true
+```
+
+Please make sure you understand the different options for certificates and validation detailed below, as well as the potential for man-in-the-middle attacks if configured incorrectly, before exposing Mongo to an untrusted network.
+
+
+### Certificates
+
+As stated by the Mongo documentation, you can either obtain a certificate signed by a trusted authority or generate your own self-signed certificate.
+
+
+#### Self-Signed
+
+There are a great many options when generating a self-signed certificate. The following command will generate a private key (mongodb-cert.key) and a public key (mongodb-cert.crt) in x509 format using the RSA algorithm with a 2048-bit key. This certificate will expire 5 years (1825 days) from the time it is generated.
+
+```
+openssl req -x509 -newkey rsa:2048 -days 1825 -nodes -out mongodb-cert.crt -keyout mongodb-cert.key
+```
+
+Openssl will then prompt you for several pieces of information. You may fill most of this in with arbitrary values that are appropriate to you. But the `Common Name` value is important as it will be used in certificate verification. Set this value to the remote hostname of your Mongo server (i.e. The hostname you will put in your RITA `ConnectionString` config). RITA must be able to reach your Mongo server using this hostname.
+
+```
+Generating a 2048 bit RSA private key
+............................................+++
+............+++
+writing new private key to 'mongodb-cert.key'
+-----
+You are about to be asked to enter information that will be incorporated
+into your certificate request.
+What you are about to enter is what is called a Distinguished Name or a DN.
+There are quite a few fields but you can leave some blank
+For some fields there will be a default value,
+If you enter '.', the field will be left blank.
+-----
+Country Name (2 letter code) [AU]:US
+State or Province Name (full name) [Some-State]:
+Locality Name (eg, city) []:
+Organization Name (eg, company) [Internet Widgits Pty Ltd]:
+Organizational Unit Name (eg, section) []:
+Common Name (e.g. server FQDN or YOUR name) []:localhost
+Email Address []:
+```
+
+This will leave you with two files: `mongodb-cert.key` and `mongodb-cert.crt`. In order to put them in the .pem file format that Mongo expects, simply concatenate the two files together like so:
+
+```
+cat mongodb-cert.key mongodb-cert.crt > mongodb-cert.pem
+```
+
+References:
+- https://docs.mongodb.com/manual/tutorial/configure-ssl/
+
+
+#### Trusted
+
+Obtaining a trusted certificate is beyond the scope of this document. See the Verification section below for details on configuring RITA to use a trusted certificate.
+
+References:
+- https://letsencrypt.org/
+
+
+#### Verification
+
+By default, RITA will not validate a certificate's authenticity. This is not ideal as it leaves connections open to man-in-the-middle attacks on untrusted networks.
+
+RITA's `VerifyCertificate` option will validate two things:
+1) The certificate is correctly signed by a trusted authority. The trusted authorities are determined by the system's CA store or by specifying a path to a `CAFile` in RITA's config.
+2) The `CN` (aka Common Name) field in the certificate must match the hostname where RITA is connecting. That is, the value must be the same as the hostname used in the `ConnectionString`. **This cannot be an IP address.**
+
+Once you have RITA configured, you can test your configuration by running `rita show-databases`.
+
+If the connection is successful, RITA will show the list of databases (or no output if you do not have any databases imported yet).
+ +If encryption or certificate verification is configured incorrectly, RITA will give the following output: + +``` +rita show-databases +Failed to connect to database: no reachable servers +``` + +**Trusted Certificate Verification Example** + +The following example configuration assumes your Mongo server is located at `offensivecountermeasures.com` and that you have obtained and configured Mongo with a certificate signed with a valid certificate authority. In this case, you do not need to specify a `CAFile` path. + +```yaml +MongoDB: + ConnectionString: mongodb://offensivecountermeasures.com:27017 + TLS: + Enable: true + VerifyCertificate: true +``` + +**Self-Signed Certificate Verification Example** + +In order to validate a self-signed certificate, you must specify the path to the CA file (commonly with a .crt extension). If you followed this document to generate one then it will be named `mongodb-cert.crt`. RITA does not need the private key (.key), or the combined file (.pem) which also contains the private key. You should protect the private key and not copy it anywhere unnecessarily. + +We recommend putting your certificate file at `~/.rita/mongodb-cert.crt`, which is used in the example below. The hostname used for the connection in this case is `localhost` and thus when you generated your certificate you must match this hostname. + +```yaml +MongoDB: + ConnectionString: mongodb://localhost:27017 + TLS: + Enable: true + VerifyCertificate: true + CAFile: $HOME/.rita/mongodb-cert.crt +``` + +Note: `~` does not expand in RITA's config file and will cause an error. Use `$HOME` instead. + + +**Self-Signed Certificate Verification Invalid Example** + +The following shows one example of a configuration that will not work. This is because an IP address (`127.0.0.1`) is used in the `ConnectionString`. Even if you set `CN` to `127.0.0.1` or add `IP:127.0.0.1` as a `SAN` when generating your certificate, this will still fail to validate. 
+ +```yaml +MongoDB: + ConnectionString: mongodb://127.0.0.1:27017 + TLS: + Enable: true + VerifyCertificate: true + CAFile: $HOME/.rita/mongodb-cert.crt +``` + + +## Complete Example with Authentication and Encryption + +For completeness, here is an example of RITA's `MongoDB` config section configured for authentication (username "rita" and password "assumebreach") and encryption (self-signed certificate with validation located at "localhost"). + +```yaml +MongoDB: + # See https://docs.mongodb.com/manual/reference/connection-string/ + ConnectionString: mongodb://rita:assumebreach@localhost:27017 + # How to authenticate to MongoDB + # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" + AuthenticationMechanism: SCRAM-SHA-1 + # The time in hours before RITA's connection to MongoDB times out. 0 waits indefinitely. + SocketTimeout: 2 + # For encrypting data on the wire between RITA and MongoDB + TLS: + Enable: true + #If set, RITA will verify the MongoDB certificate's hostname and validity + VerifyCertificate: true + #If set, RITA will use the provided CA file instead of the system's CA's + CAFile: $HOME/.rita/mongodb-cert.crt +``` + + + + + + + + + + + + + + + + diff --git a/etc/rita.yaml b/etc/rita.yaml index e6442220..7c157711 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -1,18 +1,23 @@ MongoDB: - # See https://docs.mongodb.com/manual/reference/connection-string/ - ConnectionString: mongodb://localhost:27017 - # How to authenticate to MongoDB - # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" - AuthenticationMechanism: null - # The time in hours before RITA's connection to MongoDB times out. 0 waits indefinitely. 
- SocketTimeout: 2 - # For encrypting data on the wire between RITA and MongoDB - TLS: - Enable: false - #If set, RITA will verify the MongoDB certificate's hostname and validity - VerifyCertificate: false - #If set, RITA will use the provided CA file instead of the system's CA's - CAFile: null + # See https://docs.mongodb.com/manual/reference/connection-string/ + ConnectionString: mongodb://localhost:27017 + # Example with authentication. Be sure to change the AuthenticationMechanism as well. + # ConnectionString: mongodb://username:password@localhost:27017 + + # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" + # Since Mongo version 3.0 the default authentication mechanism is SCRAM-SHA-1 + AuthenticationMechanism: null + + # The time in hours before RITA's connection to MongoDB times out. 0 waits indefinitely. + SocketTimeout: 2 + + # For encrypting data on the wire between RITA and MongoDB + TLS: + Enable: false + #If set, RITA will verify the MongoDB certificate's hostname and validity + VerifyCertificate: false + #If set, RITA will use the provided CA file instead of the system's CA's + CAFile: null LogConfig: # LogLevel From 461d539407fb7033096097930aa722c3ed7583b9 Mon Sep 17 00:00:00 2001 From: logan Date: Thu, 26 Oct 2017 12:16:46 -0600 Subject: [PATCH 065/117] Split integer representations of ip addresses into their own collections --- analysis/structure/hosts.go | 126 +--------------------- analysis/structure/ip.go | 174 +++++++++++++++++++++++++++++++ commands/analyze.go | 6 ++ config/tables.go | 2 + datatypes/structure/structure.go | 27 +++++ etc/rita.yaml | 2 +- 6 files changed, 211 insertions(+), 126 deletions(-) create mode 100644 analysis/structure/ip.go diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index ca641a5a..d96e7550 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -1,13 +1,8 @@ package structure import ( - "encoding/binary" - "net" - "github.com/ocmdev/rita/config" 
"github.com/ocmdev/rita/database" - structureTypes "github.com/ocmdev/rita/datatypes/structure" - log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) @@ -19,7 +14,7 @@ func BuildHostsCollection(res *database.Resources) { sourceCollectionName, newCollectionName, newCollectionKeys, - pipeline := getHosts(res.Config) + pipseline := getHosts(res.Config) // Aggregate it! errorCheck := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) @@ -32,8 +27,6 @@ func BuildHostsCollection(res *database.Resources) { defer ssn.Close() res.DB.AggregateCollection(sourceCollectionName, ssn, pipeline) - setIPv4Binary(res.DB.GetSelectedDB(), newCollectionName, ssn, res.Log) - setIPv6Binary(res.DB.GetSelectedDB(), newCollectionName, ssn, res.Log) } //getHosts aggregates the individual hosts from the conn collection and @@ -115,120 +108,3 @@ func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { return sourceCollectionName, newCollectionName, keys, pipeline } - -//setIPv4Binary sets the binary data for the ipv4 addresses in the dataset -func setIPv4Binary(selectedDB string, collectionName string, - session *mgo.Session, logger *log.Logger) { - coll := session.DB(selectedDB).C(collectionName) - - i := 0 - - var host structureTypes.Host - iter := coll.Find(bson.D{{"ipv4", true}}).Snapshot().Iter() //nolint: vet - - bulkUpdate := coll.Bulk() - - for iter.Next(&host) { - //1000 is the most a MongoDB bulk update operation can handle - if i == 1000 { - bulkUpdate.Unordered() - _, err := bulkUpdate.Run() - if err != nil { - logger.WithFields(log.Fields{ - "error": err.Error(), - }).Error("Unable to write binary representation of IP addresses") - } - - bulkUpdate = coll.Bulk() - i = 0 - } - - ipv4 := net.ParseIP(host.IP) - ipv4Binary := uint64(binary.BigEndian.Uint32(ipv4[12:16])) - - //nolint: vet - bulkUpdate.Update( - bson.D{ - {"_id", host.ID}, - }, - bson.D{ - {"$set", bson.D{ - {"ipv4_binary", ipv4Binary}, - }}, - }, - ) - 
i++ - } - - //guaranteed to be at least one in the array - bulkUpdate.Unordered() - _, err := bulkUpdate.Run() - if err != nil { - logger.WithFields(log.Fields{ - "error": err.Error(), - }).Error("Unable to write binary representation of IP addresses") - } -} - -//setIPv6Binary sets the binary data for the ipv6 addresses in the dataset -func setIPv6Binary(selectedDB string, collectionName string, - session *mgo.Session, logger *log.Logger) { - coll := session.DB(selectedDB).C(collectionName) - - i := 0 - - var host structureTypes.Host - iter := coll.Find(bson.D{{"ipv4", false}}).Snapshot().Iter() //nolint: vet - - bulkUpdate := coll.Bulk() - - for iter.Next(&host) { - //1000 is the most a MongoDB bulk update operation can handle - if i == 1000 { - bulkUpdate.Unordered() - _, err := bulkUpdate.Run() - if err != nil { - logger.WithFields(log.Fields{ - "error": err.Error(), - }).Error("Unable to write binary representation of IP addresses") - } - - bulkUpdate = coll.Bulk() - i = 0 - } - - ipv6 := net.ParseIP(host.IP) - ipv6Binary1 := uint64(binary.BigEndian.Uint32(ipv6[0:4])) - ipv6Binary2 := uint64(binary.BigEndian.Uint32(ipv6[4:8])) - ipv6Binary3 := uint64(binary.BigEndian.Uint32(ipv6[8:12])) - ipv6Binary4 := uint64(binary.BigEndian.Uint32(ipv6[12:16])) - - //nolint: vet - bulkUpdate.Update( - bson.D{ - {"_id", host.ID}, - }, - bson.D{ - {"$set", bson.D{ - {"ipv6_binary", bson.D{ - {"1", ipv6Binary1}, - {"2", ipv6Binary2}, - {"3", ipv6Binary3}, - {"4", ipv6Binary4}, - }}, - }}, - }, - ) - - i++ - } - - //guaranteed to be at least one in the array - bulkUpdate.Unordered() - _, err := bulkUpdate.Run() - if err != nil { - logger.WithFields(log.Fields{ - "error": err.Error(), - }).Error("Unable to write binary representation of IP addresses") - } -} diff --git a/analysis/structure/ip.go b/analysis/structure/ip.go new file mode 100644 index 00000000..6eea97d7 --- /dev/null +++ b/analysis/structure/ip.go @@ -0,0 +1,174 @@ +package structure + +import ( + "encoding/binary" 
+ "net" + + "github.com/ocmdev/rita/database" + structureTypes "github.com/ocmdev/rita/datatypes/structure" + log "github.com/sirupsen/logrus" + mgo "gopkg.in/mgo.v2" + "gopkg.in/mgo.v2/bson" +) + +//BuildIPv4Collection generates binary representations of the IPv4 addresses in the +//dataset for use in subnetting, address selection, etc. +func BuildIPv4Collection(res *database.Resources) { + ssn := res.DB.Session.Copy() + defer ssn.Close() + + errorCheck := res.DB.CreateCollection( + res.Config.T.Structure.IPv4Table, + false, + []mgo.Index{ + {Key: []string{"ip"}, Unique: true}, + {Key: []string{"ipv4_binary"}}, + }, + ) + if errorCheck != nil { + res.Log.Error("Failed: ", res.Config.T.Structure.IPv4Table, errorCheck) + return + } + + buildIPv4Binary( + res.DB.GetSelectedDB(), + res.Config.T.Structure.HostTable, + res.Config.T.Structure.IPv4Table, + ssn, + res.Log, + ) +} + +//BuildIPv6Collection generates binary representations of the IPv6 addresses in the +//dataset for use in subnetting, address selection, etc. 
+func BuildIPv6Collection(res *database.Resources) { + ssn := res.DB.Session.Copy() + defer ssn.Close() + + errorCheck := res.DB.CreateCollection( + res.Config.T.Structure.IPv6Table, + false, + []mgo.Index{ + {Key: []string{"ip"}, Unique: true}, + {Key: []string{"ipv6_binary.1"}}, + {Key: []string{"ipv6_binary.2"}}, + {Key: []string{"ipv6_binary.3"}}, + {Key: []string{"ipv6_binary.4"}}, + }, + ) + if errorCheck != nil { + res.Log.Error("Failed: ", res.Config.T.Structure.IPv6Table, errorCheck) + return + } + + buildIPv6Binary( + res.DB.GetSelectedDB(), + res.Config.T.Structure.HostTable, + res.Config.T.Structure.IPv6Table, + ssn, + res.Log, + ) +} + +//buildIPv4Binary sets the binary data for the ipv4 addresses in the dataset +func buildIPv4Binary(selectedDB, hostCollection, destCollection string, + session *mgo.Session, logger *log.Logger) { + srcColl := session.DB(selectedDB).C(hostCollection) + destColl := session.DB(selectedDB).C(destCollection) + i := 0 + + var host structureTypes.Host + iter := srcColl.Find(bson.D{{"ipv4", true}}).Iter() //nolint: vet + + bulkUpdate := destColl.Bulk() + + for iter.Next(&host) { + //1000 is the most a MongoDB bulk update operation can handle + if i == 1000 { + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } + + bulkUpdate = destColl.Bulk() + i = 0 + } + + ipv4 := net.ParseIP(host.IP) + ipv4Struct := structureTypes.IPv4Binary{ + IP: host.IP, + IPv4Binary: int64(binary.BigEndian.Uint32(ipv4[12:16])), + } + bulkUpdate.Insert(ipv4Struct) + + i++ + } + + //guaranteed to be at least one in the array + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } +} + +//buildIPv6Binary sets the binary data for the ipv6 addresses in the dataset +func 
buildIPv6Binary(selectedDB, hostCollection, destCollection string, + session *mgo.Session, logger *log.Logger) { + srcColl := session.DB(selectedDB).C(hostCollection) + destColl := session.DB(selectedDB).C(destCollection) + i := 0 + + var host structureTypes.Host + iter := srcColl.Find(bson.D{{"ipv4", false}}).Iter() //nolint: vet + + bulkUpdate := destColl.Bulk() + + for iter.Next(&host) { + //1000 is the most a MongoDB bulk update operation can handle + if i == 1000 { + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } + + bulkUpdate = destColl.Bulk() + i = 0 + } + + ipv6 := net.ParseIP(host.IP) + ipv6Binary1 := int64(binary.BigEndian.Uint32(ipv6[0:4])) + ipv6Binary2 := int64(binary.BigEndian.Uint32(ipv6[4:8])) + ipv6Binary3 := int64(binary.BigEndian.Uint32(ipv6[8:12])) + ipv6Binary4 := int64(binary.BigEndian.Uint32(ipv6[12:16])) + ipv6Struct := structureTypes.IPv6Binary{ + IP: host.IP, + IPv6Binary: structureTypes.IPv6Integers{ + I1: ipv6Binary1, + I2: ipv6Binary2, + I3: ipv6Binary3, + I4: ipv6Binary4, + }, + } + bulkUpdate.Insert(ipv6Struct) + + i++ + } + + //guaranteed to be at least one in the array + bulkUpdate.Unordered() + _, err := bulkUpdate.Run() + if err != nil { + logger.WithFields(log.Fields{ + "error": err.Error(), + }).Error("Unable to write binary representation of IP addresses") + } +} diff --git a/commands/analyze.go b/commands/analyze.go index eb6791fe..d6db07ef 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -83,6 +83,12 @@ func analyze(inDb string, configFile string) error { logAnalysisFunc("Unique Hosts", td, res, structure.BuildHostsCollection, ) + logAnalysisFunc("IPv4 Conversion", td, res, + structure.BuildIPv4Collection, + ) + logAnalysisFunc("IPv6 Conversion", td, res, + structure.BuildIPv6Collection, + ) logAnalysisFunc("Unique Hostnames", td, res, 
dns.BuildHostnamesCollection, ) diff --git a/config/tables.go b/config/tables.go index 58e9ad41..f5419639 100644 --- a/config/tables.go +++ b/config/tables.go @@ -36,6 +36,8 @@ type ( DNSTable string `yaml:"DnsTable"` UniqueConnTable string `yaml:"UniqueConnectionTable"` HostTable string `yaml:"HostTable"` + IPv4Table string `yaml:"IPv4Table"` + IPv6Table string `yaml:"IPv6Table"` } //BlacklistedTableCfg is used to control the blacklisted analysis module diff --git a/datatypes/structure/structure.go b/datatypes/structure/structure.go index d6618108..6732c4bf 100644 --- a/datatypes/structure/structure.go +++ b/datatypes/structure/structure.go @@ -14,6 +14,33 @@ type ( IPv4 bool `bson:"ipv6"` } + //IPv4Binary provides a way to store a binary representation of an + //IPv4 address in MongoDB + IPv4Binary struct { + ID bson.ObjectId `bson:"_id,omitempty"` + IP string `bson:"ip"` + IPv4Binary int64 `bson:"ipv4_binary"` + } + + //IPv6Integers provides a way to store a binary representation of an + //IPv6 address in MongoDB. The 128 bit address is split into four 32 bit + //values. However, MongoDB cannot store unsigned numbers, so we use 64 bit + //integers to hold the values. + IPv6Integers struct { + I1 int64 `bson:"1"` + I2 int64 `bson:"2"` + I3 int64 `bson:"3"` + I4 int64 `bson:"4"` + } + + //IPv6Binary provides a way to store a binary representation of an + //IPv6 address in MongoDB. + IPv6Binary struct { + ID bson.ObjectId `bson:"_id,omitempty"` + IP string `bson:"ip"` + IPv6Binary IPv6Integers `bson:"ipv6_binary"` + } + //UniqueConnection describes a pair of computer interfaces which contacted //each other over the observation period UniqueConnection struct { diff --git a/etc/rita.yaml b/etc/rita.yaml index 7c157711..1ccfdfca 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -1,7 +1,7 @@ MongoDB: # See https://docs.mongodb.com/manual/reference/connection-string/ ConnectionString: mongodb://localhost:27017 - # Example with authentication. 
Be sure to change the AuthenticationMechanism as well. + # Example with authentication. Be sure to change the AuthenticationMechanism as well. # ConnectionString: mongodb://username:password@localhost:27017 # Accepted Values: null, "SCRAM-SHA-1", "MONGODB-CR", "PLAIN" From 91fcf3bbeabb5804ce4fef0271d8ba1f1eef6bf5 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Fri, 5 Jan 2018 19:27:33 -0700 Subject: [PATCH 066/117] add new tables to config --- analysis/structure/hosts.go | 2 +- commands/analyze.go | 12 +++++------- etc/tables.yaml | 3 ++- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index d96e7550..ddda8b9d 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -14,7 +14,7 @@ func BuildHostsCollection(res *database.Resources) { sourceCollectionName, newCollectionName, newCollectionKeys, - pipseline := getHosts(res.Config) + pipeline := getHosts(res.Config) // Aggregate it! errorCheck := res.DB.CreateCollection(newCollectionName, false, newCollectionKeys) diff --git a/commands/analyze.go b/commands/analyze.go index d6db07ef..8ea2e669 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -81,13 +81,11 @@ func analyze(inDb string, configFile string) error { structure.BuildUniqueConnectionsCollection, ) logAnalysisFunc("Unique Hosts", td, res, - structure.BuildHostsCollection, - ) - logAnalysisFunc("IPv4 Conversion", td, res, - structure.BuildIPv4Collection, - ) - logAnalysisFunc("IPv6 Conversion", td, res, - structure.BuildIPv6Collection, + func(innerRes *database.Resources) { + structure.BuildHostsCollection(innerRes) + structure.BuildIPv4Collection(innerRes) + structure.BuildIPv6Collection(innerRes) + }, ) logAnalysisFunc("Unique Hostnames", td, res, dns.BuildHostnamesCollection, diff --git a/etc/tables.yaml b/etc/tables.yaml index ab9eb077..39ad8200 100644 --- a/etc/tables.yaml +++ b/etc/tables.yaml @@ -8,6 +8,8 @@ Structure: DnsTable: dns 
UniqueConnectionTable: uconn HostTable: host + IPv4Table: ipv4 + IPv6Table: ipv6 BlackListed: Database: rita-blacklist @@ -39,4 +41,3 @@ UserAgent: MetaTables: FilesTable: files DatabasesTable: databases - From 38a4deddfae01db508ad041a739f4c4d67aaa869 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 15 Jan 2018 16:50:26 -0700 Subject: [PATCH 067/117] Update readme for CentOS 7 --- Readme.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Readme.md b/Readme.md index 1a1336b9..52b50c72 100644 --- a/Readme.md +++ b/Readme.md @@ -17,7 +17,7 @@ The framework ingests [Bro Logs](https://www.bro.org/), and currently supports t Additional functionality is being developed and will be included soon. ### Automatic Installation -**The automatic installer is officially supported on Ubuntu 14.04 LTS (Security Onion) and Ubuntu 16.04 LTS** +**The automatic installer is officially supported on Ubuntu 14.04, 16.04 LTS, Security Onion, and CentOS 7** * Clone the package: `git clone https://github.com/ocmdev/rita.git` @@ -72,7 +72,7 @@ To obtain an API key: * After installing, `rita` should be in your `PATH` * **Option 1**: Import directly from the terminal (one time import) * `rita import -i path/to/your/bro_logs/ -d dataset_name` - * **Option 2**: Set up the Bro configuration in config.yaml for repeated imports + * **Option 2**: Set up the Bro configuration in `~/.rita/config.yaml` for repeated imports * Set `LogPath` to the `path/to/your/bro_logs` * Set `DBPrefix` to an identifier common to your set of logs * Set up the `DirectoryMap` From dfde8fd9590d68e57211d969a67a11a7e09081d0 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 15 Jan 2018 17:40:48 -0700 Subject: [PATCH 068/117] Go dep init --- .gitignore | 1 + Gopkg.lock | 148 +++++++++++++++++++++++++++++++++++++++++++++++++++++ Gopkg.toml | 61 ++++++++++++++++++++++ Makefile | 4 +- 4 files changed, 212 insertions(+), 2 deletions(-) create mode 100644 Gopkg.lock create mode 100644 Gopkg.toml diff 
--git a/.gitignore b/.gitignore index c1ce10ac..6e6bdeaf 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ *.exe rita debug +vendor/ diff --git a/Gopkg.lock b/Gopkg.lock new file mode 100644 index 00000000..1417bdc0 --- /dev/null +++ b/Gopkg.lock @@ -0,0 +1,148 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + branch = "master" + name = "github.com/golang/protobuf" + packages = [ + "proto", + "ptypes/duration" + ] + revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845" + +[[projects]] + branch = "master" + name = "github.com/google/safebrowsing" + packages = [ + ".", + "internal/safebrowsing_proto" + ] + revision = "fe6951d7ef01b4e46d3008e8a08b55bcdf3c0ee6" + +[[projects]] + name = "github.com/mattn/go-runewidth" + packages = ["."] + revision = "9e777a8366cce605130a531d2cd6363d07ad7317" + version = "v0.0.2" + +[[projects]] + name = "github.com/ocmdev/mgorus" + packages = ["."] + revision = "544a63f222470b2feb3b1142f7edb9a843f5120d" + version = "v0.1.0" + +[[projects]] + name = "github.com/ocmdev/mgosec" + packages = ["."] + revision = "af42afa3ec74143661a863bdae62d36a93c6eca7" + version = "v0.1.0" + +[[projects]] + branch = "master" + name = "github.com/ocmdev/rita-bl" + packages = [ + ".", + "database", + "list", + "sources/lists", + "sources/lists/util", + "sources/rpc" + ] + revision = "070299442c8c467e9501907b53ba4646be71255b" + +[[projects]] + branch = "master" + name = "github.com/olekukonko/tablewriter" + packages = ["."] + revision = "96aac992fc8b1a4c83841a6c3e7178d20d989625" + +[[projects]] + name = "github.com/rifflock/lfshook" + packages = ["."] + revision = "1fdc019a35147ddbb3d25aedf713ad6d1430c144" + version = "v2.2" + +[[projects]] + name = "github.com/sirupsen/logrus" + packages = ["."] + revision = "d682213848ed68c0a260ca37d6dd5ace8423f5ba" + version = "v1.0.4" + +[[projects]] + branch = "master" + name = "github.com/skratchdot/open-golang" + packages = ["open"] + revision = 
"75fb7ed4208cf72d323d7d02fd1a5964a7a9073c" + +[[projects]] + name = "github.com/urfave/cli" + packages = ["."] + revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1" + version = "v1.20.0" + +[[projects]] + branch = "master" + name = "golang.org/x/crypto" + packages = ["ssh/terminal"] + revision = "13931e22f9e72ea58bb73048bc752b48c6d4d4ac" + +[[projects]] + branch = "master" + name = "golang.org/x/net" + packages = ["idna"] + revision = "5ccada7d0a7ba9aeb5d3aca8d3501b4c2a509fec" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = [ + "unix", + "windows" + ] + revision = "fff93fa7cd278d84afc205751523809c464168ab" + +[[projects]] + branch = "master" + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable" + ] + revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3" + +[[projects]] + branch = "v2" + name = "gopkg.in/mgo.v2" + packages = [ + ".", + "bson", + "internal/json", + "internal/sasl", + "internal/scram" + ] + revision = "3f83fa5005286a7fe593b055f0d7771a7dce4655" + +[[projects]] + branch = "v2" + name = "gopkg.in/yaml.v2" + packages = ["."] + revision = "d670f9405373e636a5a2765eea47fac0c9bc91a4" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "832dc7b3f46a9078eacd699113f3f63b3a214fbeb79a8d164b9a71151c5395b4" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml new file mode 100644 index 00000000..44227bbb --- /dev/null +++ b/Gopkg.toml @@ -0,0 +1,61 @@ +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + + +[[constraint]] + name = "github.com/ocmdev/mgorus" + version = "0.1.0" + +[[constraint]] + name = "github.com/ocmdev/mgosec" + version = "0.1.0" + +[[constraint]] + branch = "master" + name = "github.com/ocmdev/rita-bl" + +[[constraint]] + branch = "master" + name = "github.com/olekukonko/tablewriter" + +[[constraint]] + name = "github.com/rifflock/lfshook" + version = "2.2.0" + +[[constraint]] + name = "github.com/sirupsen/logrus" + version = "1.0.4" + +[[constraint]] + branch = "master" + name = "github.com/skratchdot/open-golang" + +[[constraint]] + name = "github.com/urfave/cli" + version = "1.20.0" + +[[constraint]] + branch = "v2" + name = "gopkg.in/mgo.v2" + +[[constraint]] + branch = "v2" + name = "gopkg.in/yaml.v2" diff --git a/Makefile b/Makefile index a3fee49e..3c13e5d7 100644 --- a/Makefile +++ b/Makefile @@ -6,12 +6,12 @@ LDFLAGS=-ldflags="-X github.com/ocmdev/rita/config.VERSION=${VERSION}" default: - go get + dep ensure go build ${LDFLAGS} # Having issues with 'go install' + LDFLAGS using sudo and the # install script. This is a workaround. 
install: - go get + dep ensure go build ${LDFLAGS} -o ${GOPATH}/bin/${BINARY} From 340bdaf3aea8cb7d7965a0d9df1bad2bc1b398d3 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 15 Jan 2018 18:10:23 -0700 Subject: [PATCH 069/117] Install go dep in the installation script --- install.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/install.sh b/install.sh index 52268190..a5fa4b4a 100755 --- a/install.sh +++ b/install.sh @@ -320,6 +320,9 @@ __install() { __install_mongodb & __load "[+] Installing MongoDB" ( # Build RITA + # Ensure go dep is installed + go get -u github.com/golang/dep/cmd/dep + mkdir -p $GOPATH/src/github.com/ocmdev/rita # Get the install script's directory in case it's run from elsewhere cp -R "$(dirname "$(realpath ${0})")/." $GOPATH/src/github.com/ocmdev/rita/ From aedfaae9d89950eefeddcda2ce3b44815143cf67 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 22 Jan 2018 17:22:11 -0700 Subject: [PATCH 070/117] Better record keeping, import and analyze versions in MetaDB, store ExactVersion and Version which holds the long form and short form results of git describe, respectively --- Makefile | 6 +++--- config/config.go | 14 ++++++++++++-- config/running.go | 3 --- config/static.go | 20 +++++++++++++------- database/meta.go | 26 ++++++++++++++++---------- rita.go | 2 +- 6 files changed, 45 insertions(+), 26 deletions(-) diff --git a/Makefile b/Makefile index 3c13e5d7..e2ebf69d 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,9 @@ -VERSION := $(shell git describe --always --long --dirty --tags) +VERSION := $(shell git describe --abbrev=0 --tags) +EXACT_VERSION := $(shell git describe --always --long --dirty --tags) GOPATH := $(GOPATH) BINARY := rita -LDFLAGS=-ldflags="-X github.com/ocmdev/rita/config.VERSION=${VERSION}" +LDFLAGS=-ldflags="-X github.com/ocmdev/rita/config.Version=${VERSION} -X github.com/ocmdev/rita/config.ExactVersion=${EXACT_VERSION}" default: @@ -14,4 +15,3 @@ default: install: dep ensure go build ${LDFLAGS} -o 
${GOPATH}/bin/${BINARY} - diff --git a/config/config.go b/config/config.go index f8e65a94..5b40e855 100644 --- a/config/config.go +++ b/config/config.go @@ -7,8 +7,13 @@ import ( "reflect" ) -//VERSION is filled at compile time with the git version of RITA -var VERSION = "undefined" +//Version is filled at compile time with the git version of RITA +//Version is filled by "git describe --abbrev=0 --tags" +var Version = "undefined" + +//ExactVersion is filled at compile time with the git version of RITA +//ExactVersion is filled by "git describe --always --long --dirty --tags" +var ExactVersion = "undefined" type ( //Config holds the configuration for the running system @@ -19,7 +24,12 @@ type ( } ) +//userConfigPath specifies the path of RITA's static config file +//relative to the user's home directory const userConfigPath = "/.rita/config.yaml" + +//tableConfigPath specifies the path of RITA's table config file +//relative to the user's home directory const tableConfigPath = "/.rita/tables.yaml" //NOTE: If go ever gets default parameters, default the config options to "" diff --git a/config/running.go b/config/running.go index b43908dd..9c965692 100644 --- a/config/running.go +++ b/config/running.go @@ -13,7 +13,6 @@ type ( //RunningCfg holds configuration options that are parsed at run time RunningCfg struct { MongoDB MongoDBRunningCfg - Version string } //MongoDBRunningCfg holds parsed information for connecting to MongoDB @@ -30,8 +29,6 @@ func loadRunningConfig(config *StaticCfg) (*RunningCfg, error) { var outConfig = new(RunningCfg) var err error - outConfig.Version = VERSION - //parse the tls configuration if config.MongoDB.TLS.Enabled { tlsConf := &tls.Config{} diff --git a/config/static.go b/config/static.go index cb615afa..965eb70c 100644 --- a/config/static.go +++ b/config/static.go @@ -13,13 +13,15 @@ import ( type ( //StaticCfg is the container for other static config sections StaticCfg struct { - MongoDB MongoDBStaticCfg `yaml:"MongoDB"` - Log 
LogStaticCfg `yaml:"LogConfig"` - Blacklisted BlacklistedStaticCfg `yaml:"BlackListed"` - Crossref CrossrefStaticCfg `yaml:"Crossref"` - Scanning ScanningStaticCfg `yaml:"Scanning"` - Beacon BeaconStaticCfg `yaml:"Beacon"` - Bro BroStaticCfg `yaml:"Bro"` + MongoDB MongoDBStaticCfg `yaml:"MongoDB"` + Log LogStaticCfg `yaml:"LogConfig"` + Blacklisted BlacklistedStaticCfg `yaml:"BlackListed"` + Crossref CrossrefStaticCfg `yaml:"Crossref"` + Scanning ScanningStaticCfg `yaml:"Scanning"` + Beacon BeaconStaticCfg `yaml:"Beacon"` + Bro BroStaticCfg `yaml:"Bro"` + Version string + ExactVersion string } //MongoDBStaticCfg contains the means for connecting to MongoDB @@ -116,5 +118,9 @@ func loadStaticConfig(cfgPath string) (*StaticCfg, error) { // set the socket time out in hours config.MongoDB.SocketTimeout *= time.Hour + // grab the version constants set by the build process + config.Version = Version + config.ExactVersion = ExactVersion + return config, nil } diff --git a/database/meta.go b/database/meta.go index d75f44b8..84a91467 100644 --- a/database/meta.go +++ b/database/meta.go @@ -22,11 +22,12 @@ type ( // DBMetaInfo defines some information about the database DBMetaInfo struct { - ID bson.ObjectId `bson:"_id,omitempty"` // Ident - Name string `bson:"name"` // Top level name of the database - Analyzed bool `bson:"analyzed"` // Has this database been analyzed - UsingDates bool `bson:"dates"` // Whether this db was created with dates enabled - Version string `bson:"version"` // Rita version at import + ID bson.ObjectId `bson:"_id,omitempty"` // Ident + Name string `bson:"name"` // Top level name of the database + Analyzed bool `bson:"analyzed"` // Has this database been analyzed + UsingDates bool `bson:"dates"` // Whether this db was created with dates enabled + ImportVersion string `bson:"import_version"` // Rita version at import + AnalyzeVersion string `bson:"analyze_version"` // Rita version at analyze } ) @@ -40,10 +41,10 @@ func (m *MetaDBHandle) AddNewDB(name 
string) error { err := ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).Insert( DBMetaInfo{ - Name: name, - Analyzed: false, - UsingDates: m.res.Config.S.Bro.UseDates, - Version: m.res.Config.R.Version, + Name: name, + Analyzed: false, + UsingDates: m.res.Config.S.Bro.UseDates, + ImportVersion: m.res.Config.S.Version, }, ) if err != nil { @@ -129,7 +130,12 @@ func (m *MetaDBHandle) MarkDBAnalyzed(name string, complete bool) error { } err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable). - Update(bson.M{"_id": dbr.ID}, bson.M{"$set": bson.M{"analyzed": complete}}) + Update(bson.M{"_id": dbr.ID}, bson.M{ + "$set": bson.D{ + {"analyzed", complete}, + {"analyze_version", m.res.Config.S.Version}, + }, + }) if err != nil { m.res.Log.WithFields(log.Fields{ diff --git a/rita.go b/rita.go index b9550e67..a5c5d363 100644 --- a/rita.go +++ b/rita.go @@ -17,7 +17,7 @@ func main() { // Change the version string with updates so that a quick help command will // let the testers know what version of HT they're on - app.Version = config.VERSION + app.Version = config.Version // Define commands used with this application app.Commands = commands.Commands() From 438e54a43fb6747b9d6cf44053628905171e5bdd Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 22 Jan 2018 18:42:35 -0700 Subject: [PATCH 071/117] Add version checks for import and analyze, remove version tag when resetting a db --- Gopkg.lock | 8 +++++++- commands/analyze.go | 37 +++++++++++++++++++++++++++++-------- config/running.go | 3 +++ database/meta.go | 38 +++++++++++++++++++++++++++++++++++++- parser/mongodatastore.go | 24 ++++++++++++++++++------ 5 files changed, 94 insertions(+), 16 deletions(-) diff --git a/Gopkg.lock b/Gopkg.lock index 1417bdc0..6e032996 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1,6 +1,12 @@ # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+[[projects]] + name = "github.com/blang/semver" + packages = ["."] + revision = "2ee87856327ba09384cabd113bc6b5d174e9ec0f" + version = "v3.5.1" + [[projects]] branch = "master" name = "github.com/golang/protobuf" @@ -143,6 +149,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "832dc7b3f46a9078eacd699113f3f63b3a214fbeb79a8d164b9a71151c5395b4" + inputs-digest = "faeb74ddc998410e81792434cdede82704ed7479eb84d31e1cc20b2db67a8901" solver-name = "gps-cdcl" solver-version = 1 diff --git a/commands/analyze.go b/commands/analyze.go index eb6791fe..f54963b5 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -4,6 +4,7 @@ import ( "fmt" "time" + "github.com/blang/semver" "github.com/ocmdev/rita/analysis/beacon" "github.com/ocmdev/rita/analysis/blacklist" "github.com/ocmdev/rita/analysis/crossref" @@ -36,26 +37,46 @@ func init() { func analyze(inDb string, configFile string) error { res := database.InitResources(configFile) + var toRunDirty []string var toRun []string // Check to see if we want to run a full database or just one off the command line if inDb == "" { res.Log.Info("Running analysis against all databases") - toRun = append(toRun, res.MetaDB.GetUnAnalyzedDatabases()...) + toRunDirty = append(toRun, res.MetaDB.GetUnAnalyzedDatabases()...) 
} else { - info, err := res.MetaDB.GetDBMetaInfo(inDb) + toRunDirty = append(toRun, inDb) + } + + // Check for problems + for _, possDB := range toRunDirty { + info, err := res.MetaDB.GetDBMetaInfo(possDB) if err != nil { - errStr := fmt.Sprintf("Error: %s not found.", inDb) + errStr := fmt.Sprintf("Error: %s not found.", possDB) res.Log.Errorf(errStr) - return cli.NewExitError(errStr, -1) + fmt.Println(errStr) + continue } if info.Analyzed { - errStr := fmt.Sprintf("Error: %s is already analyzed.", inDb) + errStr := fmt.Sprintf("Error: %s is already analyzed.", possDB) res.Log.Errorf(errStr) - return cli.NewExitError(errStr, -1) + fmt.Println(errStr) + continue } - - toRun = append(toRun, inDb) + semVer, err := semver.ParseTolerant(info.ImportVersion) + if err != nil { + errStr := fmt.Sprintf("Error: %s is labelled with an incorrect version tag", possDB) + res.Log.Errorf(errStr) + fmt.Println(errStr) + continue + } + if semVer.Major != res.Config.R.Version.Major { + errStr := fmt.Sprintf("Error: %s was parsed by an incompatible version of RITA", possDB) + res.Log.Errorf(errStr) + fmt.Println(errStr) + continue + } + toRun = append(toRun, possDB) } startAll := time.Now() diff --git a/config/running.go b/config/running.go index 9c965692..88c4592a 100644 --- a/config/running.go +++ b/config/running.go @@ -6,6 +6,7 @@ import ( "fmt" "io/ioutil" + "github.com/blang/semver" "github.com/ocmdev/mgosec" ) @@ -13,6 +14,7 @@ type ( //RunningCfg holds configuration options that are parsed at run time RunningCfg struct { MongoDB MongoDBRunningCfg + Version semver.Version } //MongoDBRunningCfg holds parsed information for connecting to MongoDB @@ -58,5 +60,6 @@ func loadRunningConfig(config *StaticCfg) (*RunningCfg, error) { } outConfig.MongoDB.AuthMechanismParsed = authMechanism + outConfig.Version, err = semver.ParseTolerant(config.Version) return outConfig, err } diff --git a/database/meta.go b/database/meta.go index 84a91467..a853a9fb 100644 --- a/database/meta.go +++ 
b/database/meta.go @@ -4,6 +4,7 @@ import ( "os" "sync" + "github.com/blang/semver" fpt "github.com/ocmdev/rita/parser/fileparsetypes" log "github.com/sirupsen/logrus" "gopkg.in/mgo.v2" @@ -129,11 +130,18 @@ func (m *MetaDBHandle) MarkDBAnalyzed(name string, complete bool) error { return err } + var versionTag string + if complete { + versionTag = m.res.Config.S.Version + } else { + versionTag = "" + } + err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable). Update(bson.M{"_id": dbr.ID}, bson.M{ "$set": bson.D{ {"analyzed", complete}, - {"analyze_version", m.res.Config.S.Version}, + {"analyze_version", versionTag}, }, }) @@ -181,6 +189,34 @@ func (m *MetaDBHandle) GetDatabases() []string { return results } +//CheckCompatibleImport checks if a database was imported with a version of +//RITA which is compatible with the running version +func (m *MetaDBHandle) CheckCompatibleImport(targetDatabase string) (bool, error) { + dbData, err := m.GetDBMetaInfo(targetDatabase) + if err != nil { + return false, err + } + existingVer, err := semver.ParseTolerant(dbData.ImportVersion) + if err != nil { + return false, err + } + return m.res.Config.R.Version.Major == existingVer.Major, nil +} + +//CheckCompatibleAnalyze checks if a database was analyzed with a version of +//RITA which is compatible with the running version +func (m *MetaDBHandle) CheckCompatibleAnalyze(targetDatabase string) (bool, error) { + dbData, err := m.GetDBMetaInfo(targetDatabase) + if err != nil { + return false, err + } + existingVer, err := semver.ParseTolerant(dbData.AnalyzeVersion) + if err != nil { + return false, err + } + return m.res.Config.R.Version.Major == existingVer.Major, nil +} + // GetUnAnalyzedDatabases builds a list of database names which have yet to be analyzed func (m *MetaDBHandle) GetUnAnalyzedDatabases() []string { m.logDebug("GetUnAnalyzedDatabases", "entering") diff --git a/parser/mongodatastore.go b/parser/mongodatastore.go index 28e820b7..241eca39 100644 --- 
a/parser/mongodatastore.go +++ b/parser/mongodatastore.go @@ -136,22 +136,34 @@ func (mongo *MongoDatastore) getCollectionMap(data *ImportedData) (*collectionMa } //check if the database is already analyzed - for _, analyzedDB := range mongo.analyzedDBs { - if analyzedDB == data.TargetDatabase { + + //iterate over indices to save RAM + //nolint: golint + for i, _ := range mongo.analyzedDBs { + if mongo.analyzedDBs[i] == data.TargetDatabase { return nil, errors.New("cannot import bro data into already analyzed database") } } //check if the database was created in an earlier parse targetDBExists := false - for _, unanalyzedDB := range mongo.unanalyzedDBs { - if unanalyzedDB == data.TargetDatabase { + //nolint: golint + for i, _ := range mongo.unanalyzedDBs { + if mongo.unanalyzedDBs[i] == data.TargetDatabase { targetDBExists = true } } - //create the database if it doesn't exist - if !targetDBExists { + if targetDBExists { + compatible, err := mongo.metaDB.CheckCompatibleImport(data.TargetDatabase) + if err != nil { + return nil, err + } + if !compatible { + return nil, errors.New("cannot import bro data into already populated, incompatible database") + } + } else { + //create the database if it doesn't exist err := mongo.metaDB.AddNewDB(data.TargetDatabase) if err != nil { return nil, err From 9676bf761ac5294a39aeb1ee9c0d96a139517aea Mon Sep 17 00:00:00 2001 From: samuel carroll Date: Thu, 25 Jan 2018 14:58:02 -0700 Subject: [PATCH 072/117] Adding updated ranges for expected test results, changed because we changed the way we evaluate beacons, may need to find new tests to fill in the ones that we've changed --- analysis/beacon/beacon_test.go | 19 +++++++++++++++++-- analysis/beacon/beacon_test_data.go | 8 ++++---- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/analysis/beacon/beacon_test.go b/analysis/beacon/beacon_test.go index 563bcc2f..090d786a 100644 --- a/analysis/beacon/beacon_test.go +++ b/analysis/beacon/beacon_test.go @@ -24,39 +24,54 @@ 
func printAnalysis(res *datatype_beacon.BeaconAnalysisOutput) string { } func TestAnalysis(t *testing.T) { + //There are mock resources to set our database equal to, so we don't have errors + //TODO: update the mock.go file under the database to perfectly mock the MongoDB res := database.InitMockResources("") res.Log.Level = log.DebugLevel res.Config.S.Beacon.DefaultConnectionThresh = 2 + //Assume that we have succeeded until we have proof that we haven't. . . + // If you think about it, that's a good way to live life too. . . fail := false + //Now we want to iterate through all test cases in beacon_test_data.go for i, val := range testDataList { beaconing := newBeacon(res) //set first and last connection times beaconing.minTime = val.ts[0] beaconing.maxTime = val.ts[len(val.ts)-1] + //Now fill in the data that we will need to analyze traffic data := &beaconAnalysisInput{ src: "0.0.0.0", dst: "0.0.0.0", - ts: val.ts, - orig_ip_bytes: val.ds, + ts: val.ts, //these are the timestamps + orig_ip_bytes: val.ds, //these are the data sizes } + //set the wait time for the beaconing analysis beaconing.analysisWg.Add(1) + //Open a channel to the analyze function go beaconing.analyze() + //Feed the data into our new channel beaconing.analysisChannel <- data + //Close this input channel close(beaconing.analysisChannel) + //Now set our result to the output to our writeChannel res := <-beaconing.writeChannel + //now we wait for the time we specified earlier beaconing.analysisWg.Wait() + //Now we check if we are inside the acceptable score range status := "PASS" if res.Score < val.minScore || res.Score > val.maxScore { fail = true status = "FAIL" } + //Print Results t.Logf("%d - %s:\n\tExpected Score: %f < x < %f\n\tDescription: %s\n%s\n", i, status, val.minScore, val.maxScore, val.description, printAnalysis(res)) } + //Log the test results if fail { t.Fail() } diff --git a/analysis/beacon/beacon_test_data.go b/analysis/beacon/beacon_test_data.go index 5450b2c9..da555276 100644 
--- a/analysis/beacon/beacon_test_data.go +++ b/analysis/beacon/beacon_test_data.go @@ -45,8 +45,8 @@ var testDataList = []testData{ { ts: []int64{181, 3644, 7104, 10741, 14406, 17867, 21589, 25263, 28954, 32633, 36026, 39460, 43114, 46766, 50476, 54078, 57504, 61127, 64850, 68408, 71829, 75698, 79208, 82702, 84500}, ds: []int64{4, 4, 4, 6, 6, 6, 4, 4, 4}, - minScore: 0.90, - maxScore: 1.0, + minScore: 0.7, + maxScore: 0.8, description: "Beacon every 1 hour... Starts at 0 (midnight) ends at 86400 (+24 hours)... Noise added to each timestamp: Gaussian Mu=0 Sigma=100", }, @@ -61,8 +61,8 @@ var testDataList = []testData{ { ts: []int64{0, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 5, 6, 7, 9, 10, 10, 10, 11, 13}, ds: []int64{4, 100, 2, 43, 3}, - minScore: 0.0, - maxScore: 0.7, + minScore: 0.8, + maxScore: 0.9, description: "Connection happens a lot... but not a beacon", }, From 5e3691f5aa951153e9fccc89c729f210935e51ee Mon Sep 17 00:00:00 2001 From: logan Date: Tue, 28 Nov 2017 20:36:27 -0700 Subject: [PATCH 073/117] fix id_origin and change it to id_orig --- analysis/beacon/beacon.go | 2 +- analysis/scanning/scan.go | 4 ++-- analysis/structure/hosts.go | 2 +- analysis/structure/uconn.go | 4 ++-- datatypes/data/data.go | 8 ++++---- parser/parsetypes/conn.go | 6 +++--- parser/parsetypes/dns.go | 6 +++--- parser/parsetypes/http.go | 6 +++--- 8 files changed, 19 insertions(+), 19 deletions(-) diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 5113f45f..32791aa2 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -192,7 +192,7 @@ func (t *Beacon) collect() { var conn data.Conn connIter := session.DB(t.db). C(t.res.Config.T.Structure.ConnTable). - Find(bson.M{"id_origin_h": uconn.Src, "id_resp_h": uconn.Dst}). + Find(bson.M{"id_orig_h": uconn.Src, "id_resp_h": uconn.Dst}). 
Iter() for connIter.Next(&conn) { diff --git a/analysis/scanning/scan.go b/analysis/scanning/scan.go index c36da153..f6492543 100644 --- a/analysis/scanning/scan.go +++ b/analysis/scanning/scan.go @@ -50,14 +50,14 @@ func getScanningCollectionScript(conf *config.Config) (string, string, []mgo.Ind { {"$group", bson.D{ {"_id", bson.D{ - {"src", "$id_origin_h"}, + {"src", "$id_orig_h"}, {"dst", "$id_resp_h"}, }}, {"connection_count", bson.D{ {"$sum", 1}, }}, {"src", bson.D{ - {"$first", "$id_origin_h"}, + {"$first", "$id_orig_h"}, }}, {"dst", bson.D{ {"$first", "$id_resp_h"}, diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index ca641a5a..f3647388 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -60,7 +60,7 @@ func getHosts(conf *config.Config) (string, string, []mgo.Index, []bson.D) { {"$project", bson.D{ {"hosts", []interface{}{ bson.D{ - {"ip", "$id_origin_h"}, + {"ip", "$id_orig_h"}, {"local", "$local_orig"}, }, bson.D{ diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index 1e2de276..9a725b2e 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -69,14 +69,14 @@ func getUniqueConnectionsScript(conf *config.Config) (string, string, []mgo.Inde { {"$group", bson.D{ {"_id", bson.D{ - {"src", "$id_origin_h"}, + {"src", "$id_orig_h"}, {"dst", "$id_resp_h"}, }}, {"connection_count", bson.D{ {"$sum", 1}, }}, {"src", bson.D{ - {"$first", "$id_origin_h"}, + {"$first", "$id_orig_h"}, }}, {"dst", bson.D{ {"$first", "$id_resp_h"}, diff --git a/datatypes/data/data.go b/datatypes/data/data.go index 9f981574..088a5d87 100644 --- a/datatypes/data/data.go +++ b/datatypes/data/data.go @@ -7,8 +7,8 @@ type ( Conn struct { Ts int64 `bson:"ts,omitempty"` UID string `bson:"uid"` - Src string `bson:"id_origin_h,omitempty"` - Spt int `bson:"id_origin_p,omitempty"` + Src string `bson:"id_orig_h,omitempty"` + Spt int `bson:"id_orig_p,omitempty"` Dst string `bson:"id_resp_h,omitempty"` Dpt int 
`bson:"id_resp_p,omitempty"` Dur float64 `bson:"duration,omitempty"` @@ -24,8 +24,8 @@ type ( DNS struct { Ts int64 `bson:"ts"` UID string `bson:"uid"` - Src string `bson:"id_origin_h"` - Spt int `bson:"id_origin_p"` + Src string `bson:"id_orig_h"` + Spt int `bson:"id_orig_p"` Dst string `bson:"id_resp_h"` Dpt int `bson:"id_resp_p"` Proto string `bson:"proto"` diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index 75a3ad77..0c5ad413 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -15,9 +15,9 @@ type ( // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection - Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` + Source string `bson:"id_orig_h" bro:"id.orig_h" brotype:"addr"` // SourcePort is the source port of this connection - SourcePort int `bson:"id_origin_p" bro:"id.orig_p" brotype:"port"` + SourcePort int `bson:"id_orig_p" bro:"id.orig_p" brotype:"port"` // Destination is the destination of the connection Destination string `bson:"id_resp_h" bro:"id.resp_h" brotype:"addr"` // DestinationPort is the port at the destination host @@ -63,7 +63,7 @@ func (in *Conn) TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection func (in *Conn) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "-duration", "ts", "uid"} + return []string{"$hashed:id_orig_h", "$hashed:id_resp_h", "-duration", "ts", "uid"} } //Normalize pre processes this type of entry before it is imported by rita diff --git a/parser/parsetypes/dns.go b/parser/parsetypes/dns.go index 919decfa..11cd42fa 100644 --- a/parser/parsetypes/dns.go +++ b/parser/parsetypes/dns.go @@ -14,9 +14,9 @@ type DNS struct { // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the 
source address for this connection - Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` + Source string `bson:"id_orig_h" bro:"id.orig_h" brotype:"addr"` // SourcePort is the source port of this connection - SourcePort int `bson:"id_origin_p" bro:"id.orig_p" brotype:"port"` + SourcePort int `bson:"id_orig_p" bro:"id.orig_p" brotype:"port"` // Destination is the destination of the connection Destination string `bson:"id_resp_h" bro:"id.resp_h" brotype:"addr"` // DestinationPort is the port at the destination host @@ -68,7 +68,7 @@ func (in *DNS) TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection func (in *DNS) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "$hashed:query"} + return []string{"$hashed:id_orig_h", "$hashed:id_resp_h", "$hashed:query"} } //Normalize pre processes this type of entry before it is imported by rita diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index 28d373ae..59f1f719 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -18,9 +18,9 @@ type HTTP struct { // UID is the Unique Id for this connection (generated by Bro) UID string `bson:"uid" bro:"uid" brotype:"string"` // Source is the source address for this connection - Source string `bson:"id_origin_h" bro:"id.orig_h" brotype:"addr"` + Source string `bson:"id_orig_h" bro:"id.orig_h" brotype:"addr"` // SourcePort is the source port of this connection - SourcePort int `bson:"id_origin_p" bro:"id.orig_p" brotype:"port"` + SourcePort int `bson:"id_orig_p" bro:"id.orig_p" brotype:"port"` // Destination is the destination of the connection Destination string `bson:"id_resp_h" bro:"id.resp_h" brotype:"addr"` // DestinationPort is the port at the destination host @@ -84,7 +84,7 @@ func (line *HTTP) TargetCollection(config *config.StructureTableCfg) string { //Indices gives MongoDB indices that should be used with the collection 
func (line *HTTP) Indices() []string { - return []string{"$hashed:id_origin_h", "$hashed:id_resp_h", "$hashed:user_agent", "uid"} + return []string{"$hashed:id_orig_h", "$hashed:id_resp_h", "$hashed:user_agent", "uid"} } // Normalize fixes up absolute uri's as read by bro to be relative From 561cd49cc04f9c1e6c13e6035f61d43ac53d604b Mon Sep 17 00:00:00 2001 From: logan Date: Thu, 25 Jan 2018 16:50:57 -0700 Subject: [PATCH 074/117] missed an origin in blacklisted --- analysis/blacklist/urls.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 21a1e84d..382f97e3 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ -159,7 +159,7 @@ func fillBlacklistedURL(blURL *data.BlacklistedURL, longURL, db, {"$project", bson.M{ "orig_bytes": "$conn.orig_bytes", "resp_bytes": "$conn.resp_bytes", - "src": "$conn.id_origin_h", + "src": "$conn.id_orig_h", }}, }, { From 2fad7484fc48739e746091aef9ed0356623dcb71 Mon Sep 17 00:00:00 2001 From: samuel carroll Date: Fri, 26 Jan 2018 09:48:31 -0700 Subject: [PATCH 075/117] Adding to the 'adding tests' section of the Contributing.md --- Contributing.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Contributing.md b/Contributing.md index d9ecd245..fc91f559 100644 --- a/Contributing.md +++ b/Contributing.md @@ -34,6 +34,12 @@ There are several ways to contribute code to the RITA project. 
* All too often code is developed to meet milestones which only undergoes empirical, human testing * We would love to see unit tests throughout RITA + * Currently we only have unit tests for Beacon check under analysis/beacon to + see how tests can be written neatly and easily + * Also when writing tests it is advisable to work backwards, start with what + result you want to get and then work backwards through the code + * When you're ready to test code run `go test ./...` from the root directory + of the project * Feel free to refactor code to increase our ability to test it * Join our [IRC](https://github.com/ocmdev/rita/wiki/RITA-Gittiquette) to learn more From f5b54033083cf534cdf2b7d0da71f3ea8a6d490f Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Fri, 26 Jan 2018 18:04:19 -0700 Subject: [PATCH 076/117] Fix dockerfile due to dep --- Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 84337995..32b03811 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,9 +10,8 @@ #RITA works best with docker-compose. Docker-compose lets you set these mounts #and additionally connect it to MongoDB with ease. FROM golang:1.8-alpine as rita-builder -RUN apk update && apk upgrade && apk add --no-cache git && apk add --no-cache make && apk add --no-cache ca-certificates -RUN mkdir /logs -RUN mkdir $HOME/.rita/ +RUN apk update && apk upgrade && apk add --no-cache git make ca-certificates wget +RUN wget -q -O /go/bin/dep https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 && chmod +x /go/bin/dep WORKDIR /go/src/github.com/ocmdev/rita COPY . . 
RUN make From a146d235f41cca7ae1094a665ec4e8c02f3311ef Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Fri, 26 Jan 2018 18:24:36 -0700 Subject: [PATCH 077/117] Pin dep version; add rita to home dir in dockerfile --- Dockerfile | 5 +++-- install.sh | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 32b03811..36b1de57 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,8 @@ RUN make FROM alpine:latest -RUN mkdir /root/.rita -COPY --from=rita-builder /go/src/github.com/ocmdev/rita/etc/tables.yaml /root/.rita/ +WORKDIR /root +RUN mkdir .rita +COPY --from=rita-builder /go/src/github.com/ocmdev/rita/etc/tables.yaml .rita/ COPY --from=rita-builder /go/src/github.com/ocmdev/rita/rita . ENTRYPOINT ["./rita"] diff --git a/install.sh b/install.sh index a5fa4b4a..0cc9d2ee 100755 --- a/install.sh +++ b/install.sh @@ -321,8 +321,9 @@ __install() { ( # Build RITA # Ensure go dep is installed - go get -u github.com/golang/dep/cmd/dep - + wget -q -O $GOPATH/bin/dep https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 + chmod +x $GOPATH/bin/dep + mkdir -p $GOPATH/src/github.com/ocmdev/rita # Get the install script's directory in case it's run from elsewhere cp -R "$(dirname "$(realpath ${0})")/." 
$GOPATH/src/github.com/ocmdev/rita/ From b39d191811760b4422fced3464e09a28c4e6e4d3 Mon Sep 17 00:00:00 2001 From: logan Date: Thu, 8 Feb 2018 17:34:00 -0700 Subject: [PATCH 078/117] Remove erroneous FileName field in HTTP --- parser/parsetypes/http.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index 59f1f719..43edf55b 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -51,8 +51,6 @@ type HTTP struct { InfoCode int64 `bson:"info_code" bro:"info_code" brotype:"count"` // InfoMsg holds the last seen 1xx message string InfoMsg string `bson:"info_msg" bro:"info_msg" brotype:"string"` - // FileName contains the name of the requested file - FileName string `bson:"filename" bro:"filename" brotype:"string"` // Tags contains a set of indicators of various attributes related to a particular req and // response pair Tags []string `bson:"tags" bro:"tags" brotype:"set[enum]"` From 0efeb3c884f17be4e7e7cc9ebf826296898865da Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Fri, 9 Feb 2018 12:03:48 -0600 Subject: [PATCH 079/117] Updating default install and config locations --- config/config.go | 22 ++------ etc/rita.yaml | 6 +-- install.sh | 132 +++++++++++++++++++++++++++++++---------------- 3 files changed, 95 insertions(+), 65 deletions(-) diff --git a/config/config.go b/config/config.go index 5b40e855..495b508f 100644 --- a/config/config.go +++ b/config/config.go @@ -1,9 +1,7 @@ package config import ( - "fmt" "os" - "os/user" "reflect" ) @@ -26,33 +24,21 @@ type ( //userConfigPath specifies the path of RITA's static config file //relative to the user's home directory -const userConfigPath = "/.rita/config.yaml" +const userConfigPath = "/etc/rita/config.yaml" //tableConfigPath specifies teh path of RITA's table config file //relative to the user's home directory -const tableConfigPath = "/.rita/tables.yaml" +const tableConfigPath = "/etc/rita/tables.yaml" //NOTE: If go ever gets default 
parameters, default the config options to "" // GetConfig retrieves a configuration in order of precedence func GetConfig(userConfig string, tableConfig string) (*Config, error) { - //var user string - var currUser *user.User - if userConfig == "" || tableConfig == "" { - // Get the user's homedir - var err error - currUser, err = user.Current() - if err != nil { - fmt.Fprintf(os.Stderr, "Could not get user info: %s\n", err.Error()) - return nil, err - } - } - if userConfig == "" { - userConfig = currUser.HomeDir + userConfigPath + userConfig = userConfigPath } if tableConfig == "" { - tableConfig = currUser.HomeDir + tableConfigPath + tableConfig = tableConfigPath } return loadSystemConfig(userConfig, tableConfig) diff --git a/etc/rita.yaml b/etc/rita.yaml index 1ccfdfca..67ad3b4c 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -27,8 +27,8 @@ LogConfig: # 0 = error LogLevel: 2 - # LogPath is the path for Rita's logs, generally this just defaults to - # $HOME/.rita/logs. Logs will only be written here if LogToFile is true + # LogPath is the path for Rita's logs. Make sure permissions are set accordingly. + # Logs will only be written here if LogToFile is true RitaLogPath: $HOME/.rita/logs LogToFile: true @@ -85,7 +85,7 @@ BlackListed: # not be queried. SafeBrowsing: APIKey: "" - Database: $HOME/.rita/safebrowsing + Database: /etc/rita/safebrowsing # These are custom blacklists that you may define. They are lists of either # file paths or urls. These custom blacklists are expected to be simple, diff --git a/install.sh b/install.sh index 0cc9d2ee..d01ec4dc 100755 --- a/install.sh +++ b/install.sh @@ -60,12 +60,12 @@ This script will: 1) Download and install Bro IDS, Go, and MongoDB. -2) Set up a Go development environment in order to 'go get' -and 'build' RITA. This requires us to create a directory "go" -in your home folder and add new PATH and GOPATH entries +2) Set up a Go development environment in order to install +RITA. 
This requires us to create new directories +in $_INSTALL_PREFIX and add new PATH and GOPATH entries to your .bashrc. -3) Create a configuration directory for RITA in your home folder called .rita +3) Create a configuration directory for RITA in $_CONFIG_PATH HEREDOC @@ -108,12 +108,15 @@ __checkPermissions() { } __uninstall() { - printf "\t[!] Removing $GOPATH/bin/rita \n" - rm -rf $GOPATH/bin/rita - printf "\t[!] Removing $GOPATH/src/github.com/ocmdev \n" - rm -rf $GOPATH/src/github.com/ocmdev - printf "\t[!] Removing $HOME/.rita \n" - rm -rf $HOME/.rita + if [ "$_INSTALL_PREFIX" != "/opt/rita" ]; then + # Too risky to delete files if we don't know where it was installed (e.g. could have installed to /) + printf "\t[!] Automatic uninstall from a non-standard location is not supported \n" + else + printf "\t[!] Removing /opt/rita \n" + rm -rf /opt/rita + fi + printf "\t[!] Removing $_CONFIG_PATH \n" + rm -rf "$_CONFIG_PATH" } __setPkgMgr() { @@ -231,12 +234,12 @@ __install_go() { # Check if the GOPATH isn't set if [ -z ${GOPATH+x} ]; then ( # Set up the GOPATH - mkdir -p $HOME/go/{src,pkg,bin} - echo 'export GOPATH=$HOME/go' >> $HOME/.bashrc - echo 'export PATH=$PATH:$GOPATH/bin' >> $HOME/.bashrc + mkdir -p $_INSTALL_PREFIX/{src,pkg,bin} + #echo "export GOPATH=$_INSTALL_PREFIX" >> $HOME/.bashrc + echo "export PATH=\$PATH:$_INSTALL_PREFIX/bin" >> $HOME/.bashrc ) & __load "\t[+] Configuring Go dev environment" - export GOPATH=$HOME/go - export PATH=$PATH:$GOPATH/bin + export GOPATH=$_INSTALL_PREFIX + export PATH=$PATH:$_INSTALL_PREFIX/bin fi } @@ -284,7 +287,10 @@ __install_mongodb() { __install() { # Check if RITA is already installed, if so ask if this is a re-install - if [ ! -z $(command -v rita) ] || [ -d $HOME/.rita ]; then + if [ ! -z $(command -v rita) ] \ + || [ -d /opt/rita ] \ + || [ -d $_CONFIG_PATH ] + then printf "[+] RITA is already installed.\n" read -p "[-] Would you like to erase it and re-install? 
[y/n] " -r if [[ $REPLY =~ ^[Yy]$ ]] @@ -312,12 +318,18 @@ __install() { # Determine the OS, needs lsb-release __setOS - __install_bro + if [[ "${_INSTALL_BRO}" = "true" ]] + then + __install_bro + fi - __install_go + __install_go __check_go_version - __install_mongodb & __load "[+] Installing MongoDB" + if [[ "${_INSTALL_MONGO}" = "true" ]] + then + __install_mongodb & __load "[+] Installing MongoDB" + fi ( # Build RITA # Ensure go dep is installed @@ -329,25 +341,23 @@ __install() { cp -R "$(dirname "$(realpath ${0})")/." $GOPATH/src/github.com/ocmdev/rita/ cd $GOPATH/src/github.com/ocmdev/rita make install > /dev/null + # Allow any user to execute rita + chmod 755 $GOPATH/bin/rita ) & __load "[+] Installing RITA" ( # Install the base configuration files - mkdir $HOME/.rita - mkdir $HOME/.rita/logs + mkdir -p $_CONFIG_PATH cd $GOPATH/src/github.com/ocmdev/rita - cp ./LICENSE $HOME/.rita/LICENSE - cp ./etc/rita.yaml $HOME/.rita/config.yaml - cp ./etc/tables.yaml $HOME/.rita/tables.yaml - ) & __load "[+] Installing config files to $HOME/.rita" - - - # If the user is using sudo, give ownership to the sudo user - if [ ! -z ${SUDO_USER+x} ] - then - chown -R $SUDO_USER:$SUDO_USER $HOME/go - chown -R $SUDO_USER:$SUDO_USER $HOME/.rita - fi + cp ./LICENSE $_CONFIG_PATH/LICENSE + cp ./etc/rita.yaml $_CONFIG_PATH/config.yaml + cp ./etc/tables.yaml $_CONFIG_PATH/tables.yaml + touch $_CONFIG_PATH/safebrowsing + chmod 755 $_CONFIG_PATH + # All users can read and write rita's config file + chmod 666 $_CONFIG_PATH/config.yaml + chmod 666 $_CONFIG_PATH/safebrowsing + ) & __load "[+] Installing config files to $_CONFIG_PATH" echo -e " In order to finish the installation, reload your bash config @@ -363,28 +373,62 @@ to stop MongoDB, run 'sudo service mongod stop'." 
} # start point for installer -__entry() { - - # Check for help - if [[ "${1:-}" =~ ^-h|--help$ ]] - then - __help - exit 0 - fi +__entry() { + _INSTALL_BRO=true + _INSTALL_MONGO=true + _INSTALL_PREFIX=/opt/rita + _CONFIG_PATH=/etc/rita + _INSTALL_RITA=true + _UNINSTALL_RITA=false + + # Parse through command args + while [[ $# -gt 0 ]]; do + case $1 in + -h|--help) + # Display help and exit + __help + exit 0 + ;; + -u|--uninstall) + _UNINSTALL_RITA=true + _INSTALL_RITA=false + _INSTALL_BRO=false + _INSTALL_MONGO=false + ;; + --disable-bro) + _INSTALL_BRO=false + ;; + --disable-mongo) + _INSTALL_MONGO=false + ;; + --prefix) + shift + _INSTALL_PREFIX="$1" + ;; + *) + ;; + esac + shift + done # Check to see if the user has permission to install RITA if __checkPermissions then - # Check if we are uninstalling - if [[ "${1:-}" =~ ^-u|--uninstall ]] + if [[ "${_UNINSTALL_RITA}" = "true" ]] then __uninstall - else + exit 0 + fi + if [[ "${_INSTALL_RITA}" = "true" ]] + then __install + exit 0 fi else printf "You do NOT have permission install RITA\n\n" fi + + } __entry "${@:-}" From 43a81546c50e910ffd27dc7b04dfd511ce7d2f91 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Fri, 9 Feb 2018 15:46:23 -0600 Subject: [PATCH 080/117] Updating rita config with defaults that work with the default install --- etc/rita.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/etc/rita.yaml b/etc/rita.yaml index 67ad3b4c..daf4ff45 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -37,19 +37,19 @@ LogConfig: # The section Bro configures the bro ingestor Bro: # Path to a top level directory of log files - LogPath: /path/to/top/level/directory/ + LogPath: /opt/bro/logs/ # All databases in this test will get prefixed with the database prefix - DBPrefix: PrefixForDatabase + DBPrefix: rita- # Subdirectories of LogPath may be separated into different databases for the # test. Map each directory to the database you wish that directory to wind up # in. 
This is good for running tests against multiple subnets and network # segments. Put the directory name on the left side of the colon, and the # database you wish the logs from that directory to go in on the right side. - DirectoryMap: - UniqueDir: SeparateDatabaseName - UniqueDir2: SeparateDatabaseName2 + # DirectoryMap: + # UniqueDir: SeparateDatabaseName + # UniqueDir2: SeparateDatabaseName2 # If set, files which don't match the directory map will be placed # in this default database. @@ -66,7 +66,7 @@ Bro: # of log files in the logpath and wish to treat each day as a separate test. # 24 hours worth of data is the ideal for analysis, and using dates will ensure # that tests are broken into 24 hour periods on midnight in the current timezone. - UseDates: false + UseDates: true # The number of records shipped off to MongoDB at a time. Increasing # the size of the buffer will improve import timings but will leave more From c03cd27af90c43ff5242e6bf90e65b555ba85b2d Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Fri, 9 Feb 2018 19:47:49 -0600 Subject: [PATCH 081/117] Make bro logs readable to users --- install.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/install.sh b/install.sh index d01ec4dc..372ccc25 100755 --- a/install.sh +++ b/install.sh @@ -266,6 +266,7 @@ __install_bro() { echo 'export PATH=$PATH:/opt/bro/bin' >> $HOME/.bashrc PATH=$PATH:/opt/bro/bin fi + chmod 2755 /opt/bro/logs } __install_mongodb() { From ae911c572bbdd4a2a54659e125191ce381f588a5 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Fri, 9 Feb 2018 19:48:11 -0600 Subject: [PATCH 082/117] Modify config to activate UseDates by default --- etc/rita.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/etc/rita.yaml b/etc/rita.yaml index daf4ff45..3128167b 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -39,8 +39,8 @@ Bro: # Path to a top level directory of log files LogPath: /opt/bro/logs/ - # All databases in this test will get prefixed with the database prefix - 
DBPrefix: rita- + # If enabled, all databases in this test will get prefixed with the database prefix + # DBPrefix: DBPrefix- # Subdirectories of LogPath may be separated into different databases for the # test. Map each directory to the database you wish that directory to wind up @@ -53,7 +53,7 @@ Bro: # If set, files which don't match the directory map will be placed # in this default database. - #DefaultDatabase: DefaultDatabaseName + DefaultDatabase: rita # There needs to be one metadatabase per test. This database holds information # about the test and the files related to the test. If there are several From 892273538f6a6d14b7e68b41be90d8bd7c244c87 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Sun, 4 Feb 2018 21:06:50 -0700 Subject: [PATCH 083/117] Add the ability to split logs into dbs by subfolder, clean up the cli --- commands/analyze.go | 8 ++--- commands/commands.go | 13 ++------ commands/delete-database.go | 25 +++++++------- commands/import.go | 55 ++++++++++++++++--------------- commands/reporting.go | 13 +++----- commands/reset-analysis.go | 13 ++++---- commands/show-beacons.go | 15 +++++---- commands/show-bl-hostname.go | 4 +-- commands/show-bl-ip.go | 12 +++---- commands/show-bl-url.go | 4 +-- commands/show-explodedDns.go | 15 +++++---- commands/show-long-connections.go | 15 +++++---- commands/show-scans.go | 17 +++++----- commands/show-urls.go | 32 +++++++++--------- commands/show-user-agents.go | 15 +++++---- config/running.go | 28 ++++++++++++++++ config/static.go | 9 ++--- database/meta.go | 3 +- etc/rita.yaml | 19 ----------- parser/fsimporter.go | 8 ++--- parser/indexedfile.go | 27 ++++++++------- 21 files changed, 181 insertions(+), 169 deletions(-) diff --git a/commands/analyze.go b/commands/analyze.go index ad396ec1..96e4deb6 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -21,14 +21,14 @@ import ( func init() { analyzeCommand := cli.Command{ - Name: "analyze", - Usage: "Analyze imported databases, if no [database,d] flag 
is specified will attempt all", + Name: "analyze", + Usage: "Analyze imported databases. If no database is specified, every database will be analyzed.", + ArgsUsage: "[database]", Flags: []cli.Flag{ - databaseFlag, configFlag, }, Action: func(c *cli.Context) error { - return analyze(c.String("database"), c.String("config")) + return analyze(c.Args().Get(0), c.String("config")) }, } diff --git a/commands/commands.go b/commands/commands.go index 2ada596f..066e4dbd 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -12,24 +12,17 @@ var ( // below are some prebuilt flags that get used often in various commands - // databaseFlag allows users to specify which database they'd like to use - databaseFlag = cli.StringFlag{ - Name: "database, d", - Usage: "execute this command against the database named `NAME`", - Value: "", - } - // threadFlag allows users to specify how many threads should be used threadFlag = cli.IntFlag{ Name: "threads, t", - Usage: "use `N` threads when executing this command", + Usage: "Use `N` threads when executing this command", Value: runtime.NumCPU(), } // configFlag allows users to specify an alternate config file to use configFlag = cli.StringFlag{ Name: "config, c", - Usage: "use `CONFIGFILE` as configuration when running this command", + Usage: "Use a given `CONFIG_FILE` when running this command", Value: "", } @@ -37,7 +30,7 @@ var ( // report instead of the simple csv style output humanFlag = cli.BoolFlag{ Name: "human-readable, H", - Usage: "print a report instead of csv", + Usage: "Print a report instead of csv", } blSortFlag = cli.StringFlag{ diff --git a/commands/delete-database.go b/commands/delete-database.go index 78fd1d22..0b2b49ac 100644 --- a/commands/delete-database.go +++ b/commands/delete-database.go @@ -13,19 +13,20 @@ import ( func init() { reset := cli.Command{ - Name: "delete-database", - Usage: "Delete an imported database", + Name: "delete-database", + Usage: "Delete an imported database", + ArgsUsage: "", 
Flags: []cli.Flag{ - databaseFlag, configFlag, }, Action: func(c *cli.Context) error { res := database.InitResources(c.String("config")) - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } - fmt.Println("Are you sure you want to delete database", c.String("database"), "[Y/n]") + fmt.Print("Are you sure you want to delete database ", db, " [y/N] ") read := bufio.NewReader(os.Stdin) @@ -36,13 +37,15 @@ func init() { response = strings.ToLower(strings.TrimSpace(response)) if response == "y" || response == "yes" { - fmt.Println("Deleting database:", c.String("database")) - return res.MetaDB.DeleteDB(c.String("database")) - } else if response == "n" || response == "no" { - return cli.NewExitError("Database "+c.String("database")+" was not deleted.", 0) + fmt.Println("Deleting database:", db) + err = res.MetaDB.DeleteDB(db) + if err != nil { + return cli.NewExitError("ERROR: "+err.Error(), -1) + } } else { - return cli.NewExitError("Aborted, nothing deleted.", -1) + return cli.NewExitError("Database "+db+" was not deleted.", 0) } + return nil }, } diff --git a/commands/import.go b/commands/import.go index abb1db2d..8928f2bc 100644 --- a/commands/import.go +++ b/commands/import.go @@ -2,7 +2,10 @@ package commands import ( "fmt" + "os" + "strings" + "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/parser" "github.com/ocmdev/rita/util" @@ -11,19 +14,15 @@ import ( func init() { importCommand := cli.Command{ - Name: "import", - Usage: "Import bro logs into the database", + Name: "import", + Usage: "Import bro logs into a database", + ArgsUsage: " ", Flags: []cli.Flag{ threadFlag, configFlag, cli.StringFlag{ - Name: "import-dir, i", - Usage: "Import bro logs from a `directory` into a database. This overides the config file. 
Must be used with --database, -d", - Value: "", - }, - cli.StringFlag{ - Name: "database, d", - Usage: "Store imported bro logs into a database with the given `name`. This overides the config file. Must be used with --import-dir, -i", + Name: "split, s", + Usage: "Split the imported bro logs. Accepted values: \"subfolder\", \"date\"", Value: "", }, }, @@ -36,33 +35,35 @@ func init() { // doImport runs the importer func doImport(c *cli.Context) error { res := database.InitResources(c.String("config")) - importDir := c.String("import-dir") - databaseName := c.String("database") + importDir := c.Args().Get(0) + targetDatabase := c.Args().Get(1) + splitStrategy := strings.ToLower(c.String("split")) threads := util.Max(c.Int("threads")/2, 1) - //one flag was set - if importDir != "" && databaseName == "" || importDir == "" && databaseName != "" { - return cli.NewExitError( - "Import failed.\nUse 'rita import' to import the directories "+ - "specified in the config file or 'rita import -i [import-dir] -d [database-name]' "+ - "to import bro logs from a given directory.", -1) + if importDir == "" || targetDatabase == "" { + return cli.NewExitError("Specify a directory to import and a target database", -1) + } + + //Remove tailing / when applicable + if strings.HasSuffix(importDir, string(os.PathSeparator)) { + importDir = importDir[:len(importDir)-len(string(os.PathSeparator))] } - //both flags were set - if importDir != "" && databaseName != "" { - res.Config.S.Bro.LogPath = importDir - res.Config.S.Bro.DBPrefix = "" - //Clear out the directory map and set the default database - res.Config.S.Bro.DirectoryMap = make(map[string]string) - res.Config.S.Bro.DefaultDatabase = databaseName + res.Config.R.Bro.ImportDirectory = importDir + res.Config.R.Bro.TargetDatabase = targetDatabase + res.Config.R.Bro.SplitStrategy = config.SplitNone + if splitStrategy == "subfolder" { + res.Config.R.Bro.SplitStrategy = config.SplitSubfolder + } else if splitStrategy == "date" { + 
res.Config.R.Bro.SplitStrategy = config.SplitDate } - res.Log.Infof("Importing %s\n", res.Config.S.Bro.LogPath) - fmt.Println("[+] Importing " + res.Config.S.Bro.LogPath) + res.Log.Infof("Importing %s\n", res.Config.R.Bro.ImportDirectory) + fmt.Println("[+] Importing " + res.Config.R.Bro.ImportDirectory) importer := parser.NewFSImporter(res, threads, threads) datastore := parser.NewMongoDatastore(res.DB.Session, res.MetaDB, res.Config.S.Bro.ImportBuffer, res.Log) importer.Run(datastore) - res.Log.Infof("Finished importing %s\n", res.Config.S.Bro.LogPath) + res.Log.Infof("Finished importing %s\n", res.Config.R.Bro.ImportDirectory) return nil } diff --git a/commands/reporting.go b/commands/reporting.go index 4b14c400..7ef874ba 100644 --- a/commands/reporting.go +++ b/commands/reporting.go @@ -9,19 +9,16 @@ import ( func init() { command := cli.Command{ - Name: "html-report", - Usage: "Write analysis information to html output", + Name: "html-report", + Usage: "Create an html report for an analyzed database. 
" + + "If no database is specified, a report will be created for every database.", + ArgsUsage: "[database]", Flags: []cli.Flag{ configFlag, - cli.StringFlag{ - Name: "database, d", - Usage: "Specify which databases to export, otherwise will export all databases", - Value: "", - }, }, Action: func(c *cli.Context) error { res := database.InitResources(c.String("config")) - databaseName := c.String("database") + databaseName := c.Args().Get(0) var databases []string if databaseName != "" { databases = append(databases, databaseName) diff --git a/commands/reset-analysis.go b/commands/reset-analysis.go index d52019e4..70d4478b 100644 --- a/commands/reset-analysis.go +++ b/commands/reset-analysis.go @@ -13,18 +13,19 @@ import ( func init() { reset := cli.Command{ - Name: "reset-analysis", - Usage: "Reset analysis of one or more databases", + Name: "reset-analysis", + Usage: "Reset analysis of a database", + ArgsUsage: "", Flags: []cli.Flag{ - databaseFlag, configFlag, }, Action: func(c *cli.Context) error { res := database.InitResources(c.String("config")) - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } - return cleanAnalysis(c.String("database"), res) + return cleanAnalysis(db, res) }, } diff --git a/commands/show-beacons.go b/commands/show-beacons.go index a6adc46f..829c884c 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -13,11 +13,11 @@ import ( func init() { command := cli.Command{ - Name: "show-beacons", - Usage: "Print beacon information to standard out", + Name: "show-beacons", + Usage: "Print hosts which show signs of C2 software", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, configFlag, }, Action: showBeacons, @@ -27,18 +27,19 @@ func init() { } func showBeacons(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + 
db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } res := database.InitResources(c.String("config")) - res.DB.SelectDB(c.String("database")) + res.DB.SelectDB(db) var data []beaconData.BeaconAnalysisView ssn := res.DB.Session.Copy() resultsView := beacon.GetBeaconResultsView(res, ssn, 0) if resultsView == nil { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } resultsView.All(&data) ssn.Close() diff --git a/commands/show-bl-hostname.go b/commands/show-bl-hostname.go index 3d2e7cd9..7c0d3258 100644 --- a/commands/show-bl-hostname.go +++ b/commands/show-bl-hostname.go @@ -19,9 +19,9 @@ import ( func init() { blHostnames := cli.Command{ - Name: "show-bl-hostnames", + Name: "show-bl-hostnames", + ArgsUsage: "", Flags: []cli.Flag{ - databaseFlag, humanFlag, blConnFlag, blSortFlag, diff --git a/commands/show-bl-ip.go b/commands/show-bl-ip.go index f52ba8f4..b356c043 100644 --- a/commands/show-bl-ip.go +++ b/commands/show-bl-ip.go @@ -20,9 +20,9 @@ const endl = "\r\n" func init() { blSourceIPs := cli.Command{ - Name: "show-bl-source-ips", + Name: "show-bl-source-ips", + ArgsUsage: "", Flags: []cli.Flag{ - databaseFlag, humanFlag, blConnFlag, blSortFlag, @@ -33,9 +33,9 @@ func init() { } blDestIPs := cli.Command{ - Name: "show-bl-dest-ips", + Name: "show-bl-dest-ips", + ArgsUsage: "", Flags: []cli.Flag{ - databaseFlag, humanFlag, blConnFlag, blSortFlag, @@ -49,12 +49,12 @@ func init() { } func parseBLArgs(c *cli.Context) (string, string, bool, bool, error) { - db := c.String("database") + db := c.Args().Get(0) sort := c.String("sort") connected := c.Bool("connected") human := c.Bool("human-readable") if db == "" { - return db, sort, connected, human, cli.NewExitError("Specify a database with -d", -1) + return db, sort, connected, human, cli.NewExitError("Specify a database", -1) } if sort != "conn" && sort != "uconn" && sort != 
"total_bytes" { return db, sort, connected, human, cli.NewExitError("Invalid option passed to sort flag", -1) diff --git a/commands/show-bl-url.go b/commands/show-bl-url.go index d217f44a..3dc17747 100644 --- a/commands/show-bl-url.go +++ b/commands/show-bl-url.go @@ -18,9 +18,9 @@ import ( func init() { blURLs := cli.Command{ - Name: "show-bl-urls", + Name: "show-bl-urls", + ArgsUsage: "", Flags: []cli.Flag{ - databaseFlag, humanFlag, blConnFlag, blSortFlag, diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index ba4aac0f..85286355 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -13,27 +13,28 @@ import ( func init() { command := cli.Command{ - Name: "show-exploded-dns", - Usage: "Print dns analysis. Exposes covert dns channels.", + Name: "show-exploded-dns", + Usage: "Print dns analysis. Exposes covert dns channels.", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, configFlag, }, Action: func(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } res := database.InitResources(c.String("config")) var explodedResults []dns.ExplodedDNS - iter := res.DB.Session.DB(c.String("database")).C(res.Config.T.DNS.ExplodedDNSTable).Find(nil) + iter := res.DB.Session.DB(db).C(res.Config.T.DNS.ExplodedDNSTable).Find(nil) iter.Sort("-subdomains").All(&explodedResults) if len(explodedResults) == 0 { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } if c.Bool("human-readable") { diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index 71ac7067..ea5ae1f3 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -14,29 +14,30 @@ import ( func init() { command := cli.Command{ - Name: 
"show-long-connections", - Usage: "Print long connections and relevant information", + Name: "show-long-connections", + Usage: "Print long connections and relevant information", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, configFlag, }, Action: func(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } res := database.InitResources(c.String("config")) var longConns []data.Conn - coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Structure.ConnTable) + coll := res.DB.Session.DB(db).C(res.Config.T.Structure.ConnTable) sortStr := "-duration" coll.Find(nil).Sort(sortStr).All(&longConns) if len(longConns) == 0 { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } if c.Bool("human-readable") { diff --git a/commands/show-scans.go b/commands/show-scans.go index 940b6239..2a2c99e7 100644 --- a/commands/show-scans.go +++ b/commands/show-scans.go @@ -14,31 +14,32 @@ import ( func init() { command := cli.Command{ - Name: "show-scans", - Usage: "Print scanning information", + Name: "show-scans", + Usage: "Print scanning information", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, configFlag, cli.BoolFlag{ Name: "ports, P", - Usage: "show which individual ports were scanned", + Usage: "Show which individual ports were scanned", }, }, Action: func(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } showPorts := c.Bool("ports") res := database.InitResources(c.String("config")) var scans []scanning.Scan - coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Scanning.ScanTable) + coll := 
res.DB.Session.DB(db).C(res.Config.T.Scanning.ScanTable) coll.Find(nil).All(&scans) if len(scans) == 0 { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } if c.Bool("human-readable") { diff --git a/commands/show-urls.go b/commands/show-urls.go index ecd9c26f..7f4897ab 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -14,27 +14,28 @@ import ( func init() { longURLs := cli.Command{ - Name: "show-long-urls", - Usage: "Print the longest urls", + Name: "show-long-urls", + Usage: "Print the longest urls", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, configFlag, }, Action: func(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } res := database.InitResources(c.String("config")) var urls []urls.URL - coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.Urls.UrlsTable) + coll := res.DB.Session.DB(db).C(res.Config.T.Urls.UrlsTable) coll.Find(nil).Sort("-length").All(&urls) if len(urls) == 0 { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } if c.Bool("human-readable") { @@ -55,26 +56,27 @@ func init() { vistedURLs := cli.Command{ - Name: "show-most-visited-urls", - Usage: "Print the most visited urls", + Name: "show-most-visited-urls", + Usage: "Print the most visited urls", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, }, Action: func(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } - res := database.InitResources("") + res := database.InitResources(c.String("config")) var urls []urls.URL - coll := 
res.DB.Session.DB(c.String("database")).C(res.Config.T.Urls.UrlsTable) + coll := res.DB.Session.DB(db).C(res.Config.T.Urls.UrlsTable) coll.Find(nil).Sort("-count").All(&urls) if len(urls) == 0 { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } if c.Bool("human-readable") { diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index f7650923..7694a18b 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -13,11 +13,11 @@ import ( func init() { command := cli.Command{ - Name: "show-user-agents", - Usage: "Print user agent information", + Name: "show-user-agents", + Usage: "Print user agent information", + ArgsUsage: "", Flags: []cli.Flag{ humanFlag, - databaseFlag, cli.BoolFlag{ Name: "least-used, l", Usage: "Sort the user agents from least used to most used.", @@ -25,14 +25,15 @@ func init() { configFlag, }, Action: func(c *cli.Context) error { - if c.String("database") == "" { - return cli.NewExitError("Specify a database with -d", -1) + db := c.Args().Get(0) + if db == "" { + return cli.NewExitError("Specify a database", -1) } res := database.InitResources(c.String("config")) var agents []useragent.UserAgent - coll := res.DB.Session.DB(c.String("database")).C(res.Config.T.UserAgent.UserAgentTable) + coll := res.DB.Session.DB(db).C(res.Config.T.UserAgent.UserAgentTable) var sortStr string if c.Bool("least-used") { @@ -44,7 +45,7 @@ func init() { coll.Find(nil).Sort(sortStr).All(&agents) if len(agents) == 0 { - return cli.NewExitError("No results were found for "+c.String("database"), -1) + return cli.NewExitError("No results were found for "+db, -1) } if c.Bool("human-readable") { diff --git a/config/running.go b/config/running.go index 88c4592a..b39f4c3c 100644 --- a/config/running.go +++ b/config/running.go @@ -10,10 +10,31 @@ import ( "github.com/ocmdev/mgosec" ) +//BroSplitStrategy defines how log records should be split 
+//into separate databases upon import +type BroSplitStrategy int + +const ( + //SplitNone forces all log records into the same database + SplitNone BroSplitStrategy = 0 + + //SplitSubfolder appends the subfolder names to the root db name with dashes. + //For example a log at ./subfolder1/conn.log would be imported into + //DBName-subfolder1 + SplitSubfolder BroSplitStrategy = 1 + + //SplitDate splits the log records by the date according to the record's timestamp. + //NOTE: this option should not be used in "live" installations of RITA, + //since Bro may insert log records timestamped for the previous day into + //the next day's logset. + SplitDate BroSplitStrategy = 2 +) + type ( //RunningCfg holds configuration options that are parsed at run time RunningCfg struct { MongoDB MongoDBRunningCfg + Bro BroRunningCfg Version semver.Version } @@ -24,6 +45,13 @@ type ( TLSConfig *tls.Config } } + + //BroRunningCfg controls the file parser + BroRunningCfg struct { + ImportDirectory string + TargetDatabase string + SplitStrategy BroSplitStrategy + } ) // loadRunningConfig attempts deserializes data in the static config diff --git a/config/static.go b/config/static.go index 965eb70c..2d918097 100644 --- a/config/static.go +++ b/config/static.go @@ -81,13 +81,8 @@ type ( //BroStaticCfg controls the file parser BroStaticCfg struct { - LogPath string `yaml:"LogPath"` - DBPrefix string `yaml:"DBPrefix"` - MetaDB string `yaml:"MetaDB"` - DirectoryMap map[string]string `yaml:"DirectoryMap"` - DefaultDatabase string `yaml:"DefaultDatabase"` - UseDates bool `yaml:"UseDates"` - ImportBuffer int `yaml:"ImportBuffer"` + MetaDB string `yaml:"MetaDB"` + ImportBuffer int `yaml:"ImportBuffer"` } ) diff --git a/database/meta.go b/database/meta.go index a853a9fb..45b81192 100644 --- a/database/meta.go +++ b/database/meta.go @@ -5,6 +5,7 @@ import ( "sync" "github.com/blang/semver" + "github.com/ocmdev/rita/config" fpt "github.com/ocmdev/rita/parser/fileparsetypes" log 
"github.com/sirupsen/logrus" "gopkg.in/mgo.v2" @@ -44,7 +45,7 @@ func (m *MetaDBHandle) AddNewDB(name string) error { DBMetaInfo{ Name: name, Analyzed: false, - UsingDates: m.res.Config.S.Bro.UseDates, + UsingDates: m.res.Config.R.Bro.SplitStrategy == config.SplitDate, ImportVersion: m.res.Config.S.Version, }, ) diff --git a/etc/rita.yaml b/etc/rita.yaml index 3128167b..49aa1369 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -36,25 +36,6 @@ LogConfig: # The section Bro configures the bro ingestor Bro: - # Path to a top level directory of log files - LogPath: /opt/bro/logs/ - - # If enabled, all databases in this test will get prefixed with the database prefix - # DBPrefix: DBPrefix- - - # Subdirectories of LogPath may be separated into different databases for the - # test. Map each directory to the database you wish that directory to wind up - # in. This is good for running tests against multiple subnets and network - # segments. Put the directory name on the left side of the colon, and the - # database you wish the logs from that directory to go in on the right side. - # DirectoryMap: - # UniqueDir: SeparateDatabaseName - # UniqueDir2: SeparateDatabaseName2 - - # If set, files which don't match the directory map will be placed - # in this default database. - DefaultDatabase: rita - # There needs to be one metadatabase per test. This database holds information # about the test and the files related to the test. 
If there are several # subnets mapped in DirectoryMap each will be handled separately and that diff --git a/parser/fsimporter.go b/parser/fsimporter.go index f69cfbe1..3b99625a 100644 --- a/parser/fsimporter.go +++ b/parser/fsimporter.go @@ -49,7 +49,7 @@ func (fs *FSImporter) Run(datastore Datastore) { fmt.Println("\t[-] Finding files to parse") //find all of the bro log paths - files := readDir(fs.res.Config.S.Bro.LogPath, fs.res.Log) + files := readDir(fs.res.Config.R.Bro.ImportDirectory, fs.res.Log) //hash the files and get their stats indexedFiles := indexFiles(files, fs.indexingThreads, fs.res.Config, fs.res.Log) @@ -65,7 +65,7 @@ func (fs *FSImporter) Run(datastore Datastore) { indexedFiles = removeOldFilesFromIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) parseFiles(indexedFiles, fs.parseThreads, - fs.res.Config.S.Bro.UseDates, datastore, fs.res.Log) + fs.res.Config.R.Bro.SplitStrategy, datastore, fs.res.Log) datastore.Flush() updateFilesIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) @@ -156,7 +156,7 @@ func indexFiles(files []string, indexingThreads int, //errors and parses the bro files line by line into the database. 
//NOTE: side effect: this sets the dates field on the indexedFiles func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, - useDates bool, datastore Datastore, logger *log.Logger) { + splitStrategy config.BroSplitStrategy, datastore Datastore, logger *log.Logger) { //set up parallel parsing n := len(indexedFiles) parsingWG := new(sync.WaitGroup) @@ -215,7 +215,7 @@ func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, //figure out what database this line is heading for targetCollection := indexedFiles[j].TargetCollection targetDB := indexedFiles[j].TargetDatabase - if useDates { + if splitStrategy == config.SplitDate { targetDB += "-" + date } diff --git a/parser/indexedfile.go b/parser/indexedfile.go index 28ef83d5..83d19657 100644 --- a/parser/indexedfile.go +++ b/parser/indexedfile.go @@ -1,6 +1,7 @@ package parser import ( + "bytes" "crypto/md5" "errors" "fmt" @@ -82,7 +83,7 @@ func newIndexedFile(filePath string, config *config.Config, return toReturn, errors.New("Could not find a target collection for file") } - toReturn.TargetDatabase = getTargetDatabase(filePath, &config.S.Bro) + toReturn.TargetDatabase = getTargetDatabase(filePath, &config.R.Bro) if toReturn.TargetDatabase == "" { fileHandle.Close() return toReturn, errors.New("Could not find a dataset for file") @@ -113,16 +114,20 @@ func getFileHash(fileHandle *os.File, fInfo os.FileInfo) (string, error) { //getTargetDatabase assigns a database to a log file based on the path, //and the bro config -func getTargetDatabase(path string, broConfig *config.BroStaticCfg) string { - // check the directory map - for key, val := range broConfig.DirectoryMap { - if strings.Contains(path, key) { - return broConfig.DBPrefix + val +func getTargetDatabase(filePath string, broConfig *config.BroRunningCfg) string { + var targetDatabase bytes.Buffer + targetDatabase.WriteString(broConfig.TargetDatabase) + //Append subfolders to target db + if broConfig.SplitStrategy == config.SplitSubfolder { 
+ relativeStartIndex := len(broConfig.ImportDirectory) + pathSep := string(os.PathSeparator) + relativePath := filePath[relativeStartIndex+len(pathSep):] + pathPieces := strings.Split(relativePath, pathSep) + pathPieces = pathPieces[:len(pathPieces)-1] + for _, piece := range pathPieces { + targetDatabase.WriteString("-") + targetDatabase.WriteString(piece) } } - //If a default database is specified put it in there - if broConfig.DefaultDatabase != "" { - return broConfig.DBPrefix + broConfig.DefaultDatabase - } - return "" + return targetDatabase.String() } From 52aa03cd0a95fe6ee6e127dbb980410fb4a3a24d Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Fri, 9 Feb 2018 19:43:19 -0700 Subject: [PATCH 084/117] Removed UseDates feature and made subfolder split the only import strategy --- commands/analyze.go | 7 +-- commands/import.go | 57 ++++++++++++++----------- commands/reporting.go | 6 +-- commands/show-explodedDns.go | 2 +- config/running.go | 28 ------------ config/static.go | 13 +++++- database/meta.go | 49 ++------------------- etc/rita.yaml | 8 ++++ parser/fileparsetypes/fileparsetypes.go | 1 - parser/fsimporter.go | 33 ++------------ parser/indexedfile.go | 27 ++++++------ 11 files changed, 79 insertions(+), 152 deletions(-) diff --git a/commands/analyze.go b/commands/analyze.go index 96e4deb6..23893788 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -21,9 +21,10 @@ import ( func init() { analyzeCommand := cli.Command{ - Name: "analyze", - Usage: "Analyze imported databases. 
If no database is specified, every database will be analyzed.", - ArgsUsage: "[database]", + Name: "analyze", + Usage: "Analyze imported databases", + UsageText: "rita analyze [command options] [database]\n\n" + + "If no database is specified, every database will be analyzed.", Flags: []cli.Flag{ configFlag, }, diff --git a/commands/import.go b/commands/import.go index 8928f2bc..33316e23 100644 --- a/commands/import.go +++ b/commands/import.go @@ -3,9 +3,9 @@ package commands import ( "fmt" "os" + "path/filepath" "strings" - "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/parser" "github.com/ocmdev/rita/util" @@ -14,17 +14,17 @@ import ( func init() { importCommand := cli.Command{ - Name: "import", - Usage: "Import bro logs into a database", - ArgsUsage: " ", + Name: "import", + Usage: "Import bro logs into a target database", + UsageText: "rita import [command options] [ ]\n\n" + + "Logs directly in will be imported into a database" + + " named . Files in a subfolder of will be imported" + + " into -$SUBFOLDER_NAME. " + + " and will be loaded from the configuration file unless" + + " BOTH arguments are supplied.", Flags: []cli.Flag{ threadFlag, configFlag, - cli.StringFlag{ - Name: "split, s", - Usage: "Split the imported bro logs. 
Accepted values: \"subfolder\", \"date\"", - Value: "", - }, }, Action: doImport, } @@ -37,33 +37,38 @@ func doImport(c *cli.Context) error { res := database.InitResources(c.String("config")) importDir := c.Args().Get(0) targetDatabase := c.Args().Get(1) - splitStrategy := strings.ToLower(c.String("split")) threads := util.Max(c.Int("threads")/2, 1) - if importDir == "" || targetDatabase == "" { - return cli.NewExitError("Specify a directory to import and a target database", -1) + //check if one argument is set but not the other + if importDir != "" && targetDatabase == "" || + importDir == "" && targetDatabase != "" { + return cli.NewExitError("Both and are required to override the config file.", -1) } - //Remove tailing / when applicable - if strings.HasSuffix(importDir, string(os.PathSeparator)) { - importDir = importDir[:len(importDir)-len(string(os.PathSeparator))] - } + //check if the user overrode the config file + if importDir != "" && targetDatabase != "" { + //expand relative path + //nolint: vetshadow + importDir, err := filepath.Abs(importDir) + if err != nil { + return cli.NewExitError(err.Error(), -1) + } + + //Remove tailing / when applicable + if strings.HasSuffix(importDir, string(os.PathSeparator)) { + importDir = importDir[:len(importDir)-len(string(os.PathSeparator))] + } - res.Config.R.Bro.ImportDirectory = importDir - res.Config.R.Bro.TargetDatabase = targetDatabase - res.Config.R.Bro.SplitStrategy = config.SplitNone - if splitStrategy == "subfolder" { - res.Config.R.Bro.SplitStrategy = config.SplitSubfolder - } else if splitStrategy == "date" { - res.Config.R.Bro.SplitStrategy = config.SplitDate + res.Config.S.Bro.ImportDirectory = importDir + res.Config.S.Bro.DBRoot = targetDatabase } - res.Log.Infof("Importing %s\n", res.Config.R.Bro.ImportDirectory) - fmt.Println("[+] Importing " + res.Config.R.Bro.ImportDirectory) + res.Log.Infof("Importing %s\n", res.Config.S.Bro.ImportDirectory) + fmt.Println("[+] Importing " + 
res.Config.S.Bro.ImportDirectory) importer := parser.NewFSImporter(res, threads, threads) datastore := parser.NewMongoDatastore(res.DB.Session, res.MetaDB, res.Config.S.Bro.ImportBuffer, res.Log) importer.Run(datastore) - res.Log.Infof("Finished importing %s\n", res.Config.R.Bro.ImportDirectory) + res.Log.Infof("Finished importing %s\n", res.Config.S.Bro.ImportDirectory) return nil } diff --git a/commands/reporting.go b/commands/reporting.go index 7ef874ba..4b2db372 100644 --- a/commands/reporting.go +++ b/commands/reporting.go @@ -9,10 +9,10 @@ import ( func init() { command := cli.Command{ - Name: "html-report", - Usage: "Create an html report for an analyzed database. " + + Name: "html-report", + Usage: "Create an html report for an analyzed database", + UsageText: "rita html-report [command-options] [database]\n\n" + "If no database is specified, a report will be created for every database.", - ArgsUsage: "[database]", Flags: []cli.Flag{ configFlag, }, diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 85286355..5f0f0973 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -14,7 +14,7 @@ func init() { command := cli.Command{ Name: "show-exploded-dns", - Usage: "Print dns analysis. Exposes covert dns channels.", + Usage: "Print dns analysis. Exposes covert dns channels", ArgsUsage: "", Flags: []cli.Flag{ humanFlag, diff --git a/config/running.go b/config/running.go index b39f4c3c..88c4592a 100644 --- a/config/running.go +++ b/config/running.go @@ -10,31 +10,10 @@ import ( "github.com/ocmdev/mgosec" ) -//BroSplitStrategy defines how log records should be split -//into separate databases upon import -type BroSplitStrategy int - -const ( - //SplitNone forces all log records into the same database - SplitNone BroSplitStrategy = 0 - - //SplitSubfolder appends the subfolder names to the root db name with dashes. 
- //For example a log at ./subfolder1/conn.log would be imported into - //DBName-subfolder1 - SplitSubfolder BroSplitStrategy = 1 - - //SplitDate splits the log records by the date according to the record's timestamp. - //NOTE: this option should not be used in "live" installations of RITA, - //since Bro may insert log records timestamped for the previous day into - //the next day's logset. - SplitDate BroSplitStrategy = 2 -) - type ( //RunningCfg holds configuration options that are parsed at run time RunningCfg struct { MongoDB MongoDBRunningCfg - Bro BroRunningCfg Version semver.Version } @@ -45,13 +24,6 @@ type ( TLSConfig *tls.Config } } - - //BroRunningCfg controls the file parser - BroRunningCfg struct { - ImportDirectory string - TargetDatabase string - SplitStrategy BroSplitStrategy - } ) // loadRunningConfig attempts deserializes data in the static config diff --git a/config/static.go b/config/static.go index 2d918097..cff02cad 100644 --- a/config/static.go +++ b/config/static.go @@ -5,6 +5,7 @@ import ( "io/ioutil" "os" "reflect" + "strings" "time" yaml "gopkg.in/yaml.v2" @@ -81,8 +82,10 @@ type ( //BroStaticCfg controls the file parser BroStaticCfg struct { - MetaDB string `yaml:"MetaDB"` - ImportBuffer int `yaml:"ImportBuffer"` + ImportDirectory string `yaml:"ImportDirectory"` + DBRoot string `yaml:"DBRoot"` + MetaDB string `yaml:"MetaDB"` + ImportBuffer int `yaml:"ImportBuffer"` } ) @@ -113,6 +116,12 @@ func loadStaticConfig(cfgPath string) (*StaticCfg, error) { // set the socket time out in hours config.MongoDB.SocketTimeout *= time.Hour + // clean the import path + // remove tailing / when applicable + if strings.HasSuffix(config.Bro.ImportDirectory, string(os.PathSeparator)) { + config.Bro.ImportDirectory = config.Bro.ImportDirectory[:len(config.Bro.ImportDirectory)-len(string(config.Bro.ImportDirectory))] + } + // grab the version constants set by the build process config.Version = Version config.ExactVersion = ExactVersion diff --git 
a/database/meta.go b/database/meta.go index 45b81192..528e2b5c 100644 --- a/database/meta.go +++ b/database/meta.go @@ -5,7 +5,6 @@ import ( "sync" "github.com/blang/semver" - "github.com/ocmdev/rita/config" fpt "github.com/ocmdev/rita/parser/fileparsetypes" log "github.com/sirupsen/logrus" "gopkg.in/mgo.v2" @@ -27,7 +26,6 @@ type ( ID bson.ObjectId `bson:"_id,omitempty"` // Ident Name string `bson:"name"` // Top level name of the database Analyzed bool `bson:"analyzed"` // Has this database been analyzed - UsingDates bool `bson:"dates"` // Whether this db was created with dates enabled ImportVersion string `bson:"import_version"` // Rita version at import AnalyzeVersion string `bson:"analyze_version"` // Rita version at analyze } @@ -35,7 +33,6 @@ type ( // AddNewDB adds a new database to the DBMetaInfo table func (m *MetaDBHandle) AddNewDB(name string) error { - m.logDebug("AddNewDB", "entering") m.lock.Lock() defer m.lock.Unlock() ssn := m.res.DB.Session.Copy() @@ -45,7 +42,6 @@ func (m *MetaDBHandle) AddNewDB(name string) error { DBMetaInfo{ Name: name, Analyzed: false, - UsingDates: m.res.Config.R.Bro.SplitStrategy == config.SplitDate, ImportVersion: m.res.Config.S.Version, }, ) @@ -57,13 +53,11 @@ func (m *MetaDBHandle) AddNewDB(name string) error { return err } - m.logDebug("AddNewDB", "exiting") return nil } // DeleteDB removes a database managed by RITA func (m *MetaDBHandle) DeleteDB(name string) error { - m.logDebug("DeleteDB", "entering") m.lock.Lock() defer m.lock.Unlock() ssn := m.res.DB.Session.Copy() @@ -90,29 +84,17 @@ func (m *MetaDBHandle) DeleteDB(name string) error { if err != nil { return err } - if db.UsingDates { - date := name[len(name)-10:] - name = name[:len(name)-11] - _, err = ssn.DB(m.DB).C("files").RemoveAll( - bson.M{"database": name, "dates": date}, - ) - if err != nil { - return err - } - } else { - _, err = ssn.DB(m.DB).C("files").RemoveAll(bson.M{"database": name}) - if err != nil { - return err - } + + _, err = 
ssn.DB(m.DB).C("files").RemoveAll(bson.M{"database": name}) + if err != nil { + return err } - m.logDebug("DeleteDB", "exiting") return nil } // MarkDBAnalyzed marks a database as having been analyzed func (m *MetaDBHandle) MarkDBAnalyzed(name string, complete bool) error { - m.logDebug("MarkDBAnalyzed", "entering") m.lock.Lock() defer m.lock.Unlock() @@ -155,7 +137,6 @@ func (m *MetaDBHandle) MarkDBAnalyzed(name string, complete bool) error { }).Error("could not update database entry in meta") return err } - m.logDebug("MarkDBAnalyzed", "exiting") return nil } @@ -172,7 +153,6 @@ func (m *MetaDBHandle) GetDBMetaInfo(name string) (DBMetaInfo, error) { // GetDatabases returns a list of databases being tracked in metadb or an empty array on failure func (m *MetaDBHandle) GetDatabases() []string { - m.logDebug("GetDatabases", "entering") m.lock.Lock() defer m.lock.Unlock() @@ -186,7 +166,6 @@ func (m *MetaDBHandle) GetDatabases() []string { for iter.Next(&db) { results = append(results, db.Name) } - m.logDebug("GetDatabases", "exiting") return results } @@ -220,7 +199,6 @@ func (m *MetaDBHandle) CheckCompatibleAnalyze(targetDatabase string) (bool, erro // GetUnAnalyzedDatabases builds a list of database names which have yet to be analyzed func (m *MetaDBHandle) GetUnAnalyzedDatabases() []string { - m.logDebug("GetUnAnalyzedDatabases", "entering") m.lock.Lock() defer m.lock.Unlock() ssn := m.res.DB.Session.Copy() @@ -232,13 +210,11 @@ func (m *MetaDBHandle) GetUnAnalyzedDatabases() []string { for iter.Next(&cur) { results = append(results, cur.Name) } - m.logDebug("GetUnAnalyzedDatabases", "exiting") return results } // GetAnalyzedDatabases builds a list of database names which have been analyzed func (m *MetaDBHandle) GetAnalyzedDatabases() []string { - m.logDebug("GetUnAnalyzedDatabases", "entering") m.lock.Lock() defer m.lock.Unlock() ssn := m.res.DB.Session.Copy() @@ -250,7 +226,6 @@ func (m *MetaDBHandle) GetAnalyzedDatabases() []string { for iter.Next(&cur) { 
results = append(results, cur.Name) } - m.logDebug("GetUnAnalyzedDatabases", "exiting") return results } @@ -262,7 +237,6 @@ func (m *MetaDBHandle) GetAnalyzedDatabases() []string { // from the database, in the case of failure return a zero length list of files and generat a log // message. func (m *MetaDBHandle) GetFiles() ([]fpt.IndexedFile, error) { - m.logDebug("GetFiles", "entering") m.lock.Lock() defer m.lock.Unlock() var toReturn []fpt.IndexedFile @@ -278,13 +252,11 @@ func (m *MetaDBHandle) GetFiles() ([]fpt.IndexedFile, error) { }).Error("could not fetch files from meta database") return nil, err } - m.logDebug("GetFiles", "exiting") return toReturn, nil } //AddParsedFiles adds indexed files to the files the metaDB using the bulk API func (m *MetaDBHandle) AddParsedFiles(files []*fpt.IndexedFile) error { - m.logDebug("AddParsedFiles", "entering") m.lock.Lock() defer m.lock.Unlock() if len(files) == 0 { @@ -318,7 +290,6 @@ func (m *MetaDBHandle) AddParsedFiles(files []*fpt.IndexedFile) error { // isBuilt checks to see if a file table exists, as the existence of parsed files is prerequisite // to the existance of anything else. func (m *MetaDBHandle) isBuilt() bool { - m.logDebug("isBuilt", "entering") m.lock.Lock() defer m.lock.Unlock() ssn := m.res.DB.Session.Copy() @@ -338,14 +309,12 @@ func (m *MetaDBHandle) isBuilt() bool { } } - m.logDebug("isBuilt", "exiting") return false } // createMetaDB creates a new metadata database failure is not an option, // if this function fails it will bring down the system. 
func (m *MetaDBHandle) createMetaDB() { - m.logDebug("newMetaDBHandle", "entering") m.lock.Lock() defer m.lock.Unlock() @@ -400,14 +369,4 @@ func (m *MetaDBHandle) createMetaDB() { err = ssn.DB(m.DB).C(m.res.Config.T.Meta.DatabasesTable).EnsureIndex(idx) errchk(err) - m.logDebug("newMetaDBHandle", "exiting") -} - -// logDebug will simply output some state info -func (m *MetaDBHandle) logDebug(function, message string) { - m.res.Log.WithFields(log.Fields{ - "function": function, - "package": "database", - "module": "meta", - }).Debug(message) } diff --git a/etc/rita.yaml b/etc/rita.yaml index 49aa1369..db0ccc68 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -36,6 +36,14 @@ LogConfig: # The section Bro configures the bro ingestor Bro: + # Path to a top level directory of log files + ImportDirectory: /opt/bro/logs/ + + # Files directly in the ImportDirectory will be imported into a database + # given by DBRoot. Files in a subfolder of ImportDirectory will be imported + # into DBRoot-$SUBFOLDER_NAME. + DBRoot: "RITA" + # There needs to be one metadatabase per test. This database holds information # about the test and the files related to the test. 
If there are several # subnets mapped in DirectoryMap each will be handled separately and that diff --git a/parser/fileparsetypes/fileparsetypes.go b/parser/fileparsetypes/fileparsetypes.go index c33d8e24..8cc889a4 100644 --- a/parser/fileparsetypes/fileparsetypes.go +++ b/parser/fileparsetypes/fileparsetypes.go @@ -32,7 +32,6 @@ type IndexedFile struct { Hash string `bson:"hash"` TargetCollection string `bson:"collection"` TargetDatabase string `bson:"database"` - Dates []string `bson:"dates"` ParseTime time.Time `bson:"time_complete"` header *BroHeader broDataFactory func() pt.BroData diff --git a/parser/fsimporter.go b/parser/fsimporter.go index 3b99625a..9061abb5 100644 --- a/parser/fsimporter.go +++ b/parser/fsimporter.go @@ -5,7 +5,6 @@ import ( "io/ioutil" "os" "path" - "reflect" "strings" "sync" "time" @@ -13,7 +12,6 @@ import ( "github.com/ocmdev/rita/config" "github.com/ocmdev/rita/database" fpt "github.com/ocmdev/rita/parser/fileparsetypes" - "github.com/ocmdev/rita/parser/parsetypes" "github.com/ocmdev/rita/util" log "github.com/sirupsen/logrus" ) @@ -49,7 +47,7 @@ func (fs *FSImporter) Run(datastore Datastore) { fmt.Println("\t[-] Finding files to parse") //find all of the bro log paths - files := readDir(fs.res.Config.R.Bro.ImportDirectory, fs.res.Log) + files := readDir(fs.res.Config.S.Bro.ImportDirectory, fs.res.Log) //hash the files and get their stats indexedFiles := indexFiles(files, fs.indexingThreads, fs.res.Config, fs.res.Log) @@ -64,8 +62,7 @@ func (fs *FSImporter) Run(datastore Datastore) { indexedFiles = removeOldFilesFromIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) - parseFiles(indexedFiles, fs.parseThreads, - fs.res.Config.R.Bro.SplitStrategy, datastore, fs.res.Log) + parseFiles(indexedFiles, fs.parseThreads, datastore, fs.res.Log) datastore.Flush() updateFilesIndex(indexedFiles, fs.res.MetaDB, fs.res.Log) @@ -154,9 +151,7 @@ func indexFiles(files []string, indexingThreads int, //threads to use to parse the files, whether or not to sort 
data by date, // a MogoDB datastore object to store the bro data in, and a logger to report //errors and parses the bro files line by line into the database. -//NOTE: side effect: this sets the dates field on the indexedFiles -func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, - splitStrategy config.BroSplitStrategy, datastore Datastore, logger *log.Logger) { +func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, datastore Datastore, logger *log.Logger) { //set up parallel parsing n := len(indexedFiles) parsingWG := new(sync.WaitGroup) @@ -199,25 +194,9 @@ func parseFiles(indexedFiles []*fpt.IndexedFile, parsingThreads int, ) if data != nil { - //set the dates this file is represeting - date := getDateForLogEntry(data, indexedFiles[j].GetFieldMap()) - dateFound := false - for _, parsedDate := range indexedFiles[j].Dates { - if parsedDate == date { - dateFound = true - break - } - } - if !dateFound { - indexedFiles[j].Dates = append(indexedFiles[j].Dates, date) - } - //figure out what database this line is heading for targetCollection := indexedFiles[j].TargetCollection targetDB := indexedFiles[j].TargetDatabase - if splitStrategy == config.SplitDate { - targetDB += "-" + date - } datastore.Store(&ImportedData{ BroData: data, @@ -281,9 +260,3 @@ func updateFilesIndex(indexedFiles []*fpt.IndexedFile, metaDatabase *database.Me logger.Error("Could not update the list of parsed files") } } - -func getDateForLogEntry(broData parsetypes.BroData, fieldMap fpt.BroHeaderIndexMap) string { - data := reflect.ValueOf(broData).Elem() - ts := time.Unix(data.Field(fieldMap["ts"]).Int(), 0) - return fmt.Sprintf("%d-%02d-%02d", ts.Year(), ts.Month(), ts.Day()) -} diff --git a/parser/indexedfile.go b/parser/indexedfile.go index 83d19657..c2c570e2 100644 --- a/parser/indexedfile.go +++ b/parser/indexedfile.go @@ -83,7 +83,7 @@ func newIndexedFile(filePath string, config *config.Config, return toReturn, errors.New("Could not find a target collection for 
file") } - toReturn.TargetDatabase = getTargetDatabase(filePath, &config.R.Bro) + toReturn.TargetDatabase = getTargetDatabase(filePath, &config.S.Bro) if toReturn.TargetDatabase == "" { fileHandle.Close() return toReturn, errors.New("Could not find a dataset for file") @@ -114,20 +114,21 @@ func getFileHash(fileHandle *os.File, fInfo os.FileInfo) (string, error) { //getTargetDatabase assigns a database to a log file based on the path, //and the bro config -func getTargetDatabase(filePath string, broConfig *config.BroRunningCfg) string { +func getTargetDatabase(filePath string, broConfig *config.BroStaticCfg) string { var targetDatabase bytes.Buffer - targetDatabase.WriteString(broConfig.TargetDatabase) + targetDatabase.WriteString(broConfig.DBRoot) //Append subfolders to target db - if broConfig.SplitStrategy == config.SplitSubfolder { - relativeStartIndex := len(broConfig.ImportDirectory) - pathSep := string(os.PathSeparator) - relativePath := filePath[relativeStartIndex+len(pathSep):] - pathPieces := strings.Split(relativePath, pathSep) - pathPieces = pathPieces[:len(pathPieces)-1] - for _, piece := range pathPieces { - targetDatabase.WriteString("-") - targetDatabase.WriteString(piece) - } + relativeStartIndex := len(broConfig.ImportDirectory) + pathSep := string(os.PathSeparator) + relativePath := filePath[relativeStartIndex+len(pathSep):] + + //This routine uses Split rather than substring (0, index of path sep) + //because we may wish to add all the subdirectories to the db prefix + pathPieces := strings.Split(relativePath, pathSep) + //if there is more than just the file name + if len(pathPieces) > 1 { + targetDatabase.WriteString("-") + targetDatabase.WriteString(pathPieces[0]) } return targetDatabase.String() } From fc08377f6bcd68fd13a04e16456c45b0163b482e Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Fri, 9 Feb 2018 22:09:44 -0600 Subject: [PATCH 085/117] Readme and config file updates --- Readme.md | 44 +++++++++++--------------------------------- 
etc/rita.yaml | 12 +----------- 2 files changed, 12 insertions(+), 44 deletions(-) diff --git a/Readme.md b/Readme.md index 52b50c72..ef353634 100644 --- a/Readme.md +++ b/Readme.md @@ -22,7 +22,6 @@ Additional functionality is being developed and will be included soon. * Clone the package: `git clone https://github.com/ocmdev/rita.git` * Change into the source directory: `cd rita` -* Make the installer executable: `chmod +x install.sh` * Run the installer: `sudo ./install.sh` * Source your .bashrc (the installer added RITA to the PATH): `source ~/.bashrc` * Start MongoDB: `sudo service mongod start` @@ -36,7 +35,7 @@ To install each component of RITA by hand, [check out the instructions in the wi ### Configuration File RITA contains a yaml format configuration file. -You can specify the location for the configuration file with the **-c** command line flag. If not specified, RITA will first look for the configuration in **~/.rita/config.yaml** then **/etc/rita/config.yaml**. +You can specify the location for the configuration file with the **-c** command line flag. If not specified, RITA will look for the configuration in **/etc/rita/config.yaml**. ### API Keys @@ -65,50 +64,29 @@ To obtain an API key: * ```bro -r pcap_to_log.pcap local "Site::local_nets += { 192.168.0.0/24 }" "Log::default_rotation_interval = 1 day"``` * **Option 2**: Install Bro and let it monitor an interface directly [[instructions](https://www.bro.org/sphinx/quickstart/)] - * You may wish to [compile Bro from source](https://www.bro.org/sphinx/install/install.html) for performance reasons + * You may wish to [compile Bro from source](https://www.bro.org/sphinx/install/install.html) for performance reasons. [This script](https://github.com/ocmdev/bro-install) can help automate the process. 
* The automated installer for RITA installs pre-compiled Bro binaries #### Importing Data Into RITA - * After installing, `rita` should be in your `PATH` + * After installing, `rita` should be in your `PATH` and the config file should be set up ready to go. Once your Bro install has collected some logs (Bro will normally rotate logs on the hour) you can run `rita import`. Alternatively, you can manually import existing logs using one of the following options: * **Option 1**: Import directly from the terminal (one time import) - * `rita import -i path/to/your/bro_logs/ -d dataset_name` - * **Option 2**: Set up the Bro configuration in `~/.rita/config.yaml` for repeated imports - * Set `LogPath` to the `path/to/your/bro_logs` - * Set `DBPrefix` to an identifier common to your set of logs - * Set up the `DirectoryMap` - * Logs found in folders which match the substring on the left are imported - into the dataset on the right - * Example - * Say you have two sets of logs to analyze - * `/share/bro_logs/networkA` - * `/share/bro_logs/networkB` - * A correct Bro config section would look like - ```yaml - Bro: - LogPath: /share/bro_logs/ - DBPrefix: MyCompany_ - DirectoryMap: - networkA: A - networkB: B - ``` - * This would import `/share/brologs/networkA` into `MyCompany_A` and - `/share/brologs/networkB` into `MyCompany_B` - + * `rita import path/to/your/bro_logs/ database_name` + * **Option 2**: Set up the Bro configuration in `/etc/rita/config.yaml` for repeated imports + * Set `ImportDirectory` to the `path/to/your/bro_logs`. 
The default is `/opt/bro/logs` + * Set `DBRoot` to an identifier common to your set of logs #### Analyzing Data With RITA * **Option 1**: Analyze one dataset - * `rita analyze -d dataset_name` - * Ex: `rita analyze -d MyCompany_A` + * `rita analyze dataset_name` + * Ex: `rita analyze MyCompany_A` * **Option 2**: Analyze all imported datasets * `rita analyze` #### Examining Data With RITA * Use the **show-X** commands * `-H` displays human readable data - * `rita show-beacons -d dataset_name -H` - * `rita show-blacklisted -d dataset_name -H` - -**A link to a video tutorial will be added soon!** + * `rita show-beacons dataset_name -H` + * `rita show-blacklisted dataset_name -H` ### Getting help Head over to [OFTC and join #ocmdev](https://webchat.oftc.net/?channels=ocmdev) for any questions you may have. diff --git a/etc/rita.yaml b/etc/rita.yaml index db0ccc68..8af0498c 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -44,19 +44,9 @@ Bro: # into DBRoot-$SUBFOLDER_NAME. DBRoot: "RITA" - # There needs to be one metadatabase per test. This database holds information - # about the test and the files related to the test. If there are several - # subnets mapped in DirectoryMap each will be handled separately and that - # separation is handled by the metadatabase. + # This database holds information about the procesed files and databases. MetaDB: MetaDatabase - # If use dates is true the logs will be split into databases by date using the - # current system's timezone. This is best for if you have multiple days worth - # of log files in the logpath and wish to treat each day as a separate test. - # 24 hours worth of data is the ideal for analysis, and using dates will ensure - # that tests are broken into 24 hour periods on midnight in the current timezone. - UseDates: true - # The number of records shipped off to MongoDB at a time. 
Increasing # the size of the buffer will improve import timings but will leave more # records unimported if there is an error From eebf935fdadbff7ad00c587f7cd4c10922d285c8 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Mon, 12 Feb 2018 12:12:59 -0600 Subject: [PATCH 086/117] Cleaning filepaths before use --- commands/import.go | 7 ------- config/static.go | 11 +++++------ 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/commands/import.go b/commands/import.go index 33316e23..96c49aed 100644 --- a/commands/import.go +++ b/commands/import.go @@ -2,9 +2,7 @@ package commands import ( "fmt" - "os" "path/filepath" - "strings" "github.com/ocmdev/rita/database" "github.com/ocmdev/rita/parser" @@ -54,11 +52,6 @@ func doImport(c *cli.Context) error { return cli.NewExitError(err.Error(), -1) } - //Remove tailing / when applicable - if strings.HasSuffix(importDir, string(os.PathSeparator)) { - importDir = importDir[:len(importDir)-len(string(os.PathSeparator))] - } - res.Config.S.Bro.ImportDirectory = importDir res.Config.S.Bro.DBRoot = targetDatabase } diff --git a/config/static.go b/config/static.go index cff02cad..49e57e46 100644 --- a/config/static.go +++ b/config/static.go @@ -5,8 +5,8 @@ import ( "io/ioutil" "os" "reflect" - "strings" "time" + "path/filepath" yaml "gopkg.in/yaml.v2" ) @@ -116,11 +116,10 @@ func loadStaticConfig(cfgPath string) (*StaticCfg, error) { // set the socket time out in hours config.MongoDB.SocketTimeout *= time.Hour - // clean the import path - // remove tailing / when applicable - if strings.HasSuffix(config.Bro.ImportDirectory, string(os.PathSeparator)) { - config.Bro.ImportDirectory = config.Bro.ImportDirectory[:len(config.Bro.ImportDirectory)-len(string(config.Bro.ImportDirectory))] - } + // clean all filepaths + config.Log.RitaLogPath = filepath.Clean(config.Log.RitaLogPath) + config.Blacklisted.SafeBrowsing.Database = filepath.Clean(config.Blacklisted.SafeBrowsing.Database) + config.Bro.ImportDirectory = 
filepath.Clean(config.Bro.ImportDirectory) // grab the version constants set by the build process config.Version = Version From c55147dd29272a6c3fdbe01a824bc188b025a6e3 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Wed, 14 Feb 2018 13:44:37 -0600 Subject: [PATCH 087/117] Fixed _err typos --- install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install.sh b/install.sh index 372ccc25..bb182294 100755 --- a/install.sh +++ b/install.sh @@ -134,7 +134,7 @@ __setPkgMgr() { fi if [ $_PKG_MGR -eq 3 ]; then echo "Unsupported package manager" - _err + __err fi } @@ -142,7 +142,7 @@ __setOS() { _OS="$(lsb_release -is)" if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then echo "Unsupported operating system" - _err + __err fi } From 02b69e10c80518e7c9086fe75bd45e5f904d093e Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Sat, 17 Feb 2018 03:28:46 -0600 Subject: [PATCH 088/117] Update to dockerfile --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 36b1de57..b27689cd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,7 @@ RUN make FROM alpine:latest WORKDIR /root -RUN mkdir .rita -COPY --from=rita-builder /go/src/github.com/ocmdev/rita/etc/tables.yaml .rita/ +RUN mkdir /etc/rita +COPY --from=rita-builder /go/src/github.com/ocmdev/rita/etc/tables.yaml /etc/rita/ COPY --from=rita-builder /go/src/github.com/ocmdev/rita/rita . 
ENTRYPOINT ["./rita"] From 05e5cd9f7576c573a07ccb3d15272ab1ec64c97d Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 14:03:50 -0700 Subject: [PATCH 089/117] ActiveCM rename --- Contributing.md | 16 +++++------ Dockerfile | 6 ++-- Gopkg.lock | 34 +++++++++++++++++------ Gopkg.toml | 6 ++-- Makefile | 6 ++-- Readme.md | 12 ++++---- analysis/beacon/beacon.go | 10 +++---- analysis/beacon/beacon_test.go | 4 +-- analysis/blacklist/blacklist.go | 14 +++++----- analysis/blacklist/hostnames.go | 12 ++++---- analysis/blacklist/ips.go | 10 +++---- analysis/blacklist/urls.go | 10 +++---- analysis/crossref/beaconing.go | 6 ++-- analysis/crossref/blacklist-dest-ips.go | 6 ++-- analysis/crossref/blacklist-source-ips.go | 6 ++-- analysis/crossref/crossref.go | 4 +-- analysis/crossref/scanning.go | 4 +-- analysis/dns/explodedDNS.go | 2 +- analysis/dns/hostnames.go | 8 +++--- analysis/scanning/scan.go | 4 +-- analysis/structure/hosts.go | 4 +-- analysis/structure/ip.go | 4 +-- analysis/structure/uconn.go | 4 +-- analysis/urls/url.go | 4 +-- analysis/useragent/useragent.go | 4 +-- commands/analyze.go | 20 ++++++------- commands/delete-database.go | 2 +- commands/import.go | 6 ++-- commands/reporting.go | 4 +-- commands/reset-analysis.go | 2 +- commands/show-beacons.go | 6 ++-- commands/show-bl-hostname.go | 8 +++--- commands/show-bl-ip.go | 6 ++-- commands/show-bl-url.go | 8 +++--- commands/show-databases.go | 2 +- commands/show-explodedDns.go | 4 +-- commands/show-long-connections.go | 4 +-- commands/show-scans.go | 4 +-- commands/show-urls.go | 4 +-- commands/show-user-agents.go | 4 +-- commands/test-config.go | 2 +- config/running.go | 2 +- database/meta.go | 2 +- database/mock.go | 2 +- database/resources.go | 8 +++--- datatypes/crossref/crossref.go | 2 +- install.sh | 8 +++--- parser/datastore.go | 2 +- parser/fileparser.go | 4 +-- parser/fileparsetypes/fileparsetypes.go | 2 +- parser/fsimporter.go | 8 +++--- parser/indexedfile.go | 6 ++-- 
parser/mongodatastore.go | 2 +- parser/parsetypes/conn.go | 2 +- parser/parsetypes/dns.go | 2 +- parser/parsetypes/http.go | 2 +- parser/parsetypes/parsetypes.go | 2 +- reporting/report-beacons.go | 8 +++--- reporting/report-bl-dest-ips.go | 8 +++--- reporting/report-bl-hostnames.go | 10 +++---- reporting/report-bl-source-ips.go | 8 +++--- reporting/report-bl-urls.go | 10 +++---- reporting/report-explodedDns.go | 6 ++-- reporting/report-long-connections.go | 6 ++-- reporting/report-scans.go | 6 ++-- reporting/report-urls.go | 6 ++-- reporting/report-useragents.go | 6 ++-- reporting/report.go | 6 ++-- reporting/templates/templates.go | 10 +++---- rita.go | 4 +-- 70 files changed, 226 insertions(+), 210 deletions(-) diff --git a/Contributing.md b/Contributing.md index fc91f559..98500f4b 100644 --- a/Contributing.md +++ b/Contributing.md @@ -13,7 +13,7 @@ there. Just be sure to do the following: * Check if the bug is already accounted for on the -[Github issue tracker](https://github.com/ocmdev/rita/issues) +[Github issue tracker](https://github.com/activecm/rita/issues) * If an issue already exists, add the following info in a comment * If not, create an issue, and include the following info * Give very specific descriptions of how to reproduce the bug @@ -41,22 +41,22 @@ There are several ways to contribute code to the RITA project. * When you're ready to test code run `go test ./...` from the root directory of the project * Feel free to refactor code to increase our ability to test it - * Join our [IRC](https://github.com/ocmdev/rita/wiki/RITA-Gittiquette) to + * Join our [IRC](https://github.com/activecm/rita/wiki/RITA-Gittiquette) to learn more * Add new features: * If you would like to become involved in the development effort, please hop - on our [OFTC channel at #ocmdev](https://webchat.oftc.net/?channels=ocmdev) + on our [OFTC channel at #activecm](https://webchat.oftc.net/?channels=activecm) and chat about what is currently being worked on. 
All of these tasks ultimately culminate in a pull request being issued, reviewed, and merged. When interacting with RITA through Git please check out the -[RITA Gittiquette page](https://github.com/ocmdev/rita/wiki/RITA-Gittiquette). +[RITA Gittiquette page](https://github.com/activecm/rita/wiki/RITA-Gittiquette). Go limits the ways you may use Git with an open source project such as RITA, so it is important that you understand the procedures laid out here. ### Gittiquette Summary -* We currently have a dev and master branch on OCMDev +* We currently have a dev and master branch on activecm * Master is our tagged release branch * Dev is our development and staging branch * As more users come to rely on RITA, we will introduce a release-testing branch @@ -72,9 +72,9 @@ your feature branch off of it * Submit a pull request on Github ### Switching to the `dev` Branch -* Install RITA using either the [installer](https://raw.githubusercontent.com/ocmdev/rita/master/install.sh) or -[manually](https://github.com/ocmdev/rita/wiki/Installation) -* `cd $GOPATH/src/github.com/ocmdev/rita` +* Install RITA using either the [installer](https://raw.githubusercontent.com/activecm/rita/master/install.sh) or +[manually](https://github.com/activecm/rita/wiki/Installation) +* `cd $GOPATH/src/github.com/activecm/rita` * `git checkout dev` * `make install` * Configure a config file for the dev branch diff --git a/Dockerfile b/Dockerfile index b27689cd..561e8c40 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ FROM golang:1.8-alpine as rita-builder RUN apk update && apk upgrade && apk add --no-cache git make ca-certificates wget RUN wget -q -O /go/bin/dep https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 && chmod +x /go/bin/dep -WORKDIR /go/src/github.com/ocmdev/rita +WORKDIR /go/src/github.com/activecm/rita COPY . . 
RUN make @@ -20,6 +20,6 @@ FROM alpine:latest WORKDIR /root RUN mkdir /etc/rita -COPY --from=rita-builder /go/src/github.com/ocmdev/rita/etc/tables.yaml /etc/rita/ -COPY --from=rita-builder /go/src/github.com/ocmdev/rita/rita . +COPY --from=rita-builder /go/src/github.com/activecm/rita/etc/tables.yaml /etc/rita/ +COPY --from=rita-builder /go/src/github.com/activecm/rita/rita . ENTRYPOINT ["./rita"] diff --git a/Gopkg.lock b/Gopkg.lock index 6e032996..51a17b95 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1,6 +1,30 @@ # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. +[[projects]] + name = "github.com/activecm/mgorus" + packages = ["."] + revision = "544a63f222470b2feb3b1142f7edb9a843f5120d" + version = "v0.1.0" + +[[projects]] + name = "github.com/activecm/mgosec" + packages = ["."] + revision = "af42afa3ec74143661a863bdae62d36a93c6eca7" + version = "v0.1.0" + +[[projects]] + branch = "master" + name = "github.com/activecm/rita-bl" + packages = [ + ".", + "database", + "list", + "sources/lists", + "sources/rpc" + ] + revision = "070299442c8c467e9501907b53ba4646be71255b" + [[projects]] name = "github.com/blang/semver" packages = ["."] @@ -31,12 +55,6 @@ revision = "9e777a8366cce605130a531d2cd6363d07ad7317" version = "v0.0.2" -[[projects]] - name = "github.com/ocmdev/mgorus" - packages = ["."] - revision = "544a63f222470b2feb3b1142f7edb9a843f5120d" - version = "v0.1.0" - [[projects]] name = "github.com/ocmdev/mgosec" packages = ["."] @@ -47,10 +65,8 @@ branch = "master" name = "github.com/ocmdev/rita-bl" packages = [ - ".", "database", "list", - "sources/lists", "sources/lists/util", "sources/rpc" ] @@ -149,6 +165,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "faeb74ddc998410e81792434cdede82704ed7479eb84d31e1cc20b2db67a8901" + inputs-digest = "2cd8cd4548384c70c9d6a0facc232a8a7ed8573751f8cbea14c27b2153d38f8c" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 
44227bbb..57779c5f 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -21,16 +21,16 @@ [[constraint]] - name = "github.com/ocmdev/mgorus" + name = "github.com/activecm/mgorus" version = "0.1.0" [[constraint]] - name = "github.com/ocmdev/mgosec" + name = "github.com/activecm/mgosec" version = "0.1.0" [[constraint]] branch = "master" - name = "github.com/ocmdev/rita-bl" + name = "github.com/activecm/rita-bl" [[constraint]] branch = "master" diff --git a/Makefile b/Makefile index e2ebf69d..e83b93bc 100644 --- a/Makefile +++ b/Makefile @@ -3,15 +3,15 @@ EXACT_VERSION := $(shell git describe --always --long --dirty --tags) GOPATH := $(GOPATH) BINARY := rita -LDFLAGS=-ldflags="-X github.com/ocmdev/rita/config.Version=${VERSION} -X github.com/ocmdev/rita/config.ExactVersion=${EXACT_VERSION}" +LDFLAGS=-ldflags="-X github.com/activecm/rita/config.Version=${VERSION} -X github.com/activecm/rita/config.ExactVersion=${EXACT_VERSION}" default: - dep ensure +# dep ensure go build ${LDFLAGS} # Having issues with 'go install' + LDFLAGS using sudo and the # install script. This is a workaround. install: - dep ensure +# dep ensure go build ${LDFLAGS} -o ${GOPATH}/bin/${BINARY} diff --git a/Readme.md b/Readme.md index ef353634..e0e1c2d4 100644 --- a/Readme.md +++ b/Readme.md @@ -20,17 +20,17 @@ Additional functionality is being developed and will be included soon. **The automatic installer is officially supported on Ubuntu 14.04, 16.04 LTS, Security Onion, and CentOS 7** * Clone the package: -`git clone https://github.com/ocmdev/rita.git` +`git clone https://github.com/activecm/rita.git` * Change into the source directory: `cd rita` * Run the installer: `sudo ./install.sh` * Source your .bashrc (the installer added RITA to the PATH): `source ~/.bashrc` * Start MongoDB: `sudo service mongod start` ### Docker Installation -RITA is available as a Docker image at ocmdev/rita, [check out the instructions in the wiki](https://github.com/ocmdev/rita/wiki/Docker-Installation). 
+RITA is available as a Docker image at activecm/rita, [check out the instructions in the wiki](https://github.com/activecm/rita/wiki/Docker-Installation). ### Manual Installation -To install each component of RITA by hand, [check out the instructions in the wiki](https://github.com/ocmdev/rita/wiki/Installation). +To install each component of RITA by hand, [check out the instructions in the wiki](https://github.com/activecm/rita/wiki/Installation). ### Configuration File RITA contains a yaml format configuration file. @@ -64,7 +64,7 @@ To obtain an API key: * ```bro -r pcap_to_log.pcap local "Site::local_nets += { 192.168.0.0/24 }" "Log::default_rotation_interval = 1 day"``` * **Option 2**: Install Bro and let it monitor an interface directly [[instructions](https://www.bro.org/sphinx/quickstart/)] - * You may wish to [compile Bro from source](https://www.bro.org/sphinx/install/install.html) for performance reasons. [This script](https://github.com/ocmdev/bro-install) can help automate the process. + * You may wish to [compile Bro from source](https://www.bro.org/sphinx/install/install.html) for performance reasons. [This script](https://github.com/activecm/bro-install) can help automate the process. * The automated installer for RITA installs pre-compiled Bro binaries #### Importing Data Into RITA @@ -89,10 +89,10 @@ To obtain an API key: * `rita show-blacklisted dataset_name -H` ### Getting help -Head over to [OFTC and join #ocmdev](https://webchat.oftc.net/?channels=ocmdev) for any questions you may have. +Head over to [OFTC and join #activecm](https://webchat.oftc.net/?channels=activecm) for any questions you may have. 
### Contributing to RITA -To contribute to RITA visit our [Contributing Guide](https://github.com/ocmdev/rita/blob/master/Contributing.md) +To contribute to RITA visit our [Contributing Guide](https://github.com/activecm/rita/blob/master/Contributing.md) ### License GNU GPL V3 diff --git a/analysis/beacon/beacon.go b/analysis/beacon/beacon.go index 32791aa2..adb6c040 100644 --- a/analysis/beacon/beacon.go +++ b/analysis/beacon/beacon.go @@ -10,11 +10,11 @@ import ( mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/database" - dataBeacon "github.com/ocmdev/rita/datatypes/beacon" - "github.com/ocmdev/rita/datatypes/data" - "github.com/ocmdev/rita/datatypes/structure" - "github.com/ocmdev/rita/util" + "github.com/activecm/rita/database" + dataBeacon "github.com/activecm/rita/datatypes/beacon" + "github.com/activecm/rita/datatypes/data" + "github.com/activecm/rita/datatypes/structure" + "github.com/activecm/rita/util" log "github.com/sirupsen/logrus" ) diff --git a/analysis/beacon/beacon_test.go b/analysis/beacon/beacon_test.go index 090d786a..d36155a1 100644 --- a/analysis/beacon/beacon_test.go +++ b/analysis/beacon/beacon_test.go @@ -5,8 +5,8 @@ import ( "reflect" "testing" - "github.com/ocmdev/rita/database" - datatype_beacon "github.com/ocmdev/rita/datatypes/beacon" + "github.com/activecm/rita/database" + datatype_beacon "github.com/activecm/rita/datatypes/beacon" log "github.com/sirupsen/logrus" ) diff --git a/analysis/blacklist/blacklist.go b/analysis/blacklist/blacklist.go index 19ef1265..bb63e46c 100644 --- a/analysis/blacklist/blacklist.go +++ b/analysis/blacklist/blacklist.go @@ -6,13 +6,13 @@ import ( "net/http" "os" - bl "github.com/ocmdev/rita-bl" - blDB "github.com/ocmdev/rita-bl/database" - "github.com/ocmdev/rita-bl/list" - "github.com/ocmdev/rita-bl/sources/lists" - "github.com/ocmdev/rita-bl/sources/rpc" - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" + bl "github.com/activecm/rita-bl" + blDB 
"github.com/activecm/rita-bl/database" + "github.com/activecm/rita-bl/list" + "github.com/activecm/rita-bl/sources/lists" + "github.com/activecm/rita-bl/sources/rpc" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" ) diff --git a/analysis/blacklist/hostnames.go b/analysis/blacklist/hostnames.go index db059552..552cf66f 100644 --- a/analysis/blacklist/hostnames.go +++ b/analysis/blacklist/hostnames.go @@ -3,13 +3,13 @@ package blacklist import ( "unsafe" - "github.com/ocmdev/rita-bl/list" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/dns" - "github.com/ocmdev/rita/datatypes/structure" + "github.com/activecm/rita-bl/list" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/dns" + "github.com/activecm/rita/datatypes/structure" - bl "github.com/ocmdev/rita-bl" - data "github.com/ocmdev/rita/datatypes/blacklist" + bl "github.com/activecm/rita-bl" + data "github.com/activecm/rita/datatypes/blacklist" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/analysis/blacklist/ips.go b/analysis/blacklist/ips.go index 396b2c3f..27fe63fc 100644 --- a/analysis/blacklist/ips.go +++ b/analysis/blacklist/ips.go @@ -3,11 +3,11 @@ package blacklist import ( "unsafe" - bl "github.com/ocmdev/rita-bl" - "github.com/ocmdev/rita-bl/list" - "github.com/ocmdev/rita/database" - data "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" + bl "github.com/activecm/rita-bl" + "github.com/activecm/rita-bl/list" + "github.com/activecm/rita/database" + data "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/analysis/blacklist/urls.go b/analysis/blacklist/urls.go index 382f97e3..76b4b3b0 100644 --- a/analysis/blacklist/urls.go +++ b/analysis/blacklist/urls.go @@ 
-4,12 +4,12 @@ import ( "errors" "strings" - "github.com/ocmdev/rita-bl/list" + "github.com/activecm/rita-bl/list" - bl "github.com/ocmdev/rita-bl" - "github.com/ocmdev/rita/database" - data "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/urls" + bl "github.com/activecm/rita-bl" + "github.com/activecm/rita/database" + data "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/urls" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/analysis/crossref/beaconing.go b/analysis/crossref/beaconing.go index 697c744c..65eab76a 100644 --- a/analysis/crossref/beaconing.go +++ b/analysis/crossref/beaconing.go @@ -1,9 +1,9 @@ package crossref import ( - "github.com/ocmdev/rita/analysis/beacon" - "github.com/ocmdev/rita/database" - dataBeacon "github.com/ocmdev/rita/datatypes/beacon" + "github.com/activecm/rita/analysis/beacon" + "github.com/activecm/rita/database" + dataBeacon "github.com/activecm/rita/datatypes/beacon" ) type ( diff --git a/analysis/crossref/blacklist-dest-ips.go b/analysis/crossref/blacklist-dest-ips.go index 11e379aa..4dfebf71 100644 --- a/analysis/crossref/blacklist-dest-ips.go +++ b/analysis/crossref/blacklist-dest-ips.go @@ -1,9 +1,9 @@ package crossref import ( - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" "gopkg.in/mgo.v2/bson" ) diff --git a/analysis/crossref/blacklist-source-ips.go b/analysis/crossref/blacklist-source-ips.go index 71b60e21..c35dafc4 100644 --- a/analysis/crossref/blacklist-source-ips.go +++ b/analysis/crossref/blacklist-source-ips.go @@ -1,9 +1,9 @@ package crossref import ( - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" + 
"github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" "gopkg.in/mgo.v2/bson" ) diff --git a/analysis/crossref/crossref.go b/analysis/crossref/crossref.go index b46b6429..0752caac 100644 --- a/analysis/crossref/crossref.go +++ b/analysis/crossref/crossref.go @@ -6,8 +6,8 @@ import ( mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/database" - dataXRef "github.com/ocmdev/rita/datatypes/crossref" + "github.com/activecm/rita/database" + dataXRef "github.com/activecm/rita/datatypes/crossref" ) // getXRefSelectors is a place to add new selectors to the crossref module diff --git a/analysis/crossref/scanning.go b/analysis/crossref/scanning.go index 2fc0f6ba..b831f0dd 100644 --- a/analysis/crossref/scanning.go +++ b/analysis/crossref/scanning.go @@ -1,8 +1,8 @@ package crossref import ( - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/scanning" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/scanning" ) type ( diff --git a/analysis/dns/explodedDNS.go b/analysis/dns/explodedDNS.go index c5ad1347..e0469f7d 100644 --- a/analysis/dns/explodedDNS.go +++ b/analysis/dns/explodedDNS.go @@ -1,7 +1,7 @@ package dns import ( - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) diff --git a/analysis/dns/hostnames.go b/analysis/dns/hostnames.go index 3925e96a..32e5ad00 100644 --- a/analysis/dns/hostnames.go +++ b/analysis/dns/hostnames.go @@ -1,10 +1,10 @@ package dns import ( - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" - dnsTypes "github.com/ocmdev/rita/datatypes/dns" - "github.com/ocmdev/rita/util" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" + dnsTypes "github.com/activecm/rita/datatypes/dns" + "github.com/activecm/rita/util" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) diff --git a/analysis/scanning/scan.go 
b/analysis/scanning/scan.go index f6492543..779b6dc4 100644 --- a/analysis/scanning/scan.go +++ b/analysis/scanning/scan.go @@ -1,8 +1,8 @@ package scanning import ( - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/analysis/structure/hosts.go b/analysis/structure/hosts.go index 36c7e9cc..4e45ce13 100644 --- a/analysis/structure/hosts.go +++ b/analysis/structure/hosts.go @@ -1,8 +1,8 @@ package structure import ( - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) diff --git a/analysis/structure/ip.go b/analysis/structure/ip.go index 6eea97d7..98ba7add 100644 --- a/analysis/structure/ip.go +++ b/analysis/structure/ip.go @@ -4,8 +4,8 @@ import ( "encoding/binary" "net" - "github.com/ocmdev/rita/database" - structureTypes "github.com/ocmdev/rita/datatypes/structure" + "github.com/activecm/rita/database" + structureTypes "github.com/activecm/rita/datatypes/structure" log "github.com/sirupsen/logrus" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/analysis/structure/uconn.go b/analysis/structure/uconn.go index 9a725b2e..54e94499 100644 --- a/analysis/structure/uconn.go +++ b/analysis/structure/uconn.go @@ -1,8 +1,8 @@ package structure import ( - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/analysis/urls/url.go b/analysis/urls/url.go index 76d5fdf1..788a630b 100644 --- a/analysis/urls/url.go +++ b/analysis/urls/url.go @@ -1,8 +1,8 @@ package urls import ( - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" "gopkg.in/mgo.v2" 
"gopkg.in/mgo.v2/bson" diff --git a/analysis/useragent/useragent.go b/analysis/useragent/useragent.go index 72d3d2b4..528273c0 100644 --- a/analysis/useragent/useragent.go +++ b/analysis/useragent/useragent.go @@ -1,8 +1,8 @@ package useragent import ( - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/commands/analyze.go b/commands/analyze.go index 23893788..1d853b26 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -5,16 +5,16 @@ import ( "time" "github.com/blang/semver" - "github.com/ocmdev/rita/analysis/beacon" - "github.com/ocmdev/rita/analysis/blacklist" - "github.com/ocmdev/rita/analysis/crossref" - "github.com/ocmdev/rita/analysis/dns" - "github.com/ocmdev/rita/analysis/scanning" - "github.com/ocmdev/rita/analysis/structure" - "github.com/ocmdev/rita/analysis/urls" - "github.com/ocmdev/rita/analysis/useragent" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/util" + "github.com/activecm/rita/analysis/beacon" + "github.com/activecm/rita/analysis/blacklist" + "github.com/activecm/rita/analysis/crossref" + "github.com/activecm/rita/analysis/dns" + "github.com/activecm/rita/analysis/scanning" + "github.com/activecm/rita/analysis/structure" + "github.com/activecm/rita/analysis/urls" + "github.com/activecm/rita/analysis/useragent" + "github.com/activecm/rita/database" + "github.com/activecm/rita/util" log "github.com/sirupsen/logrus" "github.com/urfave/cli" ) diff --git a/commands/delete-database.go b/commands/delete-database.go index 0b2b49ac..e24fd88b 100644 --- a/commands/delete-database.go +++ b/commands/delete-database.go @@ -7,7 +7,7 @@ import ( "os" "strings" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" "github.com/urfave/cli" ) diff --git a/commands/import.go b/commands/import.go index 96c49aed..95ac99bd 100644 --- a/commands/import.go +++ 
b/commands/import.go @@ -4,9 +4,9 @@ import ( "fmt" "path/filepath" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/parser" - "github.com/ocmdev/rita/util" + "github.com/activecm/rita/database" + "github.com/activecm/rita/parser" + "github.com/activecm/rita/util" "github.com/urfave/cli" ) diff --git a/commands/reporting.go b/commands/reporting.go index 4b2db372..91536c32 100644 --- a/commands/reporting.go +++ b/commands/reporting.go @@ -1,8 +1,8 @@ package commands import ( - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/reporting" + "github.com/activecm/rita/database" + "github.com/activecm/rita/reporting" "github.com/urfave/cli" ) diff --git a/commands/reset-analysis.go b/commands/reset-analysis.go index 70d4478b..e489d648 100644 --- a/commands/reset-analysis.go +++ b/commands/reset-analysis.go @@ -7,7 +7,7 @@ import ( "os" "strings" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" "github.com/urfave/cli" ) diff --git a/commands/show-beacons.go b/commands/show-beacons.go index 829c884c..15263dc2 100644 --- a/commands/show-beacons.go +++ b/commands/show-beacons.go @@ -4,9 +4,9 @@ import ( "encoding/csv" "os" - "github.com/ocmdev/rita/analysis/beacon" - "github.com/ocmdev/rita/database" - beaconData "github.com/ocmdev/rita/datatypes/beacon" + "github.com/activecm/rita/analysis/beacon" + "github.com/activecm/rita/database" + beaconData "github.com/activecm/rita/datatypes/beacon" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" ) diff --git a/commands/show-bl-hostname.go b/commands/show-bl-hostname.go index 7c0d3258..25d0798a 100644 --- a/commands/show-bl-hostname.go +++ b/commands/show-bl-hostname.go @@ -7,10 +7,10 @@ import ( "strconv" "strings" - "github.com/ocmdev/rita/analysis/dns" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" + "github.com/activecm/rita/analysis/dns" + "github.com/activecm/rita/database" + 
"github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" "gopkg.in/mgo.v2/bson" diff --git a/commands/show-bl-ip.go b/commands/show-bl-ip.go index b356c043..99e0acda 100644 --- a/commands/show-bl-ip.go +++ b/commands/show-bl-ip.go @@ -7,9 +7,9 @@ import ( "strconv" "strings" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" "gopkg.in/mgo.v2/bson" diff --git a/commands/show-bl-url.go b/commands/show-bl-url.go index 3dc17747..95fe0fa6 100644 --- a/commands/show-bl-url.go +++ b/commands/show-bl-url.go @@ -7,10 +7,10 @@ import ( "strconv" "strings" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" - "github.com/ocmdev/rita/datatypes/urls" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" + "github.com/activecm/rita/datatypes/urls" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" "gopkg.in/mgo.v2/bson" diff --git a/commands/show-databases.go b/commands/show-databases.go index 5a7dc277..a0269a32 100644 --- a/commands/show-databases.go +++ b/commands/show-databases.go @@ -3,7 +3,7 @@ package commands import ( "fmt" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" "github.com/urfave/cli" ) diff --git a/commands/show-explodedDns.go b/commands/show-explodedDns.go index 5f0f0973..f82b6265 100644 --- a/commands/show-explodedDns.go +++ b/commands/show-explodedDns.go @@ -4,8 +4,8 @@ import ( "encoding/csv" "os" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/dns" + 
"github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/dns" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" ) diff --git a/commands/show-long-connections.go b/commands/show-long-connections.go index ea5ae1f3..83b51165 100644 --- a/commands/show-long-connections.go +++ b/commands/show-long-connections.go @@ -5,8 +5,8 @@ import ( "os" "strconv" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/data" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/data" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" ) diff --git a/commands/show-scans.go b/commands/show-scans.go index 2a2c99e7..8dbc2a0a 100644 --- a/commands/show-scans.go +++ b/commands/show-scans.go @@ -5,8 +5,8 @@ import ( "os" "strconv" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/scanning" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/scanning" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" ) diff --git a/commands/show-urls.go b/commands/show-urls.go index 7f4897ab..665c2425 100644 --- a/commands/show-urls.go +++ b/commands/show-urls.go @@ -5,8 +5,8 @@ import ( "os" "strings" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/urls" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/urls" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" ) diff --git a/commands/show-user-agents.go b/commands/show-user-agents.go index 7694a18b..00d66db4 100644 --- a/commands/show-user-agents.go +++ b/commands/show-user-agents.go @@ -4,8 +4,8 @@ import ( "encoding/csv" "os" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/useragent" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/useragent" "github.com/olekukonko/tablewriter" "github.com/urfave/cli" ) diff --git a/commands/test-config.go b/commands/test-config.go index d6d9f9fb..3b90d91b 100644 --- 
a/commands/test-config.go +++ b/commands/test-config.go @@ -4,7 +4,7 @@ import ( "fmt" "os" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" "github.com/urfave/cli" yaml "gopkg.in/yaml.v2" ) diff --git a/config/running.go b/config/running.go index 88c4592a..08c672c4 100644 --- a/config/running.go +++ b/config/running.go @@ -7,7 +7,7 @@ import ( "io/ioutil" "github.com/blang/semver" - "github.com/ocmdev/mgosec" + "github.com/activecm/mgosec" ) type ( diff --git a/database/meta.go b/database/meta.go index 528e2b5c..c31ca325 100644 --- a/database/meta.go +++ b/database/meta.go @@ -5,7 +5,7 @@ import ( "sync" "github.com/blang/semver" - fpt "github.com/ocmdev/rita/parser/fileparsetypes" + fpt "github.com/activecm/rita/parser/fileparsetypes" log "github.com/sirupsen/logrus" "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" diff --git a/database/mock.go b/database/mock.go index e67b3806..7762eacc 100644 --- a/database/mock.go +++ b/database/mock.go @@ -4,7 +4,7 @@ import ( "fmt" "os" - "github.com/ocmdev/rita/config" + "github.com/activecm/rita/config" ) // InitMockResources grabs the configuration file and intitializes the configuration data diff --git a/database/resources.go b/database/resources.go index 07597b78..19425415 100644 --- a/database/resources.go +++ b/database/resources.go @@ -10,10 +10,10 @@ import ( mgo "gopkg.in/mgo.v2" - "github.com/ocmdev/mgorus" - "github.com/ocmdev/mgosec" - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/util" + "github.com/activecm/mgorus" + "github.com/activecm/mgosec" + "github.com/activecm/rita/config" + "github.com/activecm/rita/util" "github.com/rifflock/lfshook" log "github.com/sirupsen/logrus" ) diff --git a/datatypes/crossref/crossref.go b/datatypes/crossref/crossref.go index 30670a91..3fbaa783 100644 --- a/datatypes/crossref/crossref.go +++ b/datatypes/crossref/crossref.go @@ -1,7 +1,7 @@ package crossref import ( - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" ) type ( 
diff --git a/install.sh b/install.sh index bb182294..4c44bcc9 100755 --- a/install.sh +++ b/install.sh @@ -337,10 +337,10 @@ __install() { wget -q -O $GOPATH/bin/dep https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 chmod +x $GOPATH/bin/dep - mkdir -p $GOPATH/src/github.com/ocmdev/rita + mkdir -p $GOPATH/src/github.com/activecm/rita # Get the install script's directory in case it's run from elsewhere - cp -R "$(dirname "$(realpath ${0})")/." $GOPATH/src/github.com/ocmdev/rita/ - cd $GOPATH/src/github.com/ocmdev/rita + cp -R "$(dirname "$(realpath ${0})")/." $GOPATH/src/github.com/activecm/rita/ + cd $GOPATH/src/github.com/activecm/rita make install > /dev/null # Allow any user to execute rita chmod 755 $GOPATH/bin/rita @@ -349,7 +349,7 @@ __install() { ( # Install the base configuration files mkdir -p $_CONFIG_PATH - cd $GOPATH/src/github.com/ocmdev/rita + cd $GOPATH/src/github.com/activecm/rita cp ./LICENSE $_CONFIG_PATH/LICENSE cp ./etc/rita.yaml $_CONFIG_PATH/config.yaml cp ./etc/tables.yaml $_CONFIG_PATH/tables.yaml diff --git a/parser/datastore.go b/parser/datastore.go index d949db30..e20792d3 100644 --- a/parser/datastore.go +++ b/parser/datastore.go @@ -1,6 +1,6 @@ package parser -import "github.com/ocmdev/rita/parser/parsetypes" +import "github.com/activecm/rita/parser/parsetypes" //Datastore allows RITA to store bro data in a database type Datastore interface { diff --git a/parser/fileparser.go b/parser/fileparser.go index b7f25aa5..ed122b6d 100644 --- a/parser/fileparser.go +++ b/parser/fileparser.go @@ -10,8 +10,8 @@ import ( "strings" "time" - fpt "github.com/ocmdev/rita/parser/fileparsetypes" - pt "github.com/ocmdev/rita/parser/parsetypes" + fpt "github.com/activecm/rita/parser/fileparsetypes" + pt "github.com/activecm/rita/parser/parsetypes" log "github.com/sirupsen/logrus" ) diff --git a/parser/fileparsetypes/fileparsetypes.go b/parser/fileparsetypes/fileparsetypes.go index 8cc889a4..68b2f7b0 100644 --- 
a/parser/fileparsetypes/fileparsetypes.go +++ b/parser/fileparsetypes/fileparsetypes.go @@ -3,7 +3,7 @@ package fileparsetypes import ( "time" - pt "github.com/ocmdev/rita/parser/parsetypes" + pt "github.com/activecm/rita/parser/parsetypes" "gopkg.in/mgo.v2/bson" ) diff --git a/parser/fsimporter.go b/parser/fsimporter.go index 9061abb5..6f90a174 100644 --- a/parser/fsimporter.go +++ b/parser/fsimporter.go @@ -9,10 +9,10 @@ import ( "sync" "time" - "github.com/ocmdev/rita/config" - "github.com/ocmdev/rita/database" - fpt "github.com/ocmdev/rita/parser/fileparsetypes" - "github.com/ocmdev/rita/util" + "github.com/activecm/rita/config" + "github.com/activecm/rita/database" + fpt "github.com/activecm/rita/parser/fileparsetypes" + "github.com/activecm/rita/util" log "github.com/sirupsen/logrus" ) diff --git a/parser/indexedfile.go b/parser/indexedfile.go index c2c570e2..3718086f 100644 --- a/parser/indexedfile.go +++ b/parser/indexedfile.go @@ -11,9 +11,9 @@ import ( log "github.com/sirupsen/logrus" - "github.com/ocmdev/rita/config" - fpt "github.com/ocmdev/rita/parser/fileparsetypes" - pt "github.com/ocmdev/rita/parser/parsetypes" + "github.com/activecm/rita/config" + fpt "github.com/activecm/rita/parser/fileparsetypes" + pt "github.com/activecm/rita/parser/parsetypes" ) //newIndexedFile takes in a file path and the bro config and opens up the diff --git a/parser/mongodatastore.go b/parser/mongodatastore.go index 241eca39..ccdd94d2 100644 --- a/parser/mongodatastore.go +++ b/parser/mongodatastore.go @@ -6,7 +6,7 @@ import ( log "github.com/sirupsen/logrus" - "github.com/ocmdev/rita/database" + "github.com/activecm/rita/database" mgo "gopkg.in/mgo.v2" ) diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index 0c5ad413..c481f14f 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -1,7 +1,7 @@ package parsetypes import ( - "github.com/ocmdev/rita/config" + "github.com/activecm/rita/config" "gopkg.in/mgo.v2/bson" ) diff --git 
a/parser/parsetypes/dns.go b/parser/parsetypes/dns.go index 11cd42fa..0b599eae 100644 --- a/parser/parsetypes/dns.go +++ b/parser/parsetypes/dns.go @@ -1,7 +1,7 @@ package parsetypes import ( - "github.com/ocmdev/rita/config" + "github.com/activecm/rita/config" "gopkg.in/mgo.v2/bson" ) diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index 43edf55b..85d31529 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -3,7 +3,7 @@ package parsetypes import ( "net/url" - "github.com/ocmdev/rita/config" + "github.com/activecm/rita/config" "gopkg.in/mgo.v2/bson" ) diff --git a/parser/parsetypes/parsetypes.go b/parser/parsetypes/parsetypes.go index 4ee61826..cdb52019 100644 --- a/parser/parsetypes/parsetypes.go +++ b/parser/parsetypes/parsetypes.go @@ -1,6 +1,6 @@ package parsetypes -import "github.com/ocmdev/rita/config" +import "github.com/activecm/rita/config" //BroData holds a line of a bro log type BroData interface { diff --git a/reporting/report-beacons.go b/reporting/report-beacons.go index 4785723a..862e2fb6 100644 --- a/reporting/report-beacons.go +++ b/reporting/report-beacons.go @@ -5,10 +5,10 @@ import ( "html/template" "os" - "github.com/ocmdev/rita/analysis/beacon" - "github.com/ocmdev/rita/database" - beaconData "github.com/ocmdev/rita/datatypes/beacon" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/analysis/beacon" + "github.com/activecm/rita/database" + beaconData "github.com/activecm/rita/datatypes/beacon" + "github.com/activecm/rita/reporting/templates" ) func printBeacons(db string, res *database.Resources) error { diff --git a/reporting/report-bl-dest-ips.go b/reporting/report-bl-dest-ips.go index d4136de3..cbff1234 100644 --- a/reporting/report-bl-dest-ips.go +++ b/reporting/report-bl-dest-ips.go @@ -6,10 +6,10 @@ import ( "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" - 
"github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" + "github.com/activecm/rita/reporting/templates" ) func printBLDestIPs(db string, res *database.Resources) error { diff --git a/reporting/report-bl-hostnames.go b/reporting/report-bl-hostnames.go index f6ae0aad..f054bbf6 100644 --- a/reporting/report-bl-hostnames.go +++ b/reporting/report-bl-hostnames.go @@ -8,11 +8,11 @@ import ( "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/analysis/dns" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/analysis/dns" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" + "github.com/activecm/rita/reporting/templates" ) func printBLHostnames(db string, res *database.Resources) error { diff --git a/reporting/report-bl-source-ips.go b/reporting/report-bl-source-ips.go index 2ebcd29c..61628b96 100644 --- a/reporting/report-bl-source-ips.go +++ b/reporting/report-bl-source-ips.go @@ -8,10 +8,10 @@ import ( "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" + "github.com/activecm/rita/reporting/templates" ) func printBLSourceIPs(db string, res *database.Resources) error { diff --git a/reporting/report-bl-urls.go b/reporting/report-bl-urls.go index b44ab175..67fff8cc 100644 --- a/reporting/report-bl-urls.go +++ b/reporting/report-bl-urls.go @@ -8,11 +8,11 @@ import ( "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/database" - 
"github.com/ocmdev/rita/datatypes/blacklist" - "github.com/ocmdev/rita/datatypes/structure" - "github.com/ocmdev/rita/datatypes/urls" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/blacklist" + "github.com/activecm/rita/datatypes/structure" + "github.com/activecm/rita/datatypes/urls" + "github.com/activecm/rita/reporting/templates" ) func printBLURLs(db string, res *database.Resources) error { diff --git a/reporting/report-explodedDns.go b/reporting/report-explodedDns.go index 2512e5c0..17b3eea4 100644 --- a/reporting/report-explodedDns.go +++ b/reporting/report-explodedDns.go @@ -5,9 +5,9 @@ import ( "html/template" "os" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/dns" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/dns" + "github.com/activecm/rita/reporting/templates" ) func printDNS(db string, res *database.Resources) error { diff --git a/reporting/report-long-connections.go b/reporting/report-long-connections.go index dc261b60..6e45d347 100644 --- a/reporting/report-long-connections.go +++ b/reporting/report-long-connections.go @@ -5,9 +5,9 @@ import ( "html/template" "os" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/data" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/data" + "github.com/activecm/rita/reporting/templates" ) func printLongConns(db string, res *database.Resources) error { diff --git a/reporting/report-scans.go b/reporting/report-scans.go index 66f1b44d..9f361e01 100644 --- a/reporting/report-scans.go +++ b/reporting/report-scans.go @@ -6,9 +6,9 @@ import ( "os" "sort" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/scanning" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + 
"github.com/activecm/rita/datatypes/scanning" + "github.com/activecm/rita/reporting/templates" ) func printScans(db string, res *database.Resources) error { diff --git a/reporting/report-urls.go b/reporting/report-urls.go index 2c5f2bbe..41000795 100644 --- a/reporting/report-urls.go +++ b/reporting/report-urls.go @@ -5,9 +5,9 @@ import ( "html/template" "os" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/urls" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/urls" + "github.com/activecm/rita/reporting/templates" ) func printLongURLs(db string, res *database.Resources) error { diff --git a/reporting/report-useragents.go b/reporting/report-useragents.go index c0c6d872..6d03b056 100644 --- a/reporting/report-useragents.go +++ b/reporting/report-useragents.go @@ -5,9 +5,9 @@ import ( "html/template" "os" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/datatypes/useragent" - "github.com/ocmdev/rita/reporting/templates" + "github.com/activecm/rita/database" + "github.com/activecm/rita/datatypes/useragent" + "github.com/activecm/rita/reporting/templates" ) func printUserAgents(db string, res *database.Resources) error { diff --git a/reporting/report.go b/reporting/report.go index 00f3f315..72a9667e 100644 --- a/reporting/report.go +++ b/reporting/report.go @@ -8,9 +8,9 @@ import ( "os" "strconv" - "github.com/ocmdev/rita/database" - htmlTempl "github.com/ocmdev/rita/reporting/templates" - "github.com/ocmdev/rita/util" + "github.com/activecm/rita/database" + htmlTempl "github.com/activecm/rita/reporting/templates" + "github.com/activecm/rita/util" "github.com/skratchdot/open-golang/open" ) diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index c2552c91..82ca546d 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -8,7 +8,7 @@ type ReportingInfo struct { Writer template.HTML } -var ocmdevImg = 
"\"Offensive" +var activecmImg = "\"Offensive" var dbHeader = ` @@ -19,7 +19,7 @@ var dbHeader = `
      - ` + ocmdevImg + ` + ` + activecmImg + `
    • RITA
    • Viewing: {{.DB}}
    • @@ -34,7 +34,7 @@ var dbHeader = `
    • Long URLs
    • User Agents
    • - RITA on + RITA on
    • @@ -49,11 +49,11 @@ var homeHeader = `
        - ` + ocmdevImg + ` + ` + activecmImg + `
      • RITA
      • - RITA on + RITA on
      • diff --git a/rita.go b/rita.go index a5c5d363..82ffc18d 100644 --- a/rita.go +++ b/rita.go @@ -4,8 +4,8 @@ import ( "os" "runtime" - "github.com/ocmdev/rita/commands" - "github.com/ocmdev/rita/config" + "github.com/activecm/rita/commands" + "github.com/activecm/rita/config" "github.com/urfave/cli" ) From 73deefaa578ed0c3be7c8c9ced8ebb3cd8e33bce Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 15:38:47 -0700 Subject: [PATCH 090/117] Remove invalid lock file --- Gopkg.lock | 170 ----------------------------------------------------- 1 file changed, 170 deletions(-) delete mode 100644 Gopkg.lock diff --git a/Gopkg.lock b/Gopkg.lock deleted file mode 100644 index 51a17b95..00000000 --- a/Gopkg.lock +++ /dev/null @@ -1,170 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - name = "github.com/activecm/mgorus" - packages = ["."] - revision = "544a63f222470b2feb3b1142f7edb9a843f5120d" - version = "v0.1.0" - -[[projects]] - name = "github.com/activecm/mgosec" - packages = ["."] - revision = "af42afa3ec74143661a863bdae62d36a93c6eca7" - version = "v0.1.0" - -[[projects]] - branch = "master" - name = "github.com/activecm/rita-bl" - packages = [ - ".", - "database", - "list", - "sources/lists", - "sources/rpc" - ] - revision = "070299442c8c467e9501907b53ba4646be71255b" - -[[projects]] - name = "github.com/blang/semver" - packages = ["."] - revision = "2ee87856327ba09384cabd113bc6b5d174e9ec0f" - version = "v3.5.1" - -[[projects]] - branch = "master" - name = "github.com/golang/protobuf" - packages = [ - "proto", - "ptypes/duration" - ] - revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845" - -[[projects]] - branch = "master" - name = "github.com/google/safebrowsing" - packages = [ - ".", - "internal/safebrowsing_proto" - ] - revision = "fe6951d7ef01b4e46d3008e8a08b55bcdf3c0ee6" - -[[projects]] - name = "github.com/mattn/go-runewidth" - packages = ["."] - revision = 
"9e777a8366cce605130a531d2cd6363d07ad7317" - version = "v0.0.2" - -[[projects]] - name = "github.com/ocmdev/mgosec" - packages = ["."] - revision = "af42afa3ec74143661a863bdae62d36a93c6eca7" - version = "v0.1.0" - -[[projects]] - branch = "master" - name = "github.com/ocmdev/rita-bl" - packages = [ - "database", - "list", - "sources/lists/util", - "sources/rpc" - ] - revision = "070299442c8c467e9501907b53ba4646be71255b" - -[[projects]] - branch = "master" - name = "github.com/olekukonko/tablewriter" - packages = ["."] - revision = "96aac992fc8b1a4c83841a6c3e7178d20d989625" - -[[projects]] - name = "github.com/rifflock/lfshook" - packages = ["."] - revision = "1fdc019a35147ddbb3d25aedf713ad6d1430c144" - version = "v2.2" - -[[projects]] - name = "github.com/sirupsen/logrus" - packages = ["."] - revision = "d682213848ed68c0a260ca37d6dd5ace8423f5ba" - version = "v1.0.4" - -[[projects]] - branch = "master" - name = "github.com/skratchdot/open-golang" - packages = ["open"] - revision = "75fb7ed4208cf72d323d7d02fd1a5964a7a9073c" - -[[projects]] - name = "github.com/urfave/cli" - packages = ["."] - revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1" - version = "v1.20.0" - -[[projects]] - branch = "master" - name = "golang.org/x/crypto" - packages = ["ssh/terminal"] - revision = "13931e22f9e72ea58bb73048bc752b48c6d4d4ac" - -[[projects]] - branch = "master" - name = "golang.org/x/net" - packages = ["idna"] - revision = "5ccada7d0a7ba9aeb5d3aca8d3501b4c2a509fec" - -[[projects]] - branch = "master" - name = "golang.org/x/sys" - packages = [ - "unix", - "windows" - ] - revision = "fff93fa7cd278d84afc205751523809c464168ab" - -[[projects]] - branch = "master" - name = "golang.org/x/text" - packages = [ - "collate", - "collate/build", - "internal/colltab", - "internal/gen", - "internal/tag", - "internal/triegen", - "internal/ucd", - "language", - "secure/bidirule", - "transform", - "unicode/bidi", - "unicode/cldr", - "unicode/norm", - "unicode/rangetable" - ] - revision = 
"e19ae1496984b1c655b8044a65c0300a3c878dd3" - -[[projects]] - branch = "v2" - name = "gopkg.in/mgo.v2" - packages = [ - ".", - "bson", - "internal/json", - "internal/sasl", - "internal/scram" - ] - revision = "3f83fa5005286a7fe593b055f0d7771a7dce4655" - -[[projects]] - branch = "v2" - name = "gopkg.in/yaml.v2" - packages = ["."] - revision = "d670f9405373e636a5a2765eea47fac0c9bc91a4" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "2cd8cd4548384c70c9d6a0facc232a8a7ed8573751f8cbea14c27b2153d38f8c" - solver-name = "gps-cdcl" - solver-version = 1 From 12aac486900116a9ceaa9870be8856e6f574f300 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 15:41:45 -0700 Subject: [PATCH 091/117] Point rita-bl towards the in activecm branch --- Gopkg.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gopkg.toml b/Gopkg.toml index 57779c5f..babfd5e8 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -29,7 +29,7 @@ version = "0.1.0" [[constraint]] - branch = "master" + branch = "activecm" name = "github.com/activecm/rita-bl" [[constraint]] From 689121da9cb393d048623213fe1eda663fc4ab0f Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 16:03:38 -0700 Subject: [PATCH 092/117] Reinstate dep ensure in the makefile --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index e83b93bc..8103aabd 100644 --- a/Makefile +++ b/Makefile @@ -7,11 +7,11 @@ LDFLAGS=-ldflags="-X github.com/activecm/rita/config.Version=${VERSION} -X githu default: -# dep ensure + dep ensure go build ${LDFLAGS} # Having issues with 'go install' + LDFLAGS using sudo and the # install script. This is a workaround. 
install: -# dep ensure + dep ensure go build ${LDFLAGS} -o ${GOPATH}/bin/${BINARY} From f16ed5110063648f20e86fb24ea45d9dcb781c70 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 17:18:04 -0700 Subject: [PATCH 093/117] Fix ocmdev mentions that weren't found by sed --- Readme.md | 6 +++--- docs/Mongo Configuration.md | 4 ++-- install.sh | 6 +++--- reporting/templates/templates.go | 6 +++--- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Readme.md b/Readme.md index e0e1c2d4..683c7c1d 100644 --- a/Readme.md +++ b/Readme.md @@ -1,6 +1,6 @@ # RITA (Real Intelligence Threat Analytics) -Brought to you by Offensive CounterMeasures. +Brought to you by Active CounterMeasures. --- ### What is Here @@ -89,11 +89,11 @@ To obtain an API key: * `rita show-blacklisted dataset_name -H` ### Getting help -Head over to [OFTC and join #activecm](https://webchat.oftc.net/?channels=activecm) for any questions you may have. +Please create an issue on GitHub if you have any questions or concerns. ### Contributing to RITA To contribute to RITA visit our [Contributing Guide](https://github.com/activecm/rita/blob/master/Contributing.md) ### License GNU GPL V3 -© Offensive CounterMeasures ™ +© Active CounterMeasures ™ diff --git a/docs/Mongo Configuration.md b/docs/Mongo Configuration.md index 94713135..dc51919b 100644 --- a/docs/Mongo Configuration.md +++ b/docs/Mongo Configuration.md @@ -281,11 +281,11 @@ Failed to connect to database: no reachable servers **Trusted Certificate Verification Example** -The following example configuration assumes your Mongo server is located at `offensivecountermeasures.com` and that you have obtained and configured Mongo with a certificate signed with a valid certificate authority. In this case, you do not need to specify a `CAFile` path. 
+The following example configuration assumes your Mongo server is located at `activecountermeasures.com` and that you have obtained and configured Mongo with a certificate signed with a valid certificate authority. In this case, you do not need to specify a `CAFile` path. ```yaml MongoDB: - ConnectionString: mongodb://offensivecountermeasures.com:27017 + ConnectionString: mongodb://activecountermeasures.com:27017 TLS: Enable: true VerifyCertificate: true diff --git a/install.sh b/install.sh index 4c44bcc9..6da29eea 100755 --- a/install.sh +++ b/install.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # -# RITA is brought to you by Offensive CounterMeasures. -# offensivecountermeasures.com +# RITA is brought to you by Active CounterMeasures. +# activecountermeasures.com _NAME=$(basename "${0}") _FAILED="\e[91mFAILED\e[0m" @@ -79,7 +79,7 @@ __title() { / | | _ \\ _|_\ ___| _| _/ _\\ -Brought to you by Offensive CounterMeasures +Brought to you by Active CounterMeasures " } diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index 82ca546d..c9fd9a6f 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -8,7 +8,7 @@ type ReportingInfo struct { Writer template.HTML } -var activecmImg = "\"Offensive" +var activecmImg = "\"Active" var dbHeader = ` @@ -18,7 +18,7 @@ var dbHeader = `
          - + ` + activecmImg + `
        • RITA
        • @@ -48,7 +48,7 @@ var homeHeader = `
            - + ` + activecmImg + `
          • RITA
          • From a29e8d2919d0bbb28f9a312f8cc5e08a6b158d4f Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 17:23:31 -0700 Subject: [PATCH 094/117] Fix activecm image in reporting --- reporting/templates/templates.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index c9fd9a6f..2801a0ab 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -8,7 +8,7 @@ type ReportingInfo struct { Writer template.HTML } -var activecmImg = "\"Active" +var activecmImg = "\"Active" var dbHeader = ` From 52050c86e55e900a0d344be2d14363ee36c890ba Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 17:33:25 -0700 Subject: [PATCH 095/117] Fix size of activecm image in reporting --- reporting/templates/templates.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reporting/templates/templates.go b/reporting/templates/templates.go index 2801a0ab..26360208 100644 --- a/reporting/templates/templates.go +++ b/reporting/templates/templates.go @@ -8,7 +8,7 @@ type ReportingInfo struct { Writer template.HTML } -var activecmImg = "\"Active" +var activecmImg = "\"Active" var dbHeader = ` From d058aaa5fd69e873c3f50ed70d783dab21c7543e Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 17:37:51 -0700 Subject: [PATCH 096/117] Remove docker installation --- Readme.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/Readme.md b/Readme.md index 683c7c1d..a225225a 100644 --- a/Readme.md +++ b/Readme.md @@ -26,9 +26,6 @@ Additional functionality is being developed and will be included soon. * Source your .bashrc (the installer added RITA to the PATH): `source ~/.bashrc` * Start MongoDB: `sudo service mongod start` -### Docker Installation -RITA is available as a Docker image at activecm/rita, [check out the instructions in the wiki](https://github.com/activecm/rita/wiki/Docker-Installation). 
- ### Manual Installation To install each component of RITA by hand, [check out the instructions in the wiki](https://github.com/activecm/rita/wiki/Installation). From e3fd75ea0ab561ea7f42f7fed60bd46de801bd65 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 8 Mar 2018 17:42:59 -0700 Subject: [PATCH 097/117] Point the rita-bl dep back at master --- Gopkg.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gopkg.toml b/Gopkg.toml index babfd5e8..57779c5f 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -29,7 +29,7 @@ version = "0.1.0" [[constraint]] - branch = "activecm" + branch = "master" name = "github.com/activecm/rita-bl" [[constraint]] From 3ce34b5a9084a4658e57ad83aa389682ca13108f Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 12 Mar 2018 22:02:39 -0600 Subject: [PATCH 098/117] Initial commit for new installer. Install with ./new-installer --version new-installer --- etc/rita.yaml | 2 +- new-install.sh | 619 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 620 insertions(+), 1 deletion(-) create mode 100755 new-install.sh diff --git a/etc/rita.yaml b/etc/rita.yaml index 8af0498c..cea6b660 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -64,7 +64,7 @@ BlackListed: # not be queried. SafeBrowsing: APIKey: "" - Database: /etc/rita/safebrowsing + Database: /var/lib/rita/safebrowsing # These are custom blacklists that you may define. They are lists of either # file paths or urls. These custom blacklists are expected to be simple, diff --git a/new-install.sh b/new-install.sh new file mode 100755 index 00000000..98777f86 --- /dev/null +++ b/new-install.sh @@ -0,0 +1,619 @@ +#!/usr/bin/env bash +# +# RITA is brought to you by Active CounterMeasures. 
+# activecountermeasures.com + +# CONSTANTS +_NAME=$(basename "${0}") +_FAILED="\e[91mFAILED\e[0m" +_SUCCESS="\e[92mSUCCESS\e[0m" +_ITEM="[-]" +_IMPORTANT="[!]" +_QUESTION="[?]" +_SUBITEM="\t$_ITEM" +_SUBIMPORTANT="\t$_IMPORTANT" +_SUBQUESTION="\t$_QUESTION" + + +# ERROR HANDLING +#Kill 0 to kill subshells as well +__err() { + printf "\n$_IMPORTANT Installation $_FAILED on line $1.\n" + exit 1 +} + +__int() { + printf "\n$_IMPORTANT Installation \e[91mCANCELLED\e[0m.\n" +} + +trap '__err $LINENO' ERR +trap '__int' INT + +set -o errexit +set -o errtrace +set -o pipefail + + +# PERMISSIONS GADGET +# The user must run the build process, but root must install +# software. In order to make sure the appropriate users +# take the right actions, we call sudo in the script itself. + +# Prevent user running sudo themselves +if [ ! -z ${SUDO_USER+x} ]; then + printf "Please run the RITA installer without sudo.\n" + exit 1 +fi + +# Root is running the script without sudo +if [ "$EUID" = "0" ]; then + _ELEVATE="" +else + printf "$_IMPORTANT The RITA installer requires root privileges for some tasks. \n" + printf "$_IMPORTANT \"sudo\" will be used when necessary. \n" + _SUDO="$(type -fp sudo)" + if [ -z $_SUDO ]; then + printf "\"sudo\" was not found on the system. Please log in as root \n" + printf "before running the installer, or install \"sudo\". \n" + exit 1 + fi + $_SUDO -v + if [ $? -ne 0 ]; then + printf "The installer was unable to elevate privileges using \"sudo\". \n" + printf "Please make sure your account has \"sudo\" privileges. 
\n" + fi + # _ELEVATE is separate from _SUDO since environment variables may need + # to be passed + _ELEVATE="$_SUDO" +fi + +# ENTRYPOINT +__entry() { + _UNINSTALL_RITA=false + + # Optional Dependencies + _INSTALL_BRO=true + _INSTALL_MONGO=true + + # Install locations + _INSTALL_PREFIX=/usr/local + _CONFIG_PATH=/etc/rita + _VAR_PATH=/var/lib/rita + + # FOR an OPT style installation + # NOTE: RITA itself must be changed to agree with the + # _CONFIG_PATH and _VAR_PATH + # _INSTALL_PREFIX=/opt/rita + # _CONFIG_PATH=/etc/opt/rita + # _VAR_PATH=/var/opt/rita + + # Parse through command args + while [[ $# -gt 0 ]]; do + case $1 in + -h|--help) + # Display help and exit + __help + exit 0 + ;; + -u|--uninstall) + _UNINSTALL_RITA=true + _INSTALL_BRO=false + _INSTALL_MONGO=false + ;; + --disable-bro) + _INSTALL_BRO=false + ;; + --disable-mongo) + _INSTALL_MONGO=false + ;; + --prefix) + shift + _INSTALL_PREFIX="$1" + ;; + --version) + shift + _RITA_VERSION="$1" + ;; + *) + ;; + esac + shift + done + + _BIN_PATH="$_INSTALL_PREFIX/bin" + + if __installation_exist; then + printf "$_IMPORTANT RITA is already installed.\n" + printf "$_QUESTION Would you like to erase it and re-install? [y/N] " + read + if [[ $REPLY =~ ^[Yy]$ ]]; then + _UNINSTALL_RITA=true + _INSTALL_BRO=false + _INSTALL_MONGO=false + else + exit 0 + fi + fi + + if [ "$_UNINSTALL_RITA" = "true" ]; then + __uninstall + else + __install + fi +} + +__installation_exist() { + [ -f "$_BIN_PATH/rita" -o -d "$_CONFIG_PATH" -o -d "$_VAR_PATH" ] +} + +__uninstall() { + printf "$_IMPORTANT Uninstalling RITA..." + if [ -f "$_BIN_PATH/rita"]; then + printf "$_SUBITEM Removing $_BIN_PATH/rita \n" + $_ELEVATE rm "$_BIN_PATH/rita" + else + printf "$_SUBIMPORTANT $_BIN_PATH/rita not found! \n" + fi + if [ -d "$_CONFIG_PATH" ]; then + printf "$_SUBITEM Removing $_CONFIG_PATH \n" + $_ELEVATE rm -rf "$_CONFIG_PATH" + else + printf "$_SUBIMPORTANT $_CONFIG_PATH not found! 
\n" + fi + if [ -d "$_VAR_PATH" ]; then + printf "$_SUBITEM Removing $_VAR_PATH \n" + $_ELEVATE rm -rf "$_VAR_PATH" + else + printf "$_SUBIMPORTANT $_VAR_PATH not found! \n" + fi + printf "$_IMPORTANT You may wish to uninstall Go, MongoDB, and Bro IDS if they were installed. \n" +} + +__install() { + __title + # Gather enough information to download installer dependencies + __gather_pkg_mgr + + # Install installer dependencies + __install_installer_deps + + # Get system information + __gather_OS + __gather_bro + __gather_go + __gather_mongo + + # Explain the installer's actions + __explain + + if [ "$_INSTALL_BRO" = "true" ]; then + if [ "$_BRO_INSTALLED" = "false" ]; then + __load "$_ITEM Installing Bro IDS" __install_bro + else + printf "$_ITEM Bro IDS is already installed \n" + fi + + if [ "$_BRO_IN_PATH" = "false" ]; then + __add_bro_to_path + fi + fi + + # Always install Go + if [ "$_GO_OUT_OF_DATE" = "true" ]; then + printf "$_IMPORTANT WARNING: An old version of Go has been detected on this system. \n" + printf "$_IMPORTANT RITA has only been tested with Go >= 1.7. Check if the installed \n" + printf "$_IMPORTANT version of Go is up to date with 'go version'. If it is out of date \n" + printf "$_IMPORTANT you may remove the old version of Go and let this installer install \n" + printf "$_IMPORTANT a more recente version. 
\n" + sleep 10s + fi + + if [ "$_GO_INSTALLED" = "false" ]; then + __load "$_ITEM Installing Go" __install_go + else + printf "$_ITEM Go is already installed \n" + fi + + if [ "$_GO_IN_PATH" = "false" ]; then + __add_go_to_path + fi + + if [ "$_GOPATH_EXISTS" = "false" ]; then + __create_go_path + else + printf "$_SUBITEM Found GOPATH at $GOPATH \n" + # Add the bin folder of the $GOPATH + # It may already be in the path, but oh well, better to be safe than sorry + export PATH=$PATH:$GOPATH/bin + fi + + if [ $_INSTALL_MONGO = "true" ]; then + if [ $_MONGO_INSTALLED = "false" ]; then + __load "$_ITEM Installing MongoDB" __install_mongodb + else + printf "$_ITEM MongoDB is already installed \n" + fi + fi + + __load "$_ITEM Installing RITA" __build_rita && __install_rita + + printf "$_IMPORTANT To finish the installtion, reload the system profile and \n" + printf "$_IMPORTANT user profile with 'source /etc/profile' and 'source ~/.profile'. \n" + printf "$_IMPORTANT Additionally, you may want to configure Bro and run 'sudo broctl deploy'. \n" + printf "$_IMPORTANT Finally, start MongoDB with 'sudo systemctl start mongod'. You can \n" + printf "$_IMPORTANT access the MongoDB shell with 'mongo'. If, at any time, you need \n" + printf "$_IMPORTANT to stop MongoDB, run 'sudo systemctl stop mongod'. \n" + + __title + printf "Thank you for installing RITA! Happy hunting! \n" +} + +__install_installer_deps() { + printf "$_ITEM In order to run the installer, several basic packages must be installed. 
\n" + + # Update package cache + __load "$_SUBITEM Updating packages" __freshen_packages + + for pkg in git curl make coreutils lsb-release; do + __load "$_SUBITEM Ensuring $pkg is installed" __install_packages $pkg + done +} + +__install_bro() { + case "$_OS" in + Ubuntu) + __add_deb_repo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" \ + "Bro" \ + "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" + ;; + CentOS) + __add_rpm_repo http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo + ;; + esac + __install_packages bro broctl + $_ELEVATE chmod 2755 /opt/bro/logs + _BRO_PKG_INSTALLED=true + _BRO_PATH="/opt/bro/bin" +} + +__add_bro_to_path() { + printf "$_SUBQUESTION Would you like to add Bro IDS to the PATH? [Y/n] " + read + if [[ ! $REPLY =~ ^[Nn]$ ]]; then + printf "$_SUBIMPORTANT Adding Bro IDS to the path in $_BRO_PATH_SCRIPT \n" + echo "export PATH=\"\$PATH:$_BRO_PATH\"" | $_ELEVATE tee $_BRO_PATH_SCRIPT > /dev/null + _BRO_PATH_SCRIPT_INSTALLED=true + export PATH="$PATH:$_BRO_PATH" + _BRO_IN_PATH=true + fi +} + +__install_go() { + curl -s -o /tmp/golang.tar.gz https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz + $_ELEVATE tar -zxf /tmp/golang.tar.gz -C /usr/local/ + rm /tmp/golang.tar.gz + _GO_INSTALLED_STD=true + _GO_INSTALLED=true + _GO_PATH="/usr/local/go/bin" +} + +__add_go_to_path() { + printf "$_SUBIMPORTANT Adding Go to the path in $_GO_PATH_SCRIPT \n" + echo "export PATH=\"\$PATH:$_GO_PATH\"" | $_ELEVATE tee $_GO_PATH_SCRIPT > /dev/null + _GO_PATH_SCRIPT_INSTALLED=true + export PATH="$PATH:$_GO_PATH" + _GO_IN_PATH=true +} + +__create_go_path() { + printf "$_SUBIMPORTANT Go requires a per-user workspace (GOPATH) in order to build software. 
\n" + + printf "$_SUBQUESTION Select a GOPATH [$HOME/go]: " + read + if [ -n "$REPLY" ]; then + export GOPATH="$REPLY" + else + export GOPATH="$HOME/go" + fi + + printf "$_SUBIMPORTANT Creating a GOPATH at $GOPATH \n" + mkdir -p "$GOPATH/"{src,pkg,bin} + _GOPATH_EXISTS=true + + export PATH="$PATH:$GOPATH/bin" + + printf "$_SUBIMPORTANT Adding your GOPATH to $_GOPATH_PATH_SCRIPT \n" + echo "export GOPATH=\"$GOPATH\"" > "$_GOPATH_PATH_SCRIPT" + echo "export PATH=\"\$PATH:\$GOPATH\bin\"" >> "$_GOPATH_PATH_SCRIPT" + _GOPATH_PATH_SCRIPT_INSTALLED=true + + printf "$_SUBIMPORTANT Adding $_GOPATH_PATH_SCRIPT to $HOME/.profile \n" + echo "source \"$_GOPATH_PATH_SCRIPT\"" >> "$HOME/.profile" +} + +__install_mongodb() { + case "$_OS" in + Ubuntu) + __add_deb_repo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" \ + "MongoDB" \ + "https://www.mongodb.org/static/pgp/server-3.4.asc" + ;; + CentOS) + if [ ! -s /etc/yum.repos.d/mongodb-org-3.4.repo ]; then + echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' > /etc/yum.repos.d/mongodb-org-3.4.repo + fi + ;; + esac + __install_packages mongodb-org + _MONGO_INSTALLED=true +} + +__build_rita() { + curl -L -s -o "$GOPATH/bin/dep" https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 + chmod +x "$GOPATH/bin/dep" + + export _RITA_SRC_DIR="$GOPATH/src/github.com/activecm/rita" + mkdir -p "$_RITA_SRC_DIR" + + # Get the code from git since the build process is dependent on git + git clone http://github.com/activecm/rita "$_RITA_SRC_DIR" > /dev/null 2>&1 + + local old_dir="$PWD" + cd "$_RITA_SRC_DIR" + if [ -n "${_RITA_VERSION+x}" ]; then + git checkout $_RITA_VERSION > /dev/null 2>&1 + fi + make > /dev/null + cd "$old_dir" +} + +__install_rita() { + $_ELEVATE mkdir -p "$_CONFIG_PATH" + 
$_ELEVATE mkdir -p "$_VAR_PATH" + + $_ELEVATE mv "$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" + $_ELEVATE chown root:root "$_BIN_PATH/rita" + $_ELEVATE chmod 755 "$_BIN_PATH/rita" + + $_ELEVATE cp "$_RITA_SRC_DIR/LICENSE" "$_CONFIG_PATH/LICENSE" + $_ELEVATE cp "$_RITA_SRC_DIR/etc/rita.yaml" "$_CONFIG_PATH/config.yaml" + $_ELEVATE cp "$_RITA_SRC_DIR/etc/tables.yaml" "$_CONFIG_PATH/tables.yaml" + + # All users can read and write rita's config file + $_ELEVATE chmod 755 "$_CONFIG_PATH" + $_ELEVATE chmod 666 "$_CONFIG_PATH/config.yaml" + + $_ELEVATE touch "$_VAR_PATH/safebrowsing" + $_ELEVATE chmod 755 "$_VAR_PATH" + $_ELEVATE chmod 666 "$_VAR_PATH/safebrowsing" +} + +# INFORMATION GATHERING + +__gather_OS() { + _OS="$(lsb_release -is)" + if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then + printf "$_ITEM This installer supports Ubuntu and CentOS. \n" + printf "$_IMPORTANT Your operating system is unsupported." + exit 1 + fi +} + +__gather_pkg_mgr() { + # _PKG_MGR = 1: APT: Ubuntu 14.04, 16.04 and Security Onion (Debian) + # _PKG_MGR = 2: YUM: CentOS (Old RHEL Derivatives) + # _PKG_MGR = 3: Unsupported + _PKG_MGR=3 + _PKG_INSTALL="" + if [ -x /usr/bin/apt-get ]; then + _PKG_MGR=1 + _PKG_INSTALL="apt-get -qq install -y" + elif [ -x /usr/bin/yum ]; then + _PKG_MGR=2 + _PKG_INSTALL="yum -y -q install" + fi + if [ $_PKG_MGR -eq 3 ]; then + printf "$_ITEM This installer supports package management via apt and yum. \n" + printf "$_IMPORTANT A supported package manager was not found. 
\n" + exit 1 + fi +} + +__gather_go() { + _GO_PATH="" + _GO_INSTALLED_STD=false + if [ -f "/usr/local/go/bin/go" ]; then + _GO_INSTALLED_STD=true + _GO_PATH="/usr/local/go/bin" + fi + + _GO_INSTALLED_NON_STD=false + if [ -n "$GOROOT" -a -f "$GOROOT/bin/go" ]; then + _GO_INSTALLED_NON_STD=true + _GO_PATH="$GOROOT/bin" + fi + + _GO_INSTALLED=false + if [ $_GO_INSTALLED_STD = "true" -o $_GO_INSTALLED_NON_STD = "true" ]; then + _GO_INSTALLED=true + fi + + _GO_OUT_OF_DATE=false + if [ $_GO_INSTALLED = "true" ]; then + case `$_GO_PATH/go version | awk '{print $3}'` in + go1|go1.2*|go1.3*|go1.4*|go1.5*|go1.6*|"") + _GO_OUT_OF_DATE=true + ;; + esac + fi + + _GO_PATH_SCRIPT="/etc/profile.d/go-path.sh" + _GO_PATH_SCRIPT_INSTALLED=false + + if [ -f "$_GO_PATH_SCRIPT" ]; then + source "$_GO_PATH_SCRIPT" + _GO_PATH_SCRIPT_INSTALLED=true + fi + + _GO_IN_PATH=false + if [ -n "$(type -fp go)" ]; then + _GO_IN_PATH=true + fi + + _GOPATH_PATH_SCRIPT="$HOME/.gopath-path.sh" + _GOPATH_PATH_SCRIPT_INSTALLED=false + + if [ -f "$_GOPATH_PATH_SCRIPT" ]; then + source "$_GOPATH_PATH_SCRIPT" + _GOPATH_PATH_SCRIPT_INSTALLED=true + fi + + _GOPATH_EXISTS=false + if [ -n "$GOPATH" ]; then + _GOPATH_EXISTS=true + fi + +} + +__gather_bro() { + _BRO_PATH="" + _BRO_PKG_INSTALLED=false + if __package_installed bro; then + _BRO_PKG_INSTALLED=true + _BRO_PATH="/opt/bro/bin" + fi + + _BRO_ONION_INSTALLED=false + if __package_installed securityonion-bro; then + _BRO_ONION_INSTALLED=true + _BRO_PATH="/opt/bro/bin" + fi + + _BRO_SOURCE_INSTALLED=false + if [ -f "/usr/local/bro/bin/bro" ]; then + _BRO_SOURCE_INSTALLED=true + _BRO_PATH="/usr/local/bro/bin" + fi + + _BRO_INSTALLED=false + if [ $_BRO_PKG_INSTALLED = "true" -o $_BRO_ONION_INSTALLED = "true" -o $_BRO_SOURCE_INSTALLED = "true" ]; then + _BRO_INSTALLED=true + fi + + _BRO_PATH_SCRIPT="/etc/profile.d/bro-path.sh" + _BRO_PATH_SCRIPT_INSTALLED=false + + if [ -f "$_BRO_PATH_SCRIPT" ]; then + source "$_BRO_PATH_SCRIPT" + _BRO_PATH_SCRIPT_INSTALLED=true 
+ fi + + _BRO_IN_PATH=false + if [ -n "$(type -pf bro)" ]; then + _BRO_IN_PATH=true + fi +} + +__gather_mongo() { + _MONGO_INSTALLED=false + if __package_installed mongodb-org; then + _MONGO_INSTALLED=true + fi +} + +# USER EXPERIENCE + +__explain() { + printf "$_ITEM This installer will: \n" + if [ $_BRO_INSTALLED = "false" -a $_INSTALL_BRO = "true" ]; then + printf "$_SUBITEM Install Bro IDS to /opt/bro \n" + fi + if [ $_GO_INSTALLED = "false" ]; then + printf "$_SUBITEM Install Go to /usr/local/go \n" + fi + if [ $_GOPATH_EXISTS = "false" ]; then + printf "$_SUBITEM Create a Go build environment (GOPATH) in $HOME/go \n" + fi + if [ $_MONGO_INSTALLED = "false" -a $_INSTALL_MONGO = "true" ]; then + printf "$_SUBITEM Install MongoDB \n" + fi + printf "$_SUBITEM Install RITA to $_BIN_PATH/rita \n" + printf "$_SUBITEM Create a runtime directory for RITA in $_VAR_PATH \n" + printf "$_SUBITEM Create a configuration directory for RITA in $_CONFIG_PATH \n" + sleep 5s +} + +__title() { + echo \ +" + _ \ _ _| __ __| \\ + / | | _ \\ +_|_\ ___| _| _/ _\\ + +Brought to you by Active CounterMeasures +" +} + +__load() { + local loadingText=$1 + printf "$loadingText...\r" + shift + eval "$@" + echo -ne "\r\033[K" + printf "$loadingText... $_SUCCESS\n" +} + +# PACKAGE MANAGEMENT +__install_packages() { + while [ ! 
-z "$1" ]; do + local pkg="$1" + # Translation layer + # apt -> yum + if [ $_PKG_MGR -eq 2 ]; then + case "$pkg" in + "lsb-release") + pkg="redhat-lsb-core" + ;; + realpath) + pkg="coreutils" + ;; + esac + fi + eval $_ELEVATE $_PKG_INSTALL $pkg >/dev/null 2>&1 + shift + done +} + +__freshen_packages() { + if [ $_PKG_MGR -eq 1 ]; then #apt + $_ELEVATE apt-get -qq update > /dev/null 2>&1 + elif [ $_PKG_MGR -eq 2 ]; then #yum + $_ELEVATE yum -q makecache > /dev/null 2>&1 + fi +} + +__package_installed() { + #Returns true if the package is installed, false otherwise + if [ $_PKG_MGR -eq 1 ]; then # apt + dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -q "ok installed" + elif [ $_PKG_MGR -eq 2 ]; then # yum and dnf + rpm -q "$1" >/dev/null + fi +} + +__add_deb_repo() { + if [ ! -s "/etc/apt/sources.list.d/$2.list" ]; then + if [ ! -z "$3" ]; then + curl -s -L "$3" | $_ELEVATE apt-key add - > /dev/null 2>&1 + fi + echo "$1" | $_ELEVATE tee "/etc/apt/sources.list.d/$2.list" > /dev/null + __freshen_packages + fi +} + +__add_rpm_repo() { + $_ELEVATE yum-config-manager -q --add-repo=$1 > /dev/null 2>&1 +} + +# ENTRYPOINT CALL +__entry "${@:-}" From a213e33bbc5324a8a9746c4beb6d811a9b24d7c4 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Mon, 12 Mar 2018 22:04:33 -0600 Subject: [PATCH 099/117] Fix interrupt handler --- new-install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/new-install.sh b/new-install.sh index 98777f86..243bc276 100755 --- a/new-install.sh +++ b/new-install.sh @@ -16,7 +16,6 @@ _SUBQUESTION="\t$_QUESTION" # ERROR HANDLING -#Kill 0 to kill subshells as well __err() { printf "\n$_IMPORTANT Installation $_FAILED on line $1.\n" exit 1 @@ -24,6 +23,7 @@ __err() { __int() { printf "\n$_IMPORTANT Installation \e[91mCANCELLED\e[0m.\n" + exit 1 } trap '__err $LINENO' ERR From 32f12b19d8db95d2cbf9addb00a955ac41861ee2 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Tue, 20 Mar 2018 10:21:22 -0600 Subject: [PATCH 100/117] Move logs to 
var folder --- etc/rita.yaml | 2 +- new-install.sh | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/etc/rita.yaml b/etc/rita.yaml index cea6b660..a2fa7957 100644 --- a/etc/rita.yaml +++ b/etc/rita.yaml @@ -29,7 +29,7 @@ LogConfig: # LogPath is the path for Rita's logs. Make sure permissions are set accordingly. # Logs will only be written here if LogToFile is true - RitaLogPath: $HOME/.rita/logs + RitaLogPath: /var/lib/rita/logs LogToFile: true LogToDB: true diff --git a/new-install.sh b/new-install.sh index 243bc276..f880876a 100755 --- a/new-install.sh +++ b/new-install.sh @@ -369,7 +369,8 @@ __build_rita() { __install_rita() { $_ELEVATE mkdir -p "$_CONFIG_PATH" - $_ELEVATE mkdir -p "$_VAR_PATH" + #$_ELEVATE mkdir -p "$_VAR_PATH" + $_ELEVATE mkdir -p "$_VAR_PATH/logs" $_ELEVATE mv "$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" $_ELEVATE chown root:root "$_BIN_PATH/rita" From b57b0ce7906a5fe2c4f64e57f5fc7a64c22df75d Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Tue, 20 Mar 2018 10:25:21 -0600 Subject: [PATCH 101/117] Overwrite install script --- install.sh | 833 ++++++++++++++++++++++++++++++------------------- new-install.sh | 620 ------------------------------------ 2 files changed, 509 insertions(+), 944 deletions(-) delete mode 100755 new-install.sh diff --git a/install.sh b/install.sh index 6da29eea..f880876a 100755 --- a/install.sh +++ b/install.sh @@ -3,123 +3,404 @@ # RITA is brought to you by Active CounterMeasures. # activecountermeasures.com +# CONSTANTS _NAME=$(basename "${0}") _FAILED="\e[91mFAILED\e[0m" _SUCCESS="\e[92mSUCCESS\e[0m" +_ITEM="[-]" +_IMPORTANT="[!]" +_QUESTION="[?]" +_SUBITEM="\t$_ITEM" +_SUBIMPORTANT="\t$_IMPORTANT" +_SUBQUESTION="\t$_QUESTION" -#Error handling -#Kill 0 to kill subshells as well + +# ERROR HANDLING __err() { - printf "\n[!] 
Installation $_FAILED!\n" - kill 0 + printf "\n$_IMPORTANT Installation $_FAILED on line $1.\n" + exit 1 +} + +__int() { + printf "\n$_IMPORTANT Installation \e[91mCANCELLED\e[0m.\n" + exit 1 } -trap __err ERR INT + +trap '__err $LINENO' ERR +trap '__int' INT + set -o errexit set -o errtrace set -o pipefail -# Fix $HOME for users under standard sudo + +# PERMISSIONS GADGET +# The user must run the build process, but root must install +# software. In order to make sure the appropriate users +# take the right actions, we call sudo in the script itself. + +# Prevent user running sudo themselves if [ ! -z ${SUDO_USER+x} ]; then - HOME="$( getent passwd $SUDO_USER | cut -d: -f6 )" + printf "Please run the RITA installer without sudo.\n" + exit 1 +fi + +# Root is running the script without sudo +if [ "$EUID" = "0" ]; then + _ELEVATE="" +else + printf "$_IMPORTANT The RITA installer requires root privileges for some tasks. \n" + printf "$_IMPORTANT \"sudo\" will be used when necessary. \n" + _SUDO="$(type -fp sudo)" + if [ -z $_SUDO ]; then + printf "\"sudo\" was not found on the system. Please log in as root \n" + printf "before running the installer, or install \"sudo\". \n" + exit 1 + fi + $_SUDO -v + if [ $? -ne 0 ]; then + printf "The installer was unable to elevate privileges using \"sudo\". \n" + printf "Please make sure your account has \"sudo\" privileges. 
\n" + fi + # _ELEVATE is separate from _SUDO since environment variables may need + # to be passed + _ELEVATE="$_SUDO" fi -# Make sure to source the latest .bashrc -# Hack the PS1 variable to get around ubuntu .bashrc -_OLD_PS1=$PS1 -PS1=" " -# Hack the interactive flag to get around other .bashrc's -set -i -# Make sure weirdness doesn't happen with autocomplete/ etc -set -o posix +# ENTRYPOINT +__entry() { + _UNINSTALL_RITA=false + + # Optional Dependencies + _INSTALL_BRO=true + _INSTALL_MONGO=true -source $HOME/.bashrc + # Install locations + _INSTALL_PREFIX=/usr/local + _CONFIG_PATH=/etc/rita + _VAR_PATH=/var/lib/rita -# Clean up our hacks -set +o posix -set +i -PS1=$_OLD_PS1 -unset _OLD_PS1 + # FOR an OPT style installation + # NOTE: RITA itself must be changed to agree with the + # _CONFIG_PATH and _VAR_PATH + # _INSTALL_PREFIX=/opt/rita + # _CONFIG_PATH=/etc/opt/rita + # _VAR_PATH=/var/opt/rita -__help() { - __title + # Parse through command args + while [[ $# -gt 0 ]]; do + case $1 in + -h|--help) + # Display help and exit + __help + exit 0 + ;; + -u|--uninstall) + _UNINSTALL_RITA=true + _INSTALL_BRO=false + _INSTALL_MONGO=false + ;; + --disable-bro) + _INSTALL_BRO=false + ;; + --disable-mongo) + _INSTALL_MONGO=false + ;; + --prefix) + shift + _INSTALL_PREFIX="$1" + ;; + --version) + shift + _RITA_VERSION="$1" + ;; + *) + ;; + esac + shift + done - cat <] + _BIN_PATH="$_INSTALL_PREFIX/bin" + + if __installation_exist; then + printf "$_IMPORTANT RITA is already installed.\n" + printf "$_QUESTION Would you like to erase it and re-install? [y/N] " + read + if [[ $REPLY =~ ^[Yy]$ ]]; then + _UNINSTALL_RITA=true + _INSTALL_BRO=false + _INSTALL_MONGO=false + else + exit 0 + fi + fi -Options: - -h --help Show this help message. - -u --uninstall Remove RITA. 
+ if [ "$_UNINSTALL_RITA" = "true" ]; then + __uninstall + else + __install + fi +} -HEREDOC +__installation_exist() { + [ -f "$_BIN_PATH/rita" -o -d "$_CONFIG_PATH" -o -d "$_VAR_PATH" ] } -__explain() { - cat <= 1.7. Check if the installed \n" + printf "$_IMPORTANT version of Go is up to date with 'go version'. If it is out of date \n" + printf "$_IMPORTANT you may remove the old version of Go and let this installer install \n" + printf "$_IMPORTANT a more recente version. \n" + sleep 10s + fi + + if [ "$_GO_INSTALLED" = "false" ]; then + __load "$_ITEM Installing Go" __install_go + else + printf "$_ITEM Go is already installed \n" + fi + + if [ "$_GO_IN_PATH" = "false" ]; then + __add_go_to_path + fi + + if [ "$_GOPATH_EXISTS" = "false" ]; then + __create_go_path + else + printf "$_SUBITEM Found GOPATH at $GOPATH \n" + # Add the bin folder of the $GOPATH + # It may already be in the path, but oh well, better to be safe than sorry + export PATH=$PATH:$GOPATH/bin + fi + + if [ $_INSTALL_MONGO = "true" ]; then + if [ $_MONGO_INSTALLED = "false" ]; then + __load "$_ITEM Installing MongoDB" __install_mongodb + else + printf "$_ITEM MongoDB is already installed \n" + fi + fi + + __load "$_ITEM Installing RITA" __build_rita && __install_rita + + printf "$_IMPORTANT To finish the installtion, reload the system profile and \n" + printf "$_IMPORTANT user profile with 'source /etc/profile' and 'source ~/.profile'. \n" + printf "$_IMPORTANT Additionally, you may want to configure Bro and run 'sudo broctl deploy'. \n" + printf "$_IMPORTANT Finally, start MongoDB with 'sudo systemctl start mongod'. You can \n" + printf "$_IMPORTANT access the MongoDB shell with 'mongo'. If, at any time, you need \n" + printf "$_IMPORTANT to stop MongoDB, run 'sudo systemctl stop mongod'. \n" + + __title + printf "Thank you for installing RITA! Happy hunting! 
\n" } -__title() { - echo \ -" - _ \ _ _| __ __| \\ - / | | _ \\ -_|_\ ___| _| _/ _\\ +__install_installer_deps() { + printf "$_ITEM In order to run the installer, several basic packages must be installed. \n" -Brought to you by Active CounterMeasures -" + # Update package cache + __load "$_SUBITEM Updating packages" __freshen_packages + for pkg in git curl make coreutils lsb-release; do + __load "$_SUBITEM Ensuring $pkg is installed" __install_packages $pkg + done } -__load() { - local pid=$! - local loadingText=$1 +__install_bro() { + case "$_OS" in + Ubuntu) + __add_deb_repo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" \ + "Bro" \ + "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" + ;; + CentOS) + __add_rpm_repo http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo + ;; + esac + __install_packages bro broctl + $_ELEVATE chmod 2755 /opt/bro/logs + _BRO_PKG_INSTALLED=true + _BRO_PATH="/opt/bro/bin" +} + +__add_bro_to_path() { + printf "$_SUBQUESTION Would you like to add Bro IDS to the PATH? [Y/n] " + read + if [[ ! $REPLY =~ ^[Nn]$ ]]; then + printf "$_SUBIMPORTANT Adding Bro IDS to the path in $_BRO_PATH_SCRIPT \n" + echo "export PATH=\"\$PATH:$_BRO_PATH\"" | $_ELEVATE tee $_BRO_PATH_SCRIPT > /dev/null + _BRO_PATH_SCRIPT_INSTALLED=true + export PATH="$PATH:$_BRO_PATH" + _BRO_IN_PATH=true + fi +} - while kill -0 $pid 2>/dev/null; do - echo -ne "$loadingText.\r" - sleep 0.5 - echo -ne "$loadingText..\r" - sleep 0.5 - echo -ne "$loadingText...\r" - sleep 0.5 - echo -ne "\r\033[K" - echo -ne "$loadingText\r" - sleep 0.5 - done - wait $pid - echo -e "$loadingText... 
$_SUCCESS" +__install_go() { + curl -s -o /tmp/golang.tar.gz https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz + $_ELEVATE tar -zxf /tmp/golang.tar.gz -C /usr/local/ + rm /tmp/golang.tar.gz + _GO_INSTALLED_STD=true + _GO_INSTALLED=true + _GO_PATH="/usr/local/go/bin" } -__checkPermissions() { - [ `id -u` -eq 0 ] +__add_go_to_path() { + printf "$_SUBIMPORTANT Adding Go to the path in $_GO_PATH_SCRIPT \n" + echo "export PATH=\"\$PATH:$_GO_PATH\"" | $_ELEVATE tee $_GO_PATH_SCRIPT > /dev/null + _GO_PATH_SCRIPT_INSTALLED=true + export PATH="$PATH:$_GO_PATH" + _GO_IN_PATH=true } -__uninstall() { - if [ "$_INSTALL_PREFIX" != "/opt/rita" ]; then - # Too risky to delete files if we don't know where it was installed (e.g. could have installed to /) - printf "\t[!] Automatic uninstall from a non-standard location is not supported \n" +__create_go_path() { + printf "$_SUBIMPORTANT Go requires a per-user workspace (GOPATH) in order to build software. \n" + + printf "$_SUBQUESTION Select a GOPATH [$HOME/go]: " + read + if [ -n "$REPLY" ]; then + export GOPATH="$REPLY" else - printf "\t[!] Removing /opt/rita \n" - rm -rf /opt/rita + export GOPATH="$HOME/go" fi - printf "\t[!] 
Removing $_CONFIG_PATH \n"
-    rm -rf "$_CONFIG_PATH"
+
+    printf "$_SUBIMPORTANT Creating a GOPATH at $GOPATH \n"
+    mkdir -p "$GOPATH/"{src,pkg,bin}
+    _GOPATH_EXISTS=true
+
+    export PATH="$PATH:$GOPATH/bin"
+
+    printf "$_SUBIMPORTANT Adding your GOPATH to $_GOPATH_PATH_SCRIPT \n"
+    echo "export GOPATH=\"$GOPATH\"" > "$_GOPATH_PATH_SCRIPT"
+    echo "export PATH=\"\$PATH:\$GOPATH/bin\"" >> "$_GOPATH_PATH_SCRIPT"
+    _GOPATH_PATH_SCRIPT_INSTALLED=true
+
+    printf "$_SUBIMPORTANT Adding $_GOPATH_PATH_SCRIPT to $HOME/.profile \n"
+    echo "source \"$_GOPATH_PATH_SCRIPT\"" >> "$HOME/.profile"
+}
+
+__install_mongodb() {
+    case "$_OS" in
+        Ubuntu)
+            __add_deb_repo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" \
+                "MongoDB" \
+                "https://www.mongodb.org/static/pgp/server-3.4.asc"
+            ;;
+        CentOS)
+            if [ ! -s /etc/yum.repos.d/mongodb-org-3.4.repo ]; then
+                echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' > /etc/yum.repos.d/mongodb-org-3.4.repo
            fi
+            ;;
+    esac
+    __install_packages mongodb-org
+    _MONGO_INSTALLED=true
+}
+
+__build_rita() {
+    curl -L -s -o "$GOPATH/bin/dep" https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64
+    chmod +x "$GOPATH/bin/dep"
+
+    export _RITA_SRC_DIR="$GOPATH/src/github.com/activecm/rita"
+    mkdir -p "$_RITA_SRC_DIR"
+
+    # Get the code from git since the build process is dependent on git
+    git clone http://github.com/activecm/rita "$_RITA_SRC_DIR" > /dev/null 2>&1
+
+    local old_dir="$PWD"
+    cd "$_RITA_SRC_DIR"
+    if [ -n "${_RITA_VERSION+x}" ]; then
+        git checkout $_RITA_VERSION > /dev/null 2>&1
+    fi
+    make > /dev/null
+    cd "$old_dir"
+}
+
+__install_rita() {
+    $_ELEVATE mkdir -p "$_CONFIG_PATH"
+    #$_ELEVATE mkdir -p "$_VAR_PATH"
+    $_ELEVATE mkdir -p "$_VAR_PATH/logs"
+
+    $_ELEVATE mv 
"$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" + $_ELEVATE chown root:root "$_BIN_PATH/rita" + $_ELEVATE chmod 755 "$_BIN_PATH/rita" + + $_ELEVATE cp "$_RITA_SRC_DIR/LICENSE" "$_CONFIG_PATH/LICENSE" + $_ELEVATE cp "$_RITA_SRC_DIR/etc/rita.yaml" "$_CONFIG_PATH/config.yaml" + $_ELEVATE cp "$_RITA_SRC_DIR/etc/tables.yaml" "$_CONFIG_PATH/tables.yaml" + + # All users can read and write rita's config file + $_ELEVATE chmod 755 "$_CONFIG_PATH" + $_ELEVATE chmod 666 "$_CONFIG_PATH/config.yaml" + + $_ELEVATE touch "$_VAR_PATH/safebrowsing" + $_ELEVATE chmod 755 "$_VAR_PATH" + $_ELEVATE chmod 666 "$_VAR_PATH/safebrowsing" +} + +# INFORMATION GATHERING + +__gather_OS() { + _OS="$(lsb_release -is)" + if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then + printf "$_ITEM This installer supports Ubuntu and CentOS. \n" + printf "$_IMPORTANT Your operating system is unsupported." + exit 1 + fi +} + +__gather_pkg_mgr() { # _PKG_MGR = 1: APT: Ubuntu 14.04, 16.04 and Security Onion (Debian) # _PKG_MGR = 2: YUM: CentOS (Old RHEL Derivatives) # _PKG_MGR = 3: Unsupported @@ -133,19 +414,157 @@ __setPkgMgr() { _PKG_INSTALL="yum -y -q install" fi if [ $_PKG_MGR -eq 3 ]; then - echo "Unsupported package manager" - __err + printf "$_ITEM This installer supports package management via apt and yum. \n" + printf "$_IMPORTANT A supported package manager was not found. 
\n" + exit 1 fi } -__setOS() { - _OS="$(lsb_release -is)" - if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then - echo "Unsupported operating system" - __err +__gather_go() { + _GO_PATH="" + _GO_INSTALLED_STD=false + if [ -f "/usr/local/go/bin/go" ]; then + _GO_INSTALLED_STD=true + _GO_PATH="/usr/local/go/bin" + fi + + _GO_INSTALLED_NON_STD=false + if [ -n "$GOROOT" -a -f "$GOROOT/bin/go" ]; then + _GO_INSTALLED_NON_STD=true + _GO_PATH="$GOROOT/bin" + fi + + _GO_INSTALLED=false + if [ $_GO_INSTALLED_STD = "true" -o $_GO_INSTALLED_NON_STD = "true" ]; then + _GO_INSTALLED=true fi + + _GO_OUT_OF_DATE=false + if [ $_GO_INSTALLED = "true" ]; then + case `$_GO_PATH/go version | awk '{print $3}'` in + go1|go1.2*|go1.3*|go1.4*|go1.5*|go1.6*|"") + _GO_OUT_OF_DATE=true + ;; + esac + fi + + _GO_PATH_SCRIPT="/etc/profile.d/go-path.sh" + _GO_PATH_SCRIPT_INSTALLED=false + + if [ -f "$_GO_PATH_SCRIPT" ]; then + source "$_GO_PATH_SCRIPT" + _GO_PATH_SCRIPT_INSTALLED=true + fi + + _GO_IN_PATH=false + if [ -n "$(type -fp go)" ]; then + _GO_IN_PATH=true + fi + + _GOPATH_PATH_SCRIPT="$HOME/.gopath-path.sh" + _GOPATH_PATH_SCRIPT_INSTALLED=false + + if [ -f "$_GOPATH_PATH_SCRIPT" ]; then + source "$_GOPATH_PATH_SCRIPT" + _GOPATH_PATH_SCRIPT_INSTALLED=true + fi + + _GOPATH_EXISTS=false + if [ -n "$GOPATH" ]; then + _GOPATH_EXISTS=true + fi + +} + +__gather_bro() { + _BRO_PATH="" + _BRO_PKG_INSTALLED=false + if __package_installed bro; then + _BRO_PKG_INSTALLED=true + _BRO_PATH="/opt/bro/bin" + fi + + _BRO_ONION_INSTALLED=false + if __package_installed securityonion-bro; then + _BRO_ONION_INSTALLED=true + _BRO_PATH="/opt/bro/bin" + fi + + _BRO_SOURCE_INSTALLED=false + if [ -f "/usr/local/bro/bin/bro" ]; then + _BRO_SOURCE_INSTALLED=true + _BRO_PATH="/usr/local/bro/bin" + fi + + _BRO_INSTALLED=false + if [ $_BRO_PKG_INSTALLED = "true" -o $_BRO_ONION_INSTALLED = "true" -o $_BRO_SOURCE_INSTALLED = "true" ]; then + _BRO_INSTALLED=true + fi + + _BRO_PATH_SCRIPT="/etc/profile.d/bro-path.sh" + 
_BRO_PATH_SCRIPT_INSTALLED=false + + if [ -f "$_BRO_PATH_SCRIPT" ]; then + source "$_BRO_PATH_SCRIPT" + _BRO_PATH_SCRIPT_INSTALLED=true + fi + + _BRO_IN_PATH=false + if [ -n "$(type -pf bro)" ]; then + _BRO_IN_PATH=true + fi +} + +__gather_mongo() { + _MONGO_INSTALLED=false + if __package_installed mongodb-org; then + _MONGO_INSTALLED=true + fi +} + +# USER EXPERIENCE + +__explain() { + printf "$_ITEM This installer will: \n" + if [ $_BRO_INSTALLED = "false" -a $_INSTALL_BRO = "true" ]; then + printf "$_SUBITEM Install Bro IDS to /opt/bro \n" + fi + if [ $_GO_INSTALLED = "false" ]; then + printf "$_SUBITEM Install Go to /usr/local/go \n" + fi + if [ $_GOPATH_EXISTS = "false" ]; then + printf "$_SUBITEM Create a Go build environment (GOPATH) in $HOME/go \n" + fi + if [ $_MONGO_INSTALLED = "false" -a $_INSTALL_MONGO = "true" ]; then + printf "$_SUBITEM Install MongoDB \n" + fi + printf "$_SUBITEM Install RITA to $_BIN_PATH/rita \n" + printf "$_SUBITEM Create a runtime directory for RITA in $_VAR_PATH \n" + printf "$_SUBITEM Create a configuration directory for RITA in $_CONFIG_PATH \n" + sleep 5s } +__title() { + echo \ +" + _ \ _ _| __ __| \\ + / | | _ \\ +_|_\ ___| _| _/ _\\ + +Brought to you by Active CounterMeasures +" +} + +__load() { + local loadingText=$1 + printf "$loadingText...\r" + shift + eval "$@" + echo -ne "\r\033[K" + printf "$loadingText... $_SUCCESS\n" +} + +# PACKAGE MANAGEMENT __install_packages() { while [ ! -z "$1" ]; do local pkg="$1" @@ -161,16 +580,16 @@ __install_packages() { ;; esac fi - eval $_PKG_INSTALL $pkg >/dev/null 2>&1 + eval $_ELEVATE $_PKG_INSTALL $pkg >/dev/null 2>&1 shift done } __freshen_packages() { if [ $_PKG_MGR -eq 1 ]; then #apt - apt-get -qq update > /dev/null 2>&1 + $_ELEVATE apt-get -qq update > /dev/null 2>&1 elif [ $_PKG_MGR -eq 2 ]; then #yum - yum -q makecache > /dev/null 2>&1 + $_ELEVATE yum -q makecache > /dev/null 2>&1 fi } @@ -186,250 +605,16 @@ __package_installed() { __add_deb_repo() { if [ ! 
-s "/etc/apt/sources.list.d/$2.list" ]; then if [ ! -z "$3" ]; then - curl -s -L "$3" | apt-key add - > /dev/null 2>&1 + curl -s -L "$3" | $_ELEVATE apt-key add - > /dev/null 2>&1 fi - echo "$1" > "/etc/apt/sources.list.d/$2.list" + echo "$1" | $_ELEVATE tee "/etc/apt/sources.list.d/$2.list" > /dev/null __freshen_packages fi } __add_rpm_repo() { - yum-config-manager -q --add-repo=$1 > /dev/null 2>&1 -} - -__check_go_version() { - case `go version | awk '{print $3}'` in - go1|go1.2*|go1.3*|go1.4*|go1.5*|go1.6*|"") - echo -e "\e[93m\t[!] WARNING: Go has been detected on this system.\e[0m -\tIf you installed Go with apt, make sure your Go installation is up -\tto date with 'go version'. RITA has only been tested with golang -\t1.7 and 1.8 which are currently not the versions in the Ubuntu -\tapt repositories. You may remove the old version with -\t'sudo apt remove golang' and let this script install the correct -\tversion for you! -" - sleep 10s - ;; - esac -} - -__install_go() { - # Check if go isn't available in the path - printf "[+] Checking if Go is installed...\n" - if [ ! $(command -v go) ]; then - if [ ! 
-x "/usr/local/go/bin/go" ]; then - ( - curl -s -o /tmp/golang.tar.gz https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz - tar -zxf /tmp/golang.tar.gz -C /usr/local/ - rm /tmp/golang.tar.gz - ) & __load "\t[+] Installing Go" - fi - printf "\t[+] Adding Go to the PATH...\n" - export PATH="$PATH:/usr/local/go/bin" - echo 'export PATH=$PATH:/usr/local/go/bin' >> $HOME/.bashrc - else - printf "\t[+] Go is installed...\n" - fi - - # Check if the GOPATH isn't set - if [ -z ${GOPATH+x} ]; then - ( # Set up the GOPATH - mkdir -p $_INSTALL_PREFIX/{src,pkg,bin} - #echo "export GOPATH=$_INSTALL_PREFIX" >> $HOME/.bashrc - echo "export PATH=\$PATH:$_INSTALL_PREFIX/bin" >> $HOME/.bashrc - ) & __load "\t[+] Configuring Go dev environment" - export GOPATH=$_INSTALL_PREFIX - export PATH=$PATH:$_INSTALL_PREFIX/bin - fi -} - -__install_bro() { - ( - # security onion packages bro on their own - if ! __package_installed bro && ! __package_installed securityonion-bro; then - case "$_OS" in - Ubuntu) - __add_deb_repo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" \ - "Bro" \ - "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" - ;; - CentOS) - __add_rpm_repo http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo - ;; - esac - __install_packages bro broctl - fi - ) & __load "[+] Ensuring Bro IDS is installed" - - if [ ! $(command -v bro) ]; then - printf "\t[+] Adding Bro to the PATH...\n" - echo 'export PATH=$PATH:/opt/bro/bin' >> $HOME/.bashrc - PATH=$PATH:/opt/bro/bin - fi - chmod 2755 /opt/bro/logs -} - -__install_mongodb() { - case "$_OS" in - Ubuntu) - __add_deb_repo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" \ - "MongoDB" \ - "https://www.mongodb.org/static/pgp/server-3.4.asc" - ;; - CentOS) - if [ ! 
-s /etc/yum.repos.d/mongodb-org-3.4.repo ]; then - echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' > /etc/yum.repos.d/mongodb-org-3.4.repo - fi - ;; - esac - __install_packages mongodb-org -} - -__install() { - - # Check if RITA is already installed, if so ask if this is a re-install - if [ ! -z $(command -v rita) ] \ - || [ -d /opt/rita ] \ - || [ -d $_CONFIG_PATH ] - then - printf "[+] RITA is already installed.\n" - read -p "[-] Would you like to erase it and re-install? [y/n] " -r - if [[ $REPLY =~ ^[Yy]$ ]] - then - __uninstall - echo "" - else - exit 1 - fi - fi - - # Explain the scripts actions - __explain - - # Figure out which package manager to use - __setPkgMgr - - # Update package sources - __freshen_packages - - # Install "the basics" - __install_packages git wget curl make coreutils realpath lsb-release & \ - __load "[+] Ensuring git, wget, curl, make, coreutils, and lsb-release are installed" - - # Determine the OS, needs lsb-release - __setOS - - if [[ "${_INSTALL_BRO}" = "true" ]] - then - __install_bro - fi - - __install_go - __check_go_version - - if [[ "${_INSTALL_MONGO}" = "true" ]] - then - __install_mongodb & __load "[+] Installing MongoDB" - fi - - ( # Build RITA - # Ensure go dep is installed - wget -q -O $GOPATH/bin/dep https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 - chmod +x $GOPATH/bin/dep - - mkdir -p $GOPATH/src/github.com/activecm/rita - # Get the install script's directory in case it's run from elsewhere - cp -R "$(dirname "$(realpath ${0})")/." 
$GOPATH/src/github.com/activecm/rita/ - cd $GOPATH/src/github.com/activecm/rita - make install > /dev/null - # Allow any user to execute rita - chmod 755 $GOPATH/bin/rita - ) & __load "[+] Installing RITA" - - - ( # Install the base configuration files - mkdir -p $_CONFIG_PATH - cd $GOPATH/src/github.com/activecm/rita - cp ./LICENSE $_CONFIG_PATH/LICENSE - cp ./etc/rita.yaml $_CONFIG_PATH/config.yaml - cp ./etc/tables.yaml $_CONFIG_PATH/tables.yaml - touch $_CONFIG_PATH/safebrowsing - chmod 755 $_CONFIG_PATH - # All users can read and write rita's config file - chmod 666 $_CONFIG_PATH/config.yaml - chmod 666 $_CONFIG_PATH/safebrowsing - ) & __load "[+] Installing config files to $_CONFIG_PATH" - - echo -e " -In order to finish the installation, reload your bash config -with 'source ~/.bashrc'. Make sure to configure Bro and run -'sudo broctl deploy'. Also, make sure to start the MongoDB -service with 'sudo service mongod start'. You can access -the MongoDB shell with 'mongo'. If, at any time, you need -to stop MongoDB, run 'sudo service mongod stop'." - - __title - printf "Thank you for installing RITA! 
" - printf "Happy hunting\n" -} - -# start point for installer -__entry() { - _INSTALL_BRO=true - _INSTALL_MONGO=true - _INSTALL_PREFIX=/opt/rita - _CONFIG_PATH=/etc/rita - _INSTALL_RITA=true - _UNINSTALL_RITA=false - - # Parse through command args - while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - # Display help and exit - __help - exit 0 - ;; - -u|--uninstall) - _UNINSTALL_RITA=true - _INSTALL_RITA=false - _INSTALL_BRO=false - _INSTALL_MONGO=false - ;; - --disable-bro) - _INSTALL_BRO=false - ;; - --disable-mongo) - _INSTALL_MONGO=false - ;; - --prefix) - shift - _INSTALL_PREFIX="$1" - ;; - *) - ;; - esac - shift - done - - # Check to see if the user has permission to install RITA - if __checkPermissions - then - if [[ "${_UNINSTALL_RITA}" = "true" ]] - then - __uninstall - exit 0 - fi - if [[ "${_INSTALL_RITA}" = "true" ]] - then - __install - exit 0 - fi - else - printf "You do NOT have permission install RITA\n\n" - fi - - + $_ELEVATE yum-config-manager -q --add-repo=$1 > /dev/null 2>&1 } +# ENTRYPOINT CALL __entry "${@:-}" diff --git a/new-install.sh b/new-install.sh deleted file mode 100755 index f880876a..00000000 --- a/new-install.sh +++ /dev/null @@ -1,620 +0,0 @@ -#!/usr/bin/env bash -# -# RITA is brought to you by Active CounterMeasures. -# activecountermeasures.com - -# CONSTANTS -_NAME=$(basename "${0}") -_FAILED="\e[91mFAILED\e[0m" -_SUCCESS="\e[92mSUCCESS\e[0m" -_ITEM="[-]" -_IMPORTANT="[!]" -_QUESTION="[?]" -_SUBITEM="\t$_ITEM" -_SUBIMPORTANT="\t$_IMPORTANT" -_SUBQUESTION="\t$_QUESTION" - - -# ERROR HANDLING -__err() { - printf "\n$_IMPORTANT Installation $_FAILED on line $1.\n" - exit 1 -} - -__int() { - printf "\n$_IMPORTANT Installation \e[91mCANCELLED\e[0m.\n" - exit 1 -} - -trap '__err $LINENO' ERR -trap '__int' INT - -set -o errexit -set -o errtrace -set -o pipefail - - -# PERMISSIONS GADGET -# The user must run the build process, but root must install -# software. 
In order to make sure the appropriate users -# take the right actions, we call sudo in the script itself. - -# Prevent user running sudo themselves -if [ ! -z ${SUDO_USER+x} ]; then - printf "Please run the RITA installer without sudo.\n" - exit 1 -fi - -# Root is running the script without sudo -if [ "$EUID" = "0" ]; then - _ELEVATE="" -else - printf "$_IMPORTANT The RITA installer requires root privileges for some tasks. \n" - printf "$_IMPORTANT \"sudo\" will be used when necessary. \n" - _SUDO="$(type -fp sudo)" - if [ -z $_SUDO ]; then - printf "\"sudo\" was not found on the system. Please log in as root \n" - printf "before running the installer, or install \"sudo\". \n" - exit 1 - fi - $_SUDO -v - if [ $? -ne 0 ]; then - printf "The installer was unable to elevate privileges using \"sudo\". \n" - printf "Please make sure your account has \"sudo\" privileges. \n" - fi - # _ELEVATE is separate from _SUDO since environment variables may need - # to be passed - _ELEVATE="$_SUDO" -fi - -# ENTRYPOINT -__entry() { - _UNINSTALL_RITA=false - - # Optional Dependencies - _INSTALL_BRO=true - _INSTALL_MONGO=true - - # Install locations - _INSTALL_PREFIX=/usr/local - _CONFIG_PATH=/etc/rita - _VAR_PATH=/var/lib/rita - - # FOR an OPT style installation - # NOTE: RITA itself must be changed to agree with the - # _CONFIG_PATH and _VAR_PATH - # _INSTALL_PREFIX=/opt/rita - # _CONFIG_PATH=/etc/opt/rita - # _VAR_PATH=/var/opt/rita - - # Parse through command args - while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - # Display help and exit - __help - exit 0 - ;; - -u|--uninstall) - _UNINSTALL_RITA=true - _INSTALL_BRO=false - _INSTALL_MONGO=false - ;; - --disable-bro) - _INSTALL_BRO=false - ;; - --disable-mongo) - _INSTALL_MONGO=false - ;; - --prefix) - shift - _INSTALL_PREFIX="$1" - ;; - --version) - shift - _RITA_VERSION="$1" - ;; - *) - ;; - esac - shift - done - - _BIN_PATH="$_INSTALL_PREFIX/bin" - - if __installation_exist; then - printf "$_IMPORTANT RITA is already 
installed.\n" - printf "$_QUESTION Would you like to erase it and re-install? [y/N] " - read - if [[ $REPLY =~ ^[Yy]$ ]]; then - _UNINSTALL_RITA=true - _INSTALL_BRO=false - _INSTALL_MONGO=false - else - exit 0 - fi - fi - - if [ "$_UNINSTALL_RITA" = "true" ]; then - __uninstall - else - __install - fi -} - -__installation_exist() { - [ -f "$_BIN_PATH/rita" -o -d "$_CONFIG_PATH" -o -d "$_VAR_PATH" ] -} - -__uninstall() { - printf "$_IMPORTANT Uninstalling RITA..." - if [ -f "$_BIN_PATH/rita"]; then - printf "$_SUBITEM Removing $_BIN_PATH/rita \n" - $_ELEVATE rm "$_BIN_PATH/rita" - else - printf "$_SUBIMPORTANT $_BIN_PATH/rita not found! \n" - fi - if [ -d "$_CONFIG_PATH" ]; then - printf "$_SUBITEM Removing $_CONFIG_PATH \n" - $_ELEVATE rm -rf "$_CONFIG_PATH" - else - printf "$_SUBIMPORTANT $_CONFIG_PATH not found! \n" - fi - if [ -d "$_VAR_PATH" ]; then - printf "$_SUBITEM Removing $_VAR_PATH \n" - $_ELEVATE rm -rf "$_VAR_PATH" - else - printf "$_SUBIMPORTANT $_VAR_PATH not found! \n" - fi - printf "$_IMPORTANT You may wish to uninstall Go, MongoDB, and Bro IDS if they were installed. \n" -} - -__install() { - __title - # Gather enough information to download installer dependencies - __gather_pkg_mgr - - # Install installer dependencies - __install_installer_deps - - # Get system information - __gather_OS - __gather_bro - __gather_go - __gather_mongo - - # Explain the installer's actions - __explain - - if [ "$_INSTALL_BRO" = "true" ]; then - if [ "$_BRO_INSTALLED" = "false" ]; then - __load "$_ITEM Installing Bro IDS" __install_bro - else - printf "$_ITEM Bro IDS is already installed \n" - fi - - if [ "$_BRO_IN_PATH" = "false" ]; then - __add_bro_to_path - fi - fi - - # Always install Go - if [ "$_GO_OUT_OF_DATE" = "true" ]; then - printf "$_IMPORTANT WARNING: An old version of Go has been detected on this system. \n" - printf "$_IMPORTANT RITA has only been tested with Go >= 1.7. 
Check if the installed \n" - printf "$_IMPORTANT version of Go is up to date with 'go version'. If it is out of date \n" - printf "$_IMPORTANT you may remove the old version of Go and let this installer install \n" - printf "$_IMPORTANT a more recente version. \n" - sleep 10s - fi - - if [ "$_GO_INSTALLED" = "false" ]; then - __load "$_ITEM Installing Go" __install_go - else - printf "$_ITEM Go is already installed \n" - fi - - if [ "$_GO_IN_PATH" = "false" ]; then - __add_go_to_path - fi - - if [ "$_GOPATH_EXISTS" = "false" ]; then - __create_go_path - else - printf "$_SUBITEM Found GOPATH at $GOPATH \n" - # Add the bin folder of the $GOPATH - # It may already be in the path, but oh well, better to be safe than sorry - export PATH=$PATH:$GOPATH/bin - fi - - if [ $_INSTALL_MONGO = "true" ]; then - if [ $_MONGO_INSTALLED = "false" ]; then - __load "$_ITEM Installing MongoDB" __install_mongodb - else - printf "$_ITEM MongoDB is already installed \n" - fi - fi - - __load "$_ITEM Installing RITA" __build_rita && __install_rita - - printf "$_IMPORTANT To finish the installtion, reload the system profile and \n" - printf "$_IMPORTANT user profile with 'source /etc/profile' and 'source ~/.profile'. \n" - printf "$_IMPORTANT Additionally, you may want to configure Bro and run 'sudo broctl deploy'. \n" - printf "$_IMPORTANT Finally, start MongoDB with 'sudo systemctl start mongod'. You can \n" - printf "$_IMPORTANT access the MongoDB shell with 'mongo'. If, at any time, you need \n" - printf "$_IMPORTANT to stop MongoDB, run 'sudo systemctl stop mongod'. \n" - - __title - printf "Thank you for installing RITA! Happy hunting! \n" -} - -__install_installer_deps() { - printf "$_ITEM In order to run the installer, several basic packages must be installed. 
\n" - - # Update package cache - __load "$_SUBITEM Updating packages" __freshen_packages - - for pkg in git curl make coreutils lsb-release; do - __load "$_SUBITEM Ensuring $pkg is installed" __install_packages $pkg - done -} - -__install_bro() { - case "$_OS" in - Ubuntu) - __add_deb_repo "deb http://download.opensuse.org/repositories/network:/bro/xUbuntu_$(lsb_release -rs)/ /" \ - "Bro" \ - "http://download.opensuse.org/repositories/network:bro/xUbuntu_$(lsb_release -rs)/Release.key" - ;; - CentOS) - __add_rpm_repo http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo - ;; - esac - __install_packages bro broctl - $_ELEVATE chmod 2755 /opt/bro/logs - _BRO_PKG_INSTALLED=true - _BRO_PATH="/opt/bro/bin" -} - -__add_bro_to_path() { - printf "$_SUBQUESTION Would you like to add Bro IDS to the PATH? [Y/n] " - read - if [[ ! $REPLY =~ ^[Nn]$ ]]; then - printf "$_SUBIMPORTANT Adding Bro IDS to the path in $_BRO_PATH_SCRIPT \n" - echo "export PATH=\"\$PATH:$_BRO_PATH\"" | $_ELEVATE tee $_BRO_PATH_SCRIPT > /dev/null - _BRO_PATH_SCRIPT_INSTALLED=true - export PATH="$PATH:$_BRO_PATH" - _BRO_IN_PATH=true - fi -} - -__install_go() { - curl -s -o /tmp/golang.tar.gz https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz - $_ELEVATE tar -zxf /tmp/golang.tar.gz -C /usr/local/ - rm /tmp/golang.tar.gz - _GO_INSTALLED_STD=true - _GO_INSTALLED=true - _GO_PATH="/usr/local/go/bin" -} - -__add_go_to_path() { - printf "$_SUBIMPORTANT Adding Go to the path in $_GO_PATH_SCRIPT \n" - echo "export PATH=\"\$PATH:$_GO_PATH\"" | $_ELEVATE tee $_GO_PATH_SCRIPT > /dev/null - _GO_PATH_SCRIPT_INSTALLED=true - export PATH="$PATH:$_GO_PATH" - _GO_IN_PATH=true -} - -__create_go_path() { - printf "$_SUBIMPORTANT Go requires a per-user workspace (GOPATH) in order to build software. 
\n" - - printf "$_SUBQUESTION Select a GOPATH [$HOME/go]: " - read - if [ -n "$REPLY" ]; then - export GOPATH="$REPLY" - else - export GOPATH="$HOME/go" - fi - - printf "$_SUBIMPORTANT Creating a GOPATH at $GOPATH \n" - mkdir -p "$GOPATH/"{src,pkg,bin} - _GOPATH_EXISTS=true - - export PATH="$PATH:$GOPATH/bin" - - printf "$_SUBIMPORTANT Adding your GOPATH to $_GOPATH_PATH_SCRIPT \n" - echo "export GOPATH=\"$GOPATH\"" > "$_GOPATH_PATH_SCRIPT" - echo "export PATH=\"\$PATH:\$GOPATH\bin\"" >> "$_GOPATH_PATH_SCRIPT" - _GOPATH_PATH_SCRIPT_INSTALLED=true - - printf "$_SUBIMPORTANT Adding $_GOPATH_PATH_SCRIPT to $HOME/.profile \n" - echo "source \"$_GOPATH_PATH_SCRIPT\"" >> "$HOME/.profile" -} - -__install_mongodb() { - case "$_OS" in - Ubuntu) - __add_deb_repo "deb [ arch=$(dpkg --print-architecture) ] http://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/3.4 multiverse" \ - "MongoDB" \ - "https://www.mongodb.org/static/pgp/server-3.4.asc" - ;; - CentOS) - if [ ! -s /etc/yum.repos.d/mongodb-org-3.4.repo ]; then - echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' > /etc/yum.repos.d/mongodb-org-3.4.repo - fi - ;; - esac - __install_packages mongodb-org - _MONGO_INSTALLED=true -} - -__build_rita() { - curl -L -s -o "$GOPATH/bin/dep" https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 - chmod +x "$GOPATH/bin/dep" - - export _RITA_SRC_DIR="$GOPATH/src/github.com/activecm/rita" - mkdir -p "$_RITA_SRC_DIR" - - # Get the code from git since the build process is dependent on git - git clone http://github.com/activecm/rita "$_RITA_SRC_DIR" > /dev/null 2>&1 - - local old_dir="$PWD" - cd "$_RITA_SRC_DIR" - if [ -n "${_RITA_VERSION+x}" ]; then - git checkout $_RITA_VERSION > /dev/null 2>&1 - fi - make > /dev/null - cd "$old_dir" -} - -__install_rita() { - $_ELEVATE mkdir -p "$_CONFIG_PATH" - 
#$_ELEVATE mkdir -p "$_VAR_PATH" - $_ELEVATE mkdir -p "$_VAR_PATH/logs" - - $_ELEVATE mv "$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" - $_ELEVATE chown root:root "$_BIN_PATH/rita" - $_ELEVATE chmod 755 "$_BIN_PATH/rita" - - $_ELEVATE cp "$_RITA_SRC_DIR/LICENSE" "$_CONFIG_PATH/LICENSE" - $_ELEVATE cp "$_RITA_SRC_DIR/etc/rita.yaml" "$_CONFIG_PATH/config.yaml" - $_ELEVATE cp "$_RITA_SRC_DIR/etc/tables.yaml" "$_CONFIG_PATH/tables.yaml" - - # All users can read and write rita's config file - $_ELEVATE chmod 755 "$_CONFIG_PATH" - $_ELEVATE chmod 666 "$_CONFIG_PATH/config.yaml" - - $_ELEVATE touch "$_VAR_PATH/safebrowsing" - $_ELEVATE chmod 755 "$_VAR_PATH" - $_ELEVATE chmod 666 "$_VAR_PATH/safebrowsing" -} - -# INFORMATION GATHERING - -__gather_OS() { - _OS="$(lsb_release -is)" - if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then - printf "$_ITEM This installer supports Ubuntu and CentOS. \n" - printf "$_IMPORTANT Your operating system is unsupported." - exit 1 - fi -} - -__gather_pkg_mgr() { - # _PKG_MGR = 1: APT: Ubuntu 14.04, 16.04 and Security Onion (Debian) - # _PKG_MGR = 2: YUM: CentOS (Old RHEL Derivatives) - # _PKG_MGR = 3: Unsupported - _PKG_MGR=3 - _PKG_INSTALL="" - if [ -x /usr/bin/apt-get ]; then - _PKG_MGR=1 - _PKG_INSTALL="apt-get -qq install -y" - elif [ -x /usr/bin/yum ]; then - _PKG_MGR=2 - _PKG_INSTALL="yum -y -q install" - fi - if [ $_PKG_MGR -eq 3 ]; then - printf "$_ITEM This installer supports package management via apt and yum. \n" - printf "$_IMPORTANT A supported package manager was not found. 
\n" - exit 1 - fi -} - -__gather_go() { - _GO_PATH="" - _GO_INSTALLED_STD=false - if [ -f "/usr/local/go/bin/go" ]; then - _GO_INSTALLED_STD=true - _GO_PATH="/usr/local/go/bin" - fi - - _GO_INSTALLED_NON_STD=false - if [ -n "$GOROOT" -a -f "$GOROOT/bin/go" ]; then - _GO_INSTALLED_NON_STD=true - _GO_PATH="$GOROOT/bin" - fi - - _GO_INSTALLED=false - if [ $_GO_INSTALLED_STD = "true" -o $_GO_INSTALLED_NON_STD = "true" ]; then - _GO_INSTALLED=true - fi - - _GO_OUT_OF_DATE=false - if [ $_GO_INSTALLED = "true" ]; then - case `$_GO_PATH/go version | awk '{print $3}'` in - go1|go1.2*|go1.3*|go1.4*|go1.5*|go1.6*|"") - _GO_OUT_OF_DATE=true - ;; - esac - fi - - _GO_PATH_SCRIPT="/etc/profile.d/go-path.sh" - _GO_PATH_SCRIPT_INSTALLED=false - - if [ -f "$_GO_PATH_SCRIPT" ]; then - source "$_GO_PATH_SCRIPT" - _GO_PATH_SCRIPT_INSTALLED=true - fi - - _GO_IN_PATH=false - if [ -n "$(type -fp go)" ]; then - _GO_IN_PATH=true - fi - - _GOPATH_PATH_SCRIPT="$HOME/.gopath-path.sh" - _GOPATH_PATH_SCRIPT_INSTALLED=false - - if [ -f "$_GOPATH_PATH_SCRIPT" ]; then - source "$_GOPATH_PATH_SCRIPT" - _GOPATH_PATH_SCRIPT_INSTALLED=true - fi - - _GOPATH_EXISTS=false - if [ -n "$GOPATH" ]; then - _GOPATH_EXISTS=true - fi - -} - -__gather_bro() { - _BRO_PATH="" - _BRO_PKG_INSTALLED=false - if __package_installed bro; then - _BRO_PKG_INSTALLED=true - _BRO_PATH="/opt/bro/bin" - fi - - _BRO_ONION_INSTALLED=false - if __package_installed securityonion-bro; then - _BRO_ONION_INSTALLED=true - _BRO_PATH="/opt/bro/bin" - fi - - _BRO_SOURCE_INSTALLED=false - if [ -f "/usr/local/bro/bin/bro" ]; then - _BRO_SOURCE_INSTALLED=true - _BRO_PATH="/usr/local/bro/bin" - fi - - _BRO_INSTALLED=false - if [ $_BRO_PKG_INSTALLED = "true" -o $_BRO_ONION_INSTALLED = "true" -o $_BRO_SOURCE_INSTALLED = "true" ]; then - _BRO_INSTALLED=true - fi - - _BRO_PATH_SCRIPT="/etc/profile.d/bro-path.sh" - _BRO_PATH_SCRIPT_INSTALLED=false - - if [ -f "$_BRO_PATH_SCRIPT" ]; then - source "$_BRO_PATH_SCRIPT" - _BRO_PATH_SCRIPT_INSTALLED=true 
- fi - - _BRO_IN_PATH=false - if [ -n "$(type -pf bro)" ]; then - _BRO_IN_PATH=true - fi -} - -__gather_mongo() { - _MONGO_INSTALLED=false - if __package_installed mongodb-org; then - _MONGO_INSTALLED=true - fi -} - -# USER EXPERIENCE - -__explain() { - printf "$_ITEM This installer will: \n" - if [ $_BRO_INSTALLED = "false" -a $_INSTALL_BRO = "true" ]; then - printf "$_SUBITEM Install Bro IDS to /opt/bro \n" - fi - if [ $_GO_INSTALLED = "false" ]; then - printf "$_SUBITEM Install Go to /usr/local/go \n" - fi - if [ $_GOPATH_EXISTS = "false" ]; then - printf "$_SUBITEM Create a Go build environment (GOPATH) in $HOME/go \n" - fi - if [ $_MONGO_INSTALLED = "false" -a $_INSTALL_MONGO = "true" ]; then - printf "$_SUBITEM Install MongoDB \n" - fi - printf "$_SUBITEM Install RITA to $_BIN_PATH/rita \n" - printf "$_SUBITEM Create a runtime directory for RITA in $_VAR_PATH \n" - printf "$_SUBITEM Create a configuration directory for RITA in $_CONFIG_PATH \n" - sleep 5s -} - -__title() { - echo \ -" - _ \ _ _| __ __| \\ - / | | _ \\ -_|_\ ___| _| _/ _\\ - -Brought to you by Active CounterMeasures -" -} - -__load() { - local loadingText=$1 - printf "$loadingText...\r" - shift - eval "$@" - echo -ne "\r\033[K" - printf "$loadingText... $_SUCCESS\n" -} - -# PACKAGE MANAGEMENT -__install_packages() { - while [ ! 
-z "$1" ]; do - local pkg="$1" - # Translation layer - # apt -> yum - if [ $_PKG_MGR -eq 2 ]; then - case "$pkg" in - "lsb-release") - pkg="redhat-lsb-core" - ;; - realpath) - pkg="coreutils" - ;; - esac - fi - eval $_ELEVATE $_PKG_INSTALL $pkg >/dev/null 2>&1 - shift - done -} - -__freshen_packages() { - if [ $_PKG_MGR -eq 1 ]; then #apt - $_ELEVATE apt-get -qq update > /dev/null 2>&1 - elif [ $_PKG_MGR -eq 2 ]; then #yum - $_ELEVATE yum -q makecache > /dev/null 2>&1 - fi -} - -__package_installed() { - #Returns true if the package is installed, false otherwise - if [ $_PKG_MGR -eq 1 ]; then # apt - dpkg-query -W -f='${Status}' "$1" 2>/dev/null | grep -q "ok installed" - elif [ $_PKG_MGR -eq 2 ]; then # yum and dnf - rpm -q "$1" >/dev/null - fi -} - -__add_deb_repo() { - if [ ! -s "/etc/apt/sources.list.d/$2.list" ]; then - if [ ! -z "$3" ]; then - curl -s -L "$3" | $_ELEVATE apt-key add - > /dev/null 2>&1 - fi - echo "$1" | $_ELEVATE tee "/etc/apt/sources.list.d/$2.list" > /dev/null - __freshen_packages - fi -} - -__add_rpm_repo() { - $_ELEVATE yum-config-manager -q --add-repo=$1 > /dev/null 2>&1 -} - -# ENTRYPOINT CALL -__entry "${@:-}" From 9de65606b9d6d265b7b453aba4e890763c606192 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Tue, 20 Mar 2018 13:22:56 -0500 Subject: [PATCH 102/117] Fix typos in installer. Add help message back in. Update readme to reflect installer changes. --- Readme.md | 5 +++-- install.sh | 51 ++++++++++++++++++++++++++++++--------------------- 2 files changed, 33 insertions(+), 23 deletions(-) diff --git a/Readme.md b/Readme.md index a225225a..b1763ee8 100644 --- a/Readme.md +++ b/Readme.md @@ -19,11 +19,12 @@ Additional functionality is being developed and will be included soon. 
### Automatic Installation **The automatic installer is officially supported on Ubuntu 14.04, 16.04 LTS, Security Onion, and CentOS 7** + + * Clone the package: `git clone https://github.com/activecm/rita.git` * Change into the source directory: `cd rita` -* Run the installer: `sudo ./install.sh` -* Source your .bashrc (the installer added RITA to the PATH): `source ~/.bashrc` +* Run the installer: `./install.sh` * Start MongoDB: `sudo service mongod start` ### Manual Installation diff --git a/install.sh b/install.sh index f880876a..ef0d79ec 100755 --- a/install.sh +++ b/install.sh @@ -69,7 +69,7 @@ fi # ENTRYPOINT __entry() { - _UNINSTALL_RITA=false + _REINSTALL_RITA=false # Optional Dependencies _INSTALL_BRO=true @@ -95,8 +95,8 @@ __entry() { __help exit 0 ;; - -u|--uninstall) - _UNINSTALL_RITA=true + -r|--reinstall) + _REINSTALL_RITA=true _INSTALL_BRO=false _INSTALL_MONGO=false ;; @@ -106,11 +106,7 @@ __entry() { --disable-mongo) _INSTALL_MONGO=false ;; - --prefix) - shift - _INSTALL_PREFIX="$1" - ;; - --version) + -v|--version) shift _RITA_VERSION="$1" ;; @@ -122,33 +118,30 @@ __entry() { _BIN_PATH="$_INSTALL_PREFIX/bin" - if __installation_exist; then + if [ __installation_exist ] && [ "$_REINSTALL_RITA" != "true" ]; then printf "$_IMPORTANT RITA is already installed.\n" printf "$_QUESTION Would you like to erase it and re-install? [y/N] " read if [[ $REPLY =~ ^[Yy]$ ]]; then - _UNINSTALL_RITA=true - _INSTALL_BRO=false - _INSTALL_MONGO=false + _REINSTALL_RITA=true else exit 0 fi fi - if [ "$_UNINSTALL_RITA" = "true" ]; then - __uninstall - else - __install + if [ "$_REINSTALL_RITA" = "true" ]; then + __reinstall fi + __install } __installation_exist() { [ -f "$_BIN_PATH/rita" -o -d "$_CONFIG_PATH" -o -d "$_VAR_PATH" ] } -__uninstall() { - printf "$_IMPORTANT Uninstalling RITA..." - if [ -f "$_BIN_PATH/rita"]; then +__reinstall() { + printf "$_IMPORTANT Re-installing RITA..." 
+ if [ -f "$_BIN_PATH/rita" ]; then printf "$_SUBITEM Removing $_BIN_PATH/rita \n" $_ELEVATE rm "$_BIN_PATH/rita" else @@ -166,7 +159,6 @@ __uninstall() { else printf "$_SUBIMPORTANT $_VAR_PATH not found! \n" fi - printf "$_IMPORTANT You may wish to uninstall Go, MongoDB, and Bro IDS if they were installed. \n" } __install() { @@ -324,7 +316,7 @@ __create_go_path() { printf "$_SUBIMPORTANT Adding your GOPATH to $_GOPATH_PATH_SCRIPT \n" echo "export GOPATH=\"$GOPATH\"" > "$_GOPATH_PATH_SCRIPT" - echo "export PATH=\"\$PATH:\$GOPATH\bin\"" >> "$_GOPATH_PATH_SCRIPT" + echo "export PATH=\"\$PATH:\$GOPATH/bin\"" >> "$_GOPATH_PATH_SCRIPT" _GOPATH_PATH_SCRIPT_INSTALLED=true printf "$_SUBIMPORTANT Adding $_GOPATH_PATH_SCRIPT to $HOME/.profile \n" @@ -555,6 +547,23 @@ Brought to you by Active CounterMeasures " } +__help() { + __title + + cat <] +Options: + -h|--help Show this help message. + -r|--reinstall Force reinstalling RITA. + -v|--version Specify the version tag of RITA to install instead of master. + --disable-bro Disable automatic installation of Bro IDS. + --disable-mongo Disable automatic installation of MongoDB. +HEREDOC +} + __load() { local loadingText=$1 printf "$loadingText...\r" From e1f29eedabd7673af15a2d55998a222514ff012e Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Thu, 22 Mar 2018 00:35:01 -0500 Subject: [PATCH 103/117] Remove uninstall and back up rita user config on reinstall --- install.sh | 43 ++++++++++--------------------------------- 1 file changed, 10 insertions(+), 33 deletions(-) diff --git a/install.sh b/install.sh index ef0d79ec..647fbae0 100755 --- a/install.sh +++ b/install.sh @@ -120,18 +120,13 @@ __entry() { if [ __installation_exist ] && [ "$_REINSTALL_RITA" != "true" ]; then printf "$_IMPORTANT RITA is already installed.\n" - printf "$_QUESTION Would you like to erase it and re-install? [y/N] " + printf "$_QUESTION Would you like to re-install? 
[y/N] " read - if [[ $REPLY =~ ^[Yy]$ ]]; then - _REINSTALL_RITA=true - else + if [[ $REPLY =~ ^[Nn]$ ]]; then exit 0 fi fi - if [ "$_REINSTALL_RITA" = "true" ]; then - __reinstall - fi __install } @@ -139,28 +134,6 @@ __installation_exist() { [ -f "$_BIN_PATH/rita" -o -d "$_CONFIG_PATH" -o -d "$_VAR_PATH" ] } -__reinstall() { - printf "$_IMPORTANT Re-installing RITA..." - if [ -f "$_BIN_PATH/rita" ]; then - printf "$_SUBITEM Removing $_BIN_PATH/rita \n" - $_ELEVATE rm "$_BIN_PATH/rita" - else - printf "$_SUBIMPORTANT $_BIN_PATH/rita not found! \n" - fi - if [ -d "$_CONFIG_PATH" ]; then - printf "$_SUBITEM Removing $_CONFIG_PATH \n" - $_ELEVATE rm -rf "$_CONFIG_PATH" - else - printf "$_SUBIMPORTANT $_CONFIG_PATH not found! \n" - fi - if [ -d "$_VAR_PATH" ]; then - printf "$_SUBITEM Removing $_VAR_PATH \n" - $_ELEVATE rm -rf "$_VAR_PATH" - else - printf "$_SUBIMPORTANT $_VAR_PATH not found! \n" - fi -} - __install() { __title # Gather enough information to download installer dependencies @@ -364,13 +337,17 @@ __install_rita() { #$_ELEVATE mkdir -p "$_VAR_PATH" $_ELEVATE mkdir -p "$_VAR_PATH/logs" - $_ELEVATE mv "$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" + $_ELEVATE mv -f "$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" $_ELEVATE chown root:root "$_BIN_PATH/rita" $_ELEVATE chmod 755 "$_BIN_PATH/rita" - $_ELEVATE cp "$_RITA_SRC_DIR/LICENSE" "$_CONFIG_PATH/LICENSE" - $_ELEVATE cp "$_RITA_SRC_DIR/etc/rita.yaml" "$_CONFIG_PATH/config.yaml" - $_ELEVATE cp "$_RITA_SRC_DIR/etc/tables.yaml" "$_CONFIG_PATH/tables.yaml" + $_ELEVATE cp -f "$_RITA_SRC_DIR/LICENSE" "$_CONFIG_PATH/LICENSE" + if [ -f "$_CONFIG_PATH/config.yaml" ]; then + printf "$_SUBITEM Backing up your current RITA config: $_CONFIG_PATH/config.yaml -> $_CONFIG_PATH/config.yaml.old \n" + $_ELEVATE mv -f "$_CONFIG_PATH/config.yaml" "$_CONFIG_PATH/config.yaml.old" + fi + $_ELEVATE cp -f "$_RITA_SRC_DIR/etc/rita.yaml" "$_CONFIG_PATH/config.yaml" + $_ELEVATE cp -f "$_RITA_SRC_DIR/etc/tables.yaml" "$_CONFIG_PATH/tables.yaml" # All 
users can read and write rita's config file $_ELEVATE chmod 755 "$_CONFIG_PATH" From a1a27f4b62b63828b64b71d3b637ac649c3695dd Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Thu, 22 Mar 2018 12:39:56 -0500 Subject: [PATCH 104/117] Fix broken reinstall condition --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 647fbae0..f31fd761 100755 --- a/install.sh +++ b/install.sh @@ -118,7 +118,7 @@ __entry() { _BIN_PATH="$_INSTALL_PREFIX/bin" - if [ __installation_exist ] && [ "$_REINSTALL_RITA" != "true" ]; then + if __installation_exist && [ "$_REINSTALL_RITA" != "true" ]; then printf "$_IMPORTANT RITA is already installed.\n" printf "$_QUESTION Would you like to re-install? [y/N] " read From ec8b20fa423a6db03a789121885683e0fd6466a9 Mon Sep 17 00:00:00 2001 From: logan Date: Thu, 22 Mar 2018 12:23:49 -0600 Subject: [PATCH 105/117] Make selection behaviour match what is displayed on re-install --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index f31fd761..c7fa2bc7 100755 --- a/install.sh +++ b/install.sh @@ -122,7 +122,7 @@ __entry() { printf "$_IMPORTANT RITA is already installed.\n" printf "$_QUESTION Would you like to re-install? [y/N] " read - if [[ $REPLY =~ ^[Nn]$ ]]; then + if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 0 fi fi From e398cbd857aea6794be551a6b486a684158d9eda Mon Sep 17 00:00:00 2001 From: logan Date: Thu, 22 Mar 2018 12:36:12 -0600 Subject: [PATCH 106/117] Fix centos sudo error --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index c7fa2bc7..d72f6560 100755 --- a/install.sh +++ b/install.sh @@ -305,7 +305,7 @@ __install_mongodb() { ;; CentOS) if [ ! 
-s /etc/yum.repos.d/mongodb-org-3.4.repo ]; then - echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' > /etc/yum.repos.d/mongodb-org-3.4.repo + echo -e '[mongodb-org-3.4]\nname=MongoDB Repository\nbaseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.4/x86_64/\ngpgcheck=1\nenabled=1\ngpgkey=https://www.mongodb.org/static/pgp/server-3.4.asc' | $_ELEVATE tee /etc/yum.repos.d/mongodb-org-3.4.repo > /dev/null fi ;; esac From c295c08d006622f42d00dcc77f71d5d3696c0258 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Thu, 22 Mar 2018 17:29:57 -0500 Subject: [PATCH 107/117] Workaround for Bro on CentOS --- install.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/install.sh b/install.sh index d72f6560..71716d6b 100755 --- a/install.sh +++ b/install.sh @@ -233,6 +233,14 @@ __install_bro() { ;; CentOS) __add_rpm_repo http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo + + # Workaround for https://github.com/activecm/rita/issues/189 + curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/bro-2.5.3-1.1.x86_64.rpm -o /tmp/bro-2.5.3-1.1.x86_64.rpm + curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/bro-core-2.5.3-1.1.x86_64.rpm -o /tmp/bro-core-2.5.3-1.1.x86_64.rpm + curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/broctl-2.5.3-1.1.x86_64.rpm -o /tmp/broctl-2.5.3-1.1.x86_64.rpm + curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/libbroccoli-2.5.3-1.1.x86_64.rpm -o /tmp/libbroccoli-2.5.3-1.1.x86_64.rpm + yum -y -q localinstall /tmp/bro-2.5.3-1.1.x86_64.rpm /tmp/bro-core-2.5.3-1.1.x86_64.rpm /tmp/broctl-2.5.3-1.1.x86_64.rpm /tmp/libbroccoli-2.5.3-1.1.x86_64.rpm + # End workaround ;; esac __install_packages bro broctl @@ -600,6 +608,7 @@ 
__add_deb_repo() { __add_rpm_repo() { $_ELEVATE yum-config-manager -q --add-repo=$1 > /dev/null 2>&1 + __freshen_packages } # ENTRYPOINT CALL From 89747434c42809a41399adbe39986f3c3601b9aa Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 22 Mar 2018 17:07:51 -0600 Subject: [PATCH 108/117] Add missing $_ELEVATE to run yum in the workaround --- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index 71716d6b..d7940f6c 100755 --- a/install.sh +++ b/install.sh @@ -239,7 +239,7 @@ __install_bro() { curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/bro-core-2.5.3-1.1.x86_64.rpm -o /tmp/bro-core-2.5.3-1.1.x86_64.rpm curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/broctl-2.5.3-1.1.x86_64.rpm -o /tmp/broctl-2.5.3-1.1.x86_64.rpm curl -sSL http://download.opensuse.org/repositories/network%3A/bro/CentOS_7/x86_64/libbroccoli-2.5.3-1.1.x86_64.rpm -o /tmp/libbroccoli-2.5.3-1.1.x86_64.rpm - yum -y -q localinstall /tmp/bro-2.5.3-1.1.x86_64.rpm /tmp/bro-core-2.5.3-1.1.x86_64.rpm /tmp/broctl-2.5.3-1.1.x86_64.rpm /tmp/libbroccoli-2.5.3-1.1.x86_64.rpm + $_ELEVATE yum -y -q localinstall /tmp/bro-2.5.3-1.1.x86_64.rpm /tmp/bro-core-2.5.3-1.1.x86_64.rpm /tmp/broctl-2.5.3-1.1.x86_64.rpm /tmp/libbroccoli-2.5.3-1.1.x86_64.rpm # End workaround ;; esac From acebddc2bfee86564d44db19b4f1d6b1ad6981e8 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 22 Mar 2018 17:52:14 -0600 Subject: [PATCH 109/117] Correct instructions for ubuntu 14 and security onion users regarding mongodb --- install.sh | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/install.sh b/install.sh index d7940f6c..a901de50 100755 --- a/install.sh +++ b/install.sh @@ -202,12 +202,20 @@ __install() { __load "$_ITEM Installing RITA" __build_rita && __install_rita + # Ubuntu 14.04 uses Upstart for init + _START_MONGO="sudo systemctl start mongod" + _STOP_MONGO="sudo systemctl 
stop mongod" + if [ $_OS = "Ubuntu" -a $_OS_CODENAME = "trusty" ]; then + _START_MONGO="sudo service mongod start" + _STOP_MONGO="sudo service mongod stop" + fi + printf "$_IMPORTANT To finish the installtion, reload the system profile and \n" printf "$_IMPORTANT user profile with 'source /etc/profile' and 'source ~/.profile'. \n" printf "$_IMPORTANT Additionally, you may want to configure Bro and run 'sudo broctl deploy'. \n" - printf "$_IMPORTANT Finally, start MongoDB with 'sudo systemctl start mongod'. You can \n" + printf "$_IMPORTANT Finally, start MongoDB with '$_START_MONGO'. You can \n" printf "$_IMPORTANT access the MongoDB shell with 'mongo'. If, at any time, you need \n" - printf "$_IMPORTANT to stop MongoDB, run 'sudo systemctl stop mongod'. \n" + printf "$_IMPORTANT to stop MongoDB, run '$_STOP_MONGO'. \n" __title printf "Thank you for installing RITA! Happy hunting! \n" @@ -370,6 +378,7 @@ __install_rita() { __gather_OS() { _OS="$(lsb_release -is)" + _OS_CODENAME="$(lsb_release -cs)" if [ "$_OS" != "Ubuntu" -a "$_OS" != "CentOS" ]; then printf "$_ITEM This installer supports Ubuntu and CentOS. \n" printf "$_IMPORTANT Your operating system is unsupported." From 3d10bf97891d9b28f7136368016df211c9e1c76d Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Tue, 27 Mar 2018 17:12:53 -0400 Subject: [PATCH 110/117] Extending docker capabilities and adding statically linked binary build --- Dockerfile | 43 ++++++++++++++------------- Makefile | 2 -- Readme.md | 7 ++--- docs/Docker Usage.md | 71 ++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 96 insertions(+), 27 deletions(-) create mode 100644 docs/Docker Usage.md diff --git a/Dockerfile b/Dockerfile index 561e8c40..2e9638a8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,25 +1,28 @@ -#RITA runs in Docker! -#However, it needs a little help. -#In order to run rita in Docker, two volume mounts are needed. -#One for logs, and another for the config file. 
-#Alternatively you may extend this dockerfile and add in these files. -#Make sure your Dockerized RITA config file points to the correct bro log location. -#Additionally, make sure that RITA has access to a MongoDB server. - -#Ex: docker run -it --rm -v /path/to/bro/logs:/logs/:ro -v /path/to/rita/config.yaml:/root/.rita/config.yaml:ro rita import -#RITA works best with docker-compose. Docker-compose lets you set these mounts -#and additionally connect it to MongoDB with ease. -FROM golang:1.8-alpine as rita-builder -RUN apk update && apk upgrade && apk add --no-cache git make ca-certificates wget +FROM golang:1.10-alpine as rita-builder +RUN apk add --no-cache git make ca-certificates wget build-base RUN wget -q -O /go/bin/dep https://github.com/golang/dep/releases/download/v0.3.2/dep-linux-amd64 && chmod +x /go/bin/dep WORKDIR /go/src/github.com/activecm/rita COPY . . -RUN make -FROM alpine:latest +# Change ARGs with --build-arg to target other architectures +# Produce a self-contained statically linked binary +ARG CGO_ENABLED=0 +# Set the build target architecture and OS +ARG GOARCH=amd64 +ARG GOOS=linux +# Passing arguments in to make result in them being set as +# environment variables for the call to go build +RUN make CGO_ENABLED=$CGO_ENABLED GOARCH=$GOARCH GOOS=$GOOS + +FROM scratch + +# Use WORKDIR to create /var/lib/rita since "mkdir" doesn't exist in scratch +# /var/lib/rita is required for the safebrowsing cache in the default config +WORKDIR /var/lib/rita + +WORKDIR / +COPY --from=rita-builder /go/src/github.com/activecm/rita/etc/tables.yaml /etc/rita/tables.yaml +COPY --from=rita-builder /go/src/github.com/activecm/rita/etc/rita.yaml /etc/rita/config.yaml +COPY --from=rita-builder /go/src/github.com/activecm/rita/rita /rita -WORKDIR /root -RUN mkdir /etc/rita -COPY --from=rita-builder /go/src/github.com/activecm/rita/etc/tables.yaml /etc/rita/ -COPY --from=rita-builder /go/src/github.com/activecm/rita/rita . 
-ENTRYPOINT ["./rita"] +ENTRYPOINT ["/rita"] diff --git a/Makefile b/Makefile index 8103aabd..e22fc1ee 100644 --- a/Makefile +++ b/Makefile @@ -10,8 +10,6 @@ default: dep ensure go build ${LDFLAGS} -# Having issues with 'go install' + LDFLAGS using sudo and the -# install script. This is a workaround. install: dep ensure go build ${LDFLAGS} -o ${GOPATH}/bin/${BINARY} diff --git a/Readme.md b/Readme.md index b1763ee8..0e2ae075 100644 --- a/Readme.md +++ b/Readme.md @@ -1,6 +1,6 @@ # RITA (Real Intelligence Threat Analytics) -Brought to you by Active CounterMeasures. +Brought to you by Active Countermeasures. --- ### What is Here @@ -19,8 +19,6 @@ Additional functionality is being developed and will be included soon. ### Automatic Installation **The automatic installer is officially supported on Ubuntu 14.04, 16.04 LTS, Security Onion, and CentOS 7** - - * Clone the package: `git clone https://github.com/activecm/rita.git` * Change into the source directory: `cd rita` @@ -35,7 +33,6 @@ RITA contains a yaml format configuration file. You can specify the location for the configuration file with the **-c** command line flag. If not specified, RITA will look for the configuration in **/etc/rita/config.yaml**. - ### API Keys RITA relies on the the [Google Safe Browsing API](https://developers.google.com/safe-browsing/) to check network log data for connections to known threats. An API key is required to use this service. Obtaining a key is free, and only requires a Google account. @@ -94,4 +91,4 @@ To contribute to RITA visit our [Contributing Guide](https://github.com/activecm ### License GNU GPL V3 -© Active CounterMeasures ™ +© Active Countermeasures ™ diff --git a/docs/Docker Usage.md b/docs/Docker Usage.md new file mode 100644 index 00000000..0f3a381f --- /dev/null +++ b/docs/Docker Usage.md @@ -0,0 +1,71 @@ +# Docker Usage + +You can run RITA using Docker! You have several options depending on your specific needs. 
+* Running RITA with Docker Compose - This is the simplest option and requires the least setup. You will have to provide your own Bro logs. +* Running RITA with Docker Using External Mongo - This option is useful if you do not want to use Docker Compose or you have an external Mongo server you wish to use. +* Using Docker to Build RITA - You can use Docker to build a standalone RITA binary that runs on any Linux 64-bit CPU. This is useful if you want a portable binary but don't want to use Docker to actually run RITA. + +## Obtaining the RITA Docker Image + +The easiest way is to pull down the pre-built image. + +``` +docker pull activecm/rita:latest +``` + +You can also build the image from scratch. + +``` +docker build -t activecm/rita:latest . +``` + +## Running RITA with Docker Compose + +At the very least, you will have to provide RITA with the path to your Bro log files using the `BRO_LOGS` environment variable. + +``` +export BRO_LOGS=/path/to/your/logs +docker-compose run --rm rita import +docker-compose run --rm rita analyze +``` + +You can also call it this way if you wish. + +``` +BRO_LOGS=/path/to/your/logs docker-compose run --rm rita import +BRO_LOGS=/path/to/your/logs docker-compose run --rm rita analyze +``` + +RITA will use the default `config.yaml` file which will work out of the box. If you wish to specify your own config file you can do so like this: + +``` +export BRO_LOGS=/path/to/your/logs +docker-compose run --rm -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml rita show-databases +``` + +## Running RITA with Docker Using External Mongo + +If you don't need/want the convenience of Docker Compose running the Mongo server for you, you can also use RITA without it. You will need to modify RITA's config file to point to your external Mongo server. 
+ +``` +docker run -it --rm \ + -v /path/to/your/bro/logs:/opt/bro/logs/:ro \ + -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml:ro \ + activecm/rita:latest import +docker run -it --rm \ + -v /path/to/your/bro/logs:/opt/bro/logs/:ro \ + -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml:ro \ + activecm/rita:latest analyze +``` + +## Using Docker to Build RITA + +You can use Docker to build a statically linked RITA binary for you. This binary should be portable between Linux 64-bit systems. Once you've obtained the RITA docker image (see the "Obtaining the RITA Docker Image" section above) you can run the following commands to copy the binary to your host system. + +``` +docker create --name rita activecm/rita:latest +docker cp rita:/rita ./rita +docker rm rita +``` + +Note that you will have to manually install the `config.yaml` and `tables.yaml` files into `/etc/rita/` as well as create any directories referenced inside the `config.yaml` file. From 579c4dda29a2a31e567a9b3078bec643f088a963 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 5 Apr 2018 17:25:42 -0600 Subject: [PATCH 111/117] Make sure users can write to the rita log directory --- Gopkg.lock | 154 +++++++++++++++++++++++++++++++++++++++++++++++++++++ install.sh | 1 + 2 files changed, 155 insertions(+) create mode 100644 Gopkg.lock diff --git a/Gopkg.lock b/Gopkg.lock new file mode 100644 index 00000000..3ee20a3c --- /dev/null +++ b/Gopkg.lock @@ -0,0 +1,154 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/activecm/mgorus" + packages = ["."] + revision = "544a63f222470b2feb3b1142f7edb9a843f5120d" + version = "v0.1.0" + +[[projects]] + name = "github.com/activecm/mgosec" + packages = ["."] + revision = "af42afa3ec74143661a863bdae62d36a93c6eca7" + version = "v0.1.0" + +[[projects]] + branch = "master" + name = "github.com/activecm/rita-bl" + packages = [ + ".", + "database", + "list", + "sources/lists", + "sources/lists/util", + "sources/rpc" + ] + revision = "a88d294decb19915d5cc4f53b64d5e80b781b775" + +[[projects]] + name = "github.com/blang/semver" + packages = ["."] + revision = "2ee87856327ba09384cabd113bc6b5d174e9ec0f" + version = "v3.5.1" + +[[projects]] + name = "github.com/golang/protobuf" + packages = [ + "proto", + "ptypes/duration" + ] + revision = "925541529c1fa6821df4e44ce2723319eb2be768" + version = "v1.0.0" + +[[projects]] + branch = "master" + name = "github.com/google/safebrowsing" + packages = [ + ".", + "internal/safebrowsing_proto" + ] + revision = "fe6951d7ef01b4e46d3008e8a08b55bcdf3c0ee6" + +[[projects]] + name = "github.com/mattn/go-runewidth" + packages = ["."] + revision = "9e777a8366cce605130a531d2cd6363d07ad7317" + version = "v0.0.2" + +[[projects]] + branch = "master" + name = "github.com/olekukonko/tablewriter" + packages = ["."] + revision = "b8a9be070da40449e501c3c4730a889e42d87a9e" + +[[projects]] + name = "github.com/rifflock/lfshook" + packages = ["."] + revision = "bf539943797a1f34c1f502d07de419b5238ae6c6" + version = "v2.3" + +[[projects]] + name = "github.com/sirupsen/logrus" + packages = ["."] + revision = "c155da19408a8799da419ed3eeb0cb5db0ad5dbc" + version = "v1.0.5" + +[[projects]] + branch = "master" + name = "github.com/skratchdot/open-golang" + packages = ["open"] + revision = "75fb7ed4208cf72d323d7d02fd1a5964a7a9073c" + +[[projects]] + name = "github.com/urfave/cli" + packages = ["."] + revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1" + version = "v1.20.0" + +[[projects]] + branch 
= "master" + name = "golang.org/x/crypto" + packages = ["ssh/terminal"] + revision = "b2aa35443fbc700ab74c586ae79b81c171851023" + +[[projects]] + branch = "master" + name = "golang.org/x/net" + packages = ["idna"] + revision = "b3c676e531a6dc479fa1b35ac961c13f5e2b4d2e" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = [ + "unix", + "windows" + ] + revision = "1d206c9fa8975fb4cf00df1dc8bf3283dc24ba0e" + +[[projects]] + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable" + ] + revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" + version = "v0.3.0" + +[[projects]] + branch = "v2" + name = "gopkg.in/mgo.v2" + packages = [ + ".", + "bson", + "internal/json", + "internal/sasl", + "internal/scram" + ] + revision = "3f83fa5005286a7fe593b055f0d7771a7dce4655" + +[[projects]] + name = "gopkg.in/yaml.v2" + packages = ["."] + revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183" + version = "v2.2.1" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "2cd8cd4548384c70c9d6a0facc232a8a7ed8573751f8cbea14c27b2153d38f8c" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/install.sh b/install.sh index a901de50..ab4efe68 100755 --- a/install.sh +++ b/install.sh @@ -352,6 +352,7 @@ __install_rita() { $_ELEVATE mkdir -p "$_CONFIG_PATH" #$_ELEVATE mkdir -p "$_VAR_PATH" $_ELEVATE mkdir -p "$_VAR_PATH/logs" + $_ELEVATE chmod 777 "$_VAR_PATH/logs" $_ELEVATE mv -f "$_RITA_SRC_DIR/rita" "$_BIN_PATH/rita" $_ELEVATE chown root:root "$_BIN_PATH/rita" From 2b94954dace4aa8debf10b5cc7dafd530e1fedd4 Mon Sep 17 00:00:00 2001 From: logan Date: Tue, 20 Feb 2018 19:51:26 -0700 Subject: [PATCH 112/117] Move normalization/ sanitization process to the analysis step --- 
analysis/sanitization/sanitization.go | 101 ++++++++++++++++++++++++++ commands/analyze.go | 4 + commands/reset-analysis.go | 6 +- parser/fileparser.go | 1 - parser/parsetypes/conn.go | 3 - parser/parsetypes/dns.go | 5 +- parser/parsetypes/http.go | 22 +----- parser/parsetypes/parsetypes.go | 1 - 8 files changed, 111 insertions(+), 32 deletions(-) create mode 100644 analysis/sanitization/sanitization.go diff --git a/analysis/sanitization/sanitization.go b/analysis/sanitization/sanitization.go new file mode 100644 index 00000000..4a36ee4e --- /dev/null +++ b/analysis/sanitization/sanitization.go @@ -0,0 +1,101 @@ +package sanitization + +import ( + "net/url" + "strings" + + mgo "gopkg.in/mgo.v2" + "gopkg.in/mgo.v2/bson" + + "github.com/ocmdev/rita/database" + "github.com/ocmdev/rita/parser/parsetypes" +) + +//SanitizeData cleans up abnormalities in the imported data +func SanitizeData(res *database.Resources) { + sanitizeHTTPData(res) +} + +//sanitizeHTTPData cleans up abnormalities in the HTTP collection +func sanitizeHTTPData(res *database.Resources) { + sess := res.DB.Session.Copy() + defer sess.Close() + + http := sess.DB(res.DB.GetSelectedDB()).C(res.Config.T.Structure.HTTPTable) + + var httpRec parsetypes.HTTP + httpIter := http.Find(nil).Iter() + + bufferSize := res.Config.S.Bro.ImportBuffer + if bufferSize%2 == 1 { + bufferSize++ + } + + buffer := make([]interface{}, 0, bufferSize) + + for httpIter.Next(&httpRec) { + updateDoc := sanitizeHTTPRecord(&httpRec) + + if updateDoc == nil { + continue + } + + if len(buffer) == bufferSize { + err := commitUpdateBuffer(buffer, http) + if err != nil { + res.Log.Error("Could not sanitize http records", err) + return + } + buffer = buffer[:0] + } + buffer = append(buffer, bson.M{"_id": httpRec.ID}) + buffer = append(buffer, updateDoc) + } + + if len(buffer) > 0 { + err := commitUpdateBuffer(buffer, http) + if err != nil { + res.Log.Error("Could not sanitize http records", err) + return + } + } +} + +func 
sanitizeHTTPRecord(httpRec *parsetypes.HTTP) interface{} { + newURI := httpRec.URI + + // ex: Host: 67.217.65.244 URI: 67.217.65.244:443 + // URI -> :443 which will cause an error in the parser + if strings.HasPrefix(httpRec.URI, httpRec.Host) { + newURI = httpRec.URI[len(httpRec.Host):] + } + + parsedURL, err := url.Parse(newURI) + if err != nil { + newURI = "" + } + + //CASE: Host: www.google.com URI: http://www.google.com + if err == nil && parsedURL.IsAbs() { + newURI = parsedURL.RequestURI() + } + + if newURI == httpRec.URI { + return nil + } + + //nolint: vet + return bson.M{ + "$set": bson.D{ + {"uri", newURI}, + }, + } +} + +func commitUpdateBuffer(buffer []interface{}, collection *mgo.Collection) error { + bulk := collection.Bulk() + bulk.Unordered() + bulk.Update(buffer...) + _, err := bulk.Run() + return err +} diff --git a/commands/analyze.go b/commands/analyze.go index 1d853b26..c6603b45 100644 --- a/commands/analyze.go +++ b/commands/analyze.go @@ -9,6 +9,7 @@ import ( "github.com/activecm/rita/analysis/blacklist" "github.com/activecm/rita/analysis/crossref" "github.com/activecm/rita/analysis/dns" + "github.com/activecm/rita/analysis/sanitization" "github.com/activecm/rita/analysis/scanning" "github.com/activecm/rita/analysis/structure" "github.com/activecm/rita/analysis/urls" @@ -99,6 +100,9 @@ func analyze(inDb string, configFile string) error { }).Info("Analyzing") fmt.Println("[+] Analyzing " + td) res.DB.SelectDB(td) + + sanitization.SanitizeData(res) + logAnalysisFunc("Unique Connections", td, res, structure.BuildUniqueConnectionsCollection, ) diff --git a/commands/reset-analysis.go b/commands/reset-analysis.go index e489d648..e407a199 100644 --- a/commands/reset-analysis.go +++ b/commands/reset-analysis.go @@ -25,16 +25,16 @@ func init() { if db == "" { return cli.NewExitError("Specify a database", -1) } - return cleanAnalysis(db, res) + return resetAnalysis(db, res) }, } bootstrapCommands(reset) } -// cleanAnalysis cleans out all of the 
analysis data, leaving behind only the +// resetAnalysis cleans out all of the analysis data, leaving behind only the // raw data from parsing the logs -func cleanAnalysis(database string, res *database.Resources) error { +func resetAnalysis(database string, res *database.Resources) error { //clean database conn := res.Config.T.Structure.ConnTable diff --git a/parser/fileparser.go b/parser/fileparser.go index ed122b6d..77ce3d8c 100644 --- a/parser/fileparser.go +++ b/parser/fileparser.go @@ -292,6 +292,5 @@ func parseLine(lineString string, header *fpt.BroHeader, }).Error("Encountered unhandled type in log") } } - dat.Normalize() return dat } diff --git a/parser/parsetypes/conn.go b/parser/parsetypes/conn.go index c481f14f..cf71b054 100644 --- a/parser/parsetypes/conn.go +++ b/parser/parsetypes/conn.go @@ -65,6 +65,3 @@ func (in *Conn) TargetCollection(config *config.StructureTableCfg) string { func (in *Conn) Indices() []string { return []string{"$hashed:id_orig_h", "$hashed:id_resp_h", "-duration", "ts", "uid"} } - -//Normalize pre processes this type of entry before it is imported by rita -func (in *Conn) Normalize() {} diff --git a/parser/parsetypes/dns.go b/parser/parsetypes/dns.go index 0b599eae..4ba5e677 100644 --- a/parser/parsetypes/dns.go +++ b/parser/parsetypes/dns.go @@ -27,7 +27,7 @@ type DNS struct { // the query TransID int64 `bson:"trans_id" bro:"trans_id" brotype:"count"` // RTT contains the round trip time of this request / response - RTT float64 `bson:"RTT" bro:"rtt" brotype:"interval"` + RTT float64 `bson:"rtt" bro:"rtt" brotype:"interval"` // Query contians the query string Query string `bson:"query" bro:"query" brotype:"string"` // QClass contains a the qclass of the query @@ -70,6 +70,3 @@ func (in *DNS) TargetCollection(config *config.StructureTableCfg) string { func (in *DNS) Indices() []string { return []string{"$hashed:id_orig_h", "$hashed:id_resp_h", "$hashed:query"} } - -//Normalize pre processes this type of entry before it is imported 
by rita -func (in *DNS) Normalize() {} diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index 85d31529..8b79ff37 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -1,13 +1,13 @@ package parsetypes import ( - "net/url" + // "net/url" "github.com/activecm/rita/config" "gopkg.in/mgo.v2/bson" ) -import "strings" +//import "strings" // HTTP provides a data structure for entries in bro's HTTP log file type HTTP struct { @@ -84,21 +84,3 @@ func (line *HTTP) TargetCollection(config *config.StructureTableCfg) string { func (line *HTTP) Indices() []string { return []string{"$hashed:id_orig_h", "$hashed:id_resp_h", "$hashed:user_agent", "uid"} } - -// Normalize fixes up absolute uri's as read by bro to be relative -func (line *HTTP) Normalize() { - //uri is missing the protocol. set uri to "" - // ex: Host: 67.217.65.244 URI: 67.217.65.244:443 - if strings.HasPrefix(line.URI, line.Host) { - line.URI = "" - return - } - parsedURL, err2 := url.Parse(line.URI) - if err2 != nil { - line.URI = "" - return - } - if parsedURL.IsAbs() { - line.URI = parsedURL.RequestURI() - } -} diff --git a/parser/parsetypes/parsetypes.go b/parser/parsetypes/parsetypes.go index cdb52019..d896d443 100644 --- a/parser/parsetypes/parsetypes.go +++ b/parser/parsetypes/parsetypes.go @@ -6,7 +6,6 @@ import "github.com/activecm/rita/config" type BroData interface { TargetCollection(*config.StructureTableCfg) string Indices() []string - Normalize() } //NewBroDataFactory creates a new BroData based on the string From 709d1a39b3d2506287e59ea20fc0a0338a40c470 Mon Sep 17 00:00:00 2001 From: logan Date: Tue, 20 Feb 2018 21:06:48 -0700 Subject: [PATCH 113/117] Remove debugging comments --- parser/parsetypes/http.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/parser/parsetypes/http.go b/parser/parsetypes/http.go index 8b79ff37..149c8d55 100644 --- a/parser/parsetypes/http.go +++ b/parser/parsetypes/http.go @@ -1,13 +1,11 @@ package parsetypes import ( - // "net/url" 
"github.com/activecm/rita/config" "gopkg.in/mgo.v2/bson" ) -//import "strings" // HTTP provides a data structure for entries in bro's HTTP log file type HTTP struct { From e288b5f24d3766bf254cb7e8839dd1799127f76b Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Thu, 5 Apr 2018 17:52:01 -0600 Subject: [PATCH 114/117] OCMDev/ActiveCM switch --- analysis/sanitization/sanitization.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/analysis/sanitization/sanitization.go b/analysis/sanitization/sanitization.go index 4a36ee4e..19dd471d 100644 --- a/analysis/sanitization/sanitization.go +++ b/analysis/sanitization/sanitization.go @@ -7,8 +7,8 @@ import ( mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" - "github.com/ocmdev/rita/database" - "github.com/ocmdev/rita/parser/parsetypes" + "github.com/activecm/rita/database" + "github.com/activecm/rita/parser/parsetypes" ) //SanitizeData cleans up abnormalities in the imported data From fd7cee056ed45a6fb1e58d35958965609bcfd254 Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Wed, 21 Mar 2018 22:19:47 -0500 Subject: [PATCH 115/117] Removed tables.yaml & hardcoded contents into config/tables.go Restructured test-config command to output config before testing database connection. 
--- Dockerfile | 1 - commands/test-config.go | 16 ++++- config/config.go | 16 ++--- config/tables.go | 125 +++++++++++++++++++++------------------- database/mock.go | 4 +- database/resources.go | 4 +- etc/tables.yaml | 43 -------------- install.sh | 1 - 8 files changed, 84 insertions(+), 126 deletions(-) delete mode 100644 etc/tables.yaml diff --git a/Dockerfile b/Dockerfile index 2e9638a8..d4e6693c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,7 +21,6 @@ FROM scratch WORKDIR /var/lib/rita WORKDIR / -COPY --from=rita-builder /go/src/github.com/activecm/rita/etc/tables.yaml /etc/rita/tables.yaml COPY --from=rita-builder /go/src/github.com/activecm/rita/etc/rita.yaml /etc/rita/config.yaml COPY --from=rita-builder /go/src/github.com/activecm/rita/rita /rita diff --git a/commands/test-config.go b/commands/test-config.go index 3b90d91b..3403a42d 100644 --- a/commands/test-config.go +++ b/commands/test-config.go @@ -4,7 +4,9 @@ import ( "fmt" "os" + "github.com/activecm/rita/config" "github.com/activecm/rita/database" + "github.com/urfave/cli" yaml "gopkg.in/yaml.v2" ) @@ -28,14 +30,19 @@ func init() { // testConfiguration prints out the result of parsing the config file func testConfiguration(c *cli.Context) error { - res := database.InitResources(c.String("config")) + // First, print out the config as it was parsed + conf, err := config.GetConfig(c.String("config")) + if err != nil { + fmt.Fprintf(os.Stdout, "Failed to config, exiting") + panic(err) + } - staticConfig, err := yaml.Marshal(res.Config.S) + staticConfig, err := yaml.Marshal(conf.S) if err != nil { return err } - tableConfig, err := yaml.Marshal(res.Config.T) + tableConfig, err := yaml.Marshal(conf.T) if err != nil { return err } @@ -43,5 +50,8 @@ func testConfiguration(c *cli.Context) error { fmt.Fprintf(os.Stdout, "\n%s\n", string(staticConfig)) fmt.Fprintf(os.Stdout, "\n%s\n", string(tableConfig)) + // Then test initializing external resources like db connection and file handles + 
database.InitResources(c.String("config")) + return nil } diff --git a/config/config.go b/config/config.go index 495b508f..11c20599 100644 --- a/config/config.go +++ b/config/config.go @@ -23,29 +23,21 @@ type ( ) //userConfigPath specifies the path of RITA's static config file -//relative to the user's home directory const userConfigPath = "/etc/rita/config.yaml" -//tableConfigPath specifies teh path of RITA's table config file -//relative to the user's home directory -const tableConfigPath = "/etc/rita/tables.yaml" - //NOTE: If go ever gets default parameters, default the config options to "" // GetConfig retrieves a configuration in order of precedence -func GetConfig(userConfig string, tableConfig string) (*Config, error) { +func GetConfig(userConfig string) (*Config, error) { if userConfig == "" { userConfig = userConfigPath } - if tableConfig == "" { - tableConfig = tableConfigPath - } - return loadSystemConfig(userConfig, tableConfig) + return loadSystemConfig(userConfig) } // loadSystemConfig attempts to parse a config file -func loadSystemConfig(userConfig string, tableConfig string) (*Config, error) { +func loadSystemConfig(userConfig string) (*Config, error) { var config = new(Config) static, err := loadStaticConfig(userConfig) if err != nil { @@ -53,7 +45,7 @@ func loadSystemConfig(userConfig string, tableConfig string) (*Config, error) { } config.S = *static - tables, err := loadTableConfig(tableConfig) + tables, err := loadTableConfig() if err != nil { return config, err } diff --git a/config/tables.go b/config/tables.go index f5419639..5b25380d 100644 --- a/config/tables.go +++ b/config/tables.go @@ -1,116 +1,121 @@ package config -import ( - "fmt" - "io/ioutil" - "os" - "reflect" - - yaml "gopkg.in/yaml.v2" -) - type ( //TableCfg is the container for other table config sections TableCfg struct { - Log LogTableCfg `yaml:"LogConfig"` - Blacklisted BlacklistedTableCfg `yaml:"BlackListed"` - DNS DNSTableCfg `yaml:"Dns"` - Crossref CrossrefTableCfg 
`yaml:"Crossref"` - Scanning ScanningTableCfg `yaml:"Scanning"` - Structure StructureTableCfg `yaml:"Structure"` - Beacon BeaconTableCfg `yaml:"Beacon"` - Urls UrlsTableCfg `yaml:"Urls"` - UserAgent UserAgentTableCfg `yaml:"UserAgent"` - Meta MetaTableCfg `yaml:"MetaTables"` + Log LogTableCfg + Blacklisted BlacklistedTableCfg + DNS DNSTableCfg + Crossref CrossrefTableCfg + Scanning ScanningTableCfg + Structure StructureTableCfg + Beacon BeaconTableCfg + Urls UrlsTableCfg + UserAgent UserAgentTableCfg + Meta MetaTableCfg } //LogTableCfg contains the configuration for logging LogTableCfg struct { - RitaLogTable string `yaml:"RitaLogTable"` + RitaLogTable string } //StructureTableCfg contains the names of the base level collections StructureTableCfg struct { - ConnTable string `yaml:"ConnectionTable"` - HTTPTable string `yaml:"HttpTable"` - DNSTable string `yaml:"DnsTable"` - UniqueConnTable string `yaml:"UniqueConnectionTable"` - HostTable string `yaml:"HostTable"` - IPv4Table string `yaml:"IPv4Table"` - IPv6Table string `yaml:"IPv6Table"` + ConnTable string + HTTPTable string + DNSTable string + UniqueConnTable string + HostTable string + IPv4Table string + IPv6Table string } //BlacklistedTableCfg is used to control the blacklisted analysis module BlacklistedTableCfg struct { - BlacklistDatabase string `yaml:"Database"` - SourceIPsTable string `yaml:"SourceIPsTable"` - DestIPsTable string `yaml:"DestIPsTable"` - HostnamesTable string `yaml:"HostnamesTable"` - UrlsTable string `yaml:"UrlsTable"` + BlacklistDatabase string + SourceIPsTable string + DestIPsTable string + HostnamesTable string + UrlsTable string } //DNSTableCfg is used to control the dns analysis module DNSTableCfg struct { - ExplodedDNSTable string `yaml:"ExplodedDnsTable"` - HostnamesTable string `yaml:"HostnamesTable"` + ExplodedDNSTable string + HostnamesTable string } //CrossrefTableCfg is used to control the crossref analysis module CrossrefTableCfg struct { - SourceTable string 
`yaml:"SourceTable"` - DestTable string `yaml:"DestinationTable"` + SourceTable string + DestTable string } //ScanningTableCfg is used to control the scanning analysis module ScanningTableCfg struct { - ScanTable string `yaml:"ScanTable"` + ScanTable string } //BeaconTableCfg is used to control the beaconing analysis module BeaconTableCfg struct { - BeaconTable string `yaml:"BeaconTable"` + BeaconTable string } //UrlsTableCfg is used to control the urls analysis module UrlsTableCfg struct { - UrlsTable string `yaml:"UrlsTable"` + UrlsTable string } //UserAgentTableCfg is used to control the urls analysis module UserAgentTableCfg struct { - UserAgentTable string `yaml:"UserAgentTable"` + UserAgentTable string } //MetaTableCfg contains the meta db collection names MetaTableCfg struct { - FilesTable string `yaml:"FilesTable"` - DatabasesTable string `yaml:"DatabasesTable"` + FilesTable string + DatabasesTable string } ) -// loadTableConfig attempts to parse a config file -func loadTableConfig(cfgPath string) (*TableCfg, error) { +// loadTableConfig initializes a config struct +func loadTableConfig() (*TableCfg, error) { var config = new(TableCfg) - _, err := os.Stat(cfgPath) - if os.IsNotExist(err) { - return config, err - } + // initialize all the table configs + config.Log.RitaLogTable = "logs" - cfgFile, err := ioutil.ReadFile(cfgPath) - if err != nil { - return config, err - } - err = yaml.Unmarshal(cfgFile, config) + config.Structure.ConnTable = "conn" + config.Structure.HTTPTable = "http" + config.Structure.DNSTable = "dns" + config.Structure.UniqueConnTable = "uconn" + config.Structure.HostTable = "host" + config.Structure.IPv4Table = "ipv4" + config.Structure.IPv6Table = "ipv6" - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to read config: %s\n", err.Error()) - return config, err - } + config.Blacklisted.BlacklistDatabase = "rita-blacklist" + config.Blacklisted.SourceIPsTable = "blSourceIPs" + config.Blacklisted.DestIPsTable = "blDestIPs" + 
config.Blacklisted.HostnamesTable = "blHostnames" + config.Blacklisted.UrlsTable = "blUrls" + + config.DNS.ExplodedDNSTable = "explodedDns" + config.DNS.HostnamesTable = "hostnames" + + config.Crossref.SourceTable = "sourceXREF" + config.Crossref.DestTable = "destXREF" + + config.Scanning.ScanTable = "scan" + + config.Beacon.BeaconTable = "beacon" + + config.Urls.UrlsTable = "urls" + + config.UserAgent.UserAgentTable = "useragent" - // expand env variables, config is a pointer - // so we have to call elem on the reflect value - expandConfig(reflect.ValueOf(config).Elem()) + config.Meta.FilesTable = "files" + config.Meta.DatabasesTable = "database" return config, nil } diff --git a/database/mock.go b/database/mock.go index 7762eacc..d1f3f368 100644 --- a/database/mock.go +++ b/database/mock.go @@ -11,9 +11,7 @@ import ( // returning a *Resources object which has all of the necessary configuration information func InitMockResources(userConfig string) *Resources { //TODO: hard code in a test config - //GetConfig requires a table config. "" tells the configuration manager - //to use the default table config. - conf, err := config.GetConfig(userConfig, "") + conf, err := config.GetConfig(userConfig) if err != nil { fmt.Fprintf(os.Stdout, "Failed to config, exiting") panic(err) diff --git a/database/resources.go b/database/resources.go index 19425415..f81939b3 100644 --- a/database/resources.go +++ b/database/resources.go @@ -31,9 +31,7 @@ type ( // InitResources grabs the configuration file and intitializes the configuration data // returning a *Resources object which has all of the necessary configuration information func InitResources(userConfig string) *Resources { - //GetConfig requires a table config. "" tells the configuration manager - //to use the default table config. 
- conf, err := config.GetConfig(userConfig, "") + conf, err := config.GetConfig(userConfig) if err != nil { fmt.Fprintf(os.Stdout, "Failed to config, exiting") panic(err) diff --git a/etc/tables.yaml b/etc/tables.yaml deleted file mode 100644 index 39ad8200..00000000 --- a/etc/tables.yaml +++ /dev/null @@ -1,43 +0,0 @@ -# NOTE: DO NOT CHANGE THE SETTINGS BELOW UNLESS YOU ARE FAMILIAR WITH THE CODE # -LogConfig: - RitaLogTable: logs - -Structure: - ConnectionTable: conn - HttpTable: http - DnsTable: dns - UniqueConnectionTable: uconn - HostTable: host - IPv4Table: ipv4 - IPv6Table: ipv6 - -BlackListed: - Database: rita-blacklist - SourceIPsTable: blSourceIPs - DestIPsTable: blDestIPs - HostnamesTable: blHostnames - UrlsTable: blUrls - -Dns: - ExplodedDnsTable: explodedDns - HostnamesTable: hostnames - -Crossref: - SourceTable: sourceXREF - DestinationTable: destXREF - -Scanning: - ScanTable: scan - -Beacon: - BeaconTable: beacon - -Urls: - UrlsTable: urls - -UserAgent: - UserAgentTable: useragent - -MetaTables: - FilesTable: files - DatabasesTable: databases diff --git a/install.sh b/install.sh index ab4efe68..8ee31317 100755 --- a/install.sh +++ b/install.sh @@ -364,7 +364,6 @@ __install_rita() { $_ELEVATE mv -f "$_CONFIG_PATH/config.yaml" "$_CONFIG_PATH/config.yaml.old" fi $_ELEVATE cp -f "$_RITA_SRC_DIR/etc/rita.yaml" "$_CONFIG_PATH/config.yaml" - $_ELEVATE cp -f "$_RITA_SRC_DIR/etc/tables.yaml" "$_CONFIG_PATH/tables.yaml" # All users can read and write rita's config file $_ELEVATE chmod 755 "$_CONFIG_PATH" From af2ea1804e4734f72b273b12932bbf126ea18156 Mon Sep 17 00:00:00 2001 From: Logan Lembke Date: Fri, 6 Apr 2018 17:39:20 -0600 Subject: [PATCH 116/117] Fix database/ databases mismatch that prevented reading older metadatabases --- config/tables.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/tables.go b/config/tables.go index 5b25380d..1a38671f 100644 --- a/config/tables.go +++ b/config/tables.go @@ -115,7 +115,7 @@ func 
loadTableConfig() (*TableCfg, error) { config.UserAgent.UserAgentTable = "useragent" config.Meta.FilesTable = "files" - config.Meta.DatabasesTable = "database" + config.Meta.DatabasesTable = "databases" return config, nil } From d7d0273ca3665f599ecf5fbbdc61520c122fe20b Mon Sep 17 00:00:00 2001 From: Ethan Robish Date: Fri, 13 Apr 2018 18:55:33 -0500 Subject: [PATCH 117/117] Updating docker references to use Quay --- docker-compose.yml | 29 +++++++++++++++++++++++++++++ docs/Docker Usage.md | 20 ++++++++++++++------ 2 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 docker-compose.yml diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..53b12780 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,29 @@ +version: '3' + +services: + # bro: + # image: blacktop/bro:2.5 + # network_mode: host + # volumes: + # - logs:/pcap + # cap_add: + # - NET_RAW + + db: + image: mongo:3.6 + volumes: + - db:/data/db/ + + rita: + image: quay.io/activecm/rita:${RITA_VERSION:-master} + build: . + links: + # give db an alias of "localhost" so that RITA's default config works unchanged + - db:localhost + volumes: + - ${BRO_LOGS:?You must provide a path to your BRO_LOGS}:/opt/bro/logs:ro + # - logs:/opt/bro/logs + +volumes: + db: + # logs: \ No newline at end of file diff --git a/docs/Docker Usage.md b/docs/Docker Usage.md index 0f3a381f..5b7c211e 100644 --- a/docs/Docker Usage.md +++ b/docs/Docker Usage.md @@ -10,13 +10,13 @@ You can run RITA using Docker! You have several options depending on your specif The easiest way is to pull down the pre-built image. ``` -docker pull activecm/rita:latest +docker pull quay.io/activecm/rita:master ``` -You can also build the image from scratch. +You can also build the image from source. ``` -docker build -t activecm/rita:latest . +docker build -t quay.io/activecm/rita:master . 
``` ## Running RITA with Docker Compose @@ -43,6 +43,14 @@ export BRO_LOGS=/path/to/your/logs docker-compose run --rm -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml rita show-databases ``` +Note: If you'd like to use a different version of RITA than the default `master` you can do so using the `RITA_VERSION` variable. + +``` +export RITA_VERSION=dev +export BRO_LOGS=/path/to/your/logs +docker-compose run --rm -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml rita show-databases +``` + ## Running RITA with Docker Using External Mongo If you don't need/want the convenience of Docker Compose running the Mongo server for you, you can also use RITA without it. You will need to modify RITA's config file to point to your external Mongo server. @@ -51,11 +59,11 @@ If you don't need/want the convenience of Docker Compose running the Mongo serve docker run -it --rm \ -v /path/to/your/bro/logs:/opt/bro/logs/:ro \ -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml:ro \ - activecm/rita:latest import + quay.io/activecm/rita:master import docker run -it --rm \ -v /path/to/your/bro/logs:/opt/bro/logs/:ro \ -v /path/to/your/rita/config.yaml:/etc/rita/config.yaml:ro \ - activecm/rita:latest analyze + quay.io/activecm/rita:master analyze ``` ## Using Docker to Build RITA @@ -63,7 +71,7 @@ docker run -it --rm \ You can use Docker to build a statically linked RITA binary for you. This binary should be portable between Linux 64-bit systems. Once you've obtained the RITA docker image (see the "Obtaining the RITA Docker Image" section above) you can run the following commands to copy the binary to your host system. ``` -docker create --name rita activecm/rita:latest +docker create --name rita quay.io/activecm/rita:master docker cp rita:/rita ./rita docker rm rita ```