From 14612338c124556fd2bc4e0777d24f91e105d1b9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20N=C3=A4gele?=
Date: Fri, 28 Jan 2022 10:11:28 +0100
Subject: [PATCH 1/5] correct minor doc error

---
 CONFIGURATION.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CONFIGURATION.md b/CONFIGURATION.md
index b89c30f..8015910 100644
--- a/CONFIGURATION.md
+++ b/CONFIGURATION.md
@@ -349,7 +349,7 @@ this can be configured using the fields parameter.
 ```
 - segment: anonymize
   config:
-    encryptionkey: "abcdef"
+    key: "abcdef"
     # the lines below are optional and set to default
     fields: "SrcAddr,DstAddr,SamplerAddress"
 ```

From 5f64eca1e4a801d56a77bf51b10910f04d37b0e0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20N=C3=A4gele?=
Date: Fri, 28 Jan 2022 10:11:38 +0100
Subject: [PATCH 2/5] add support for printing ASNs

---
 segments/print/printflowdump/printflowdump.go | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/segments/print/printflowdump/printflowdump.go b/segments/print/printflowdump/printflowdump.go
index 8c2c353..33d68d2 100644
--- a/segments/print/printflowdump/printflowdump.go
+++ b/segments/print/printflowdump/printflowdump.go
@@ -80,6 +80,16 @@ func (segment PrintFlowdump) format_flow(flowmsg *flow.FlowMessage) string {
 	src := net.IP(flowmsg.SrcAddr)
 	dst := net.IP(flowmsg.DstAddr)
 	router := net.IP(flowmsg.SamplerAddress)
+
+	var srcas, dstas string
+	if segment.Verbose {
+		if flowmsg.SrcAS != 0 {
+			srcas = fmt.Sprintf("AS%d/", flowmsg.SrcAS)
+		}
+		if flowmsg.DstAS != 0 {
+			dstas = fmt.Sprintf("AS%d/", flowmsg.DstAS)
+		}
+	}
 	var proto string
 	if segment.UseProtoname {
 		if flowmsg.ProtoName != "" {
@@ -195,9 +205,10 @@ func (segment PrintFlowdump) format_flow(flowmsg *flow.FlowMessage) string {
 		color = "\033[0m"
 	}
 
-	return fmt.Sprintf("%s%s: %s:%d -> %s:%d [%s → %s@%s → %s], %s, %ds, %s, %s",
-		color, timestamp, src, flowmsg.SrcPort, dst, flowmsg.DstPort,
-		srcIfDesc, statusString, router, dstIfDesc, proto, duration,
+	return fmt.Sprintf("%s%s: %s%s:%d -> %s%s:%d [%s → %s@%s → %s], %s, %ds, %s, %s",
+		color, timestamp, srcas, src, flowmsg.SrcPort, dstas, dst,
+		flowmsg.DstPort, srcIfDesc, statusString, router, dstIfDesc,
+		proto, duration,
 		humanize.SI(float64(flowmsg.Bytes*8/duration), "bps"),
 		humanize.SI(float64(flowmsg.Packets/duration), "pps"))
 }

From 6ceb99e77888beab74d3801f6259eb75f0f22a39 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20N=C3=A4gele?=
Date: Fri, 28 Jan 2022 10:15:13 +0100
Subject: [PATCH 3/5] use unicode arrow in all places

---
 segments/print/printflowdump/printflowdump.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/segments/print/printflowdump/printflowdump.go b/segments/print/printflowdump/printflowdump.go
index 33d68d2..4fe7bfa 100644
--- a/segments/print/printflowdump/printflowdump.go
+++ b/segments/print/printflowdump/printflowdump.go
@@ -205,7 +205,7 @@ func (segment PrintFlowdump) format_flow(flowmsg *flow.FlowMessage) string {
 		color = "\033[0m"
 	}
 
-	return fmt.Sprintf("%s%s: %s%s:%d -> %s%s:%d [%s → %s@%s → %s], %s, %ds, %s, %s",
+	return fmt.Sprintf("%s%s: %s%s:%d → %s%s:%d [%s → %s@%s → %s], %s, %ds, %s, %s",
 		color, timestamp, srcas, src, flowmsg.SrcPort, dstas, dst,
 		flowmsg.DstPort, srcIfDesc, statusString, router, dstIfDesc,
 		proto, duration,
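The AS prefix added by patches 2 and 3 is only printed when the `printflowdump` segment runs with its `verbose` option enabled; otherwise the `srcas`/`dstas` strings stay empty. A minimal sketch of the relevant pipeline stanza (any input segment may precede it; the other option shown below reflects the defaults used in the example configs later in this series):

```yaml
- segment: printflowdump
  config:
    verbose: true       # prefix source and destination addresses with "AS<number>/"
    useprotoname: true  # print protocol names instead of protocol numbers (default)
```

From 7ffe11fa025daf6d9279ef4b2036c9d1e3a607ae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20N=C3=A4gele?=
Date: Fri, 28 Jan 2022 13:06:47 +0100
Subject: [PATCH 4/5] add some additional examples, add dedicated example readme

---
 examples/README.md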
                | 41 +++++++++++++++++++++++++++++++++++
 examples/flowdump/config.yml  | 17 +++++------------
 examples/localkafka/write.yml |  5 +++++
 3 files changed, 51 insertions(+), 12 deletions(-)
 create mode 100644 examples/README.md

diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 0000000..e48d444
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,41 @@
+# Flowpipeline Example Collection
+
+This collection of example configs is supposed to help users get started with
+different use cases. A grouped and alphabetically sorted
+[reference](https://github.com/bwNetFlow/flowpipeline/blob/master/CONFIGURATION.md)
+might be the best resource when trying to achieve a specific outcome; this
+short guide, however, tries to give new users some idea of what is possible
+with this tool and to present existing users with additional options.
+
+The most natural way to group these examples is to list them by which input
+segment they use, i.e. "where they get flows from". Note that these input
+segments can be freely interchanged with one another, and all examples work with
+all inputs.
+
+
+## `bpf`
+This segment accesses local network interfaces using raw sockets, as for instance tcpdump does.
+
+Relevant examples are:
+* [./flowdump](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump) -- create a tcpdump style view with custom filtering from CLI using local
+  interfaces
+
+
+## `goflow`
+This segment allows listening for raw IPFIX, Netflow, or sFlow by using goflow2's API.
+
+Relevant examples are:
+* [./localkafka/write.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/localkafka) -- emulate plain goflow2 and write flows to a Kafka topic for the following section to use
+
+
+## `kafkaconsumer`
+This segment accesses streams of flows generated by another pipeline using
+`kafkaproducer` or [goflow2](https://github.com/netsampler/goflow2).
+
+Relevant examples are:
+* [./kafkaflowdump](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/kafkaflowdump) -- create a tcpdump style view with custom filtering from CLI
+* [./highlighted_flowdump](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/highlighted_flowdump) -- create a tcpdump style view but use the filtering conditional to highlight desired flows instead of dropping undesired flows
+* [./enricher](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/enricher) -- enrich flows with various bits of data and store them back in Kafka
+* [./reducer](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/reducer) -- strip flows of fields and store them back in Kafka
+* [./splitter](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/splitter) -- distribute flows to multiple Kafka topics based on a field
+* [./anonymizer](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/anonymizer) -- anonymize IP addresses using Crypto PAn
diff --git a/examples/flowdump/config.yml b/examples/flowdump/config.yml
index 2c59fdd..e5333c0 100644
--- a/examples/flowdump/config.yml
+++ b/examples/flowdump/config.yml
@@ -1,14 +1,11 @@
 ---
 ###############################################################################
-# Consume flow messages, it's best to use an enriched topic as flowdump
-# printing involves interface descriptions.
-- segment: kafkaconsumer
+# Generate flow messages from packet headers. This uses an eBPF program and a
+# custom flow cache. Requires root or the correct capabilities as specified by
+# the project's README.md.
+- segment: bpf
   config:
-    server: kafka01.example.com:9093
-    topic: flow-messages-enriched
-    group: myuser-flowdump
-    user: myuser
-    pass: $KAFKA_SASL_PASS
+    device: eth0
 
 ###############################################################################
 # Filter flows dynamically, i.e. via commandline argument. This makes for a
@@ -17,10 +14,6 @@
   config:
     filter: $0 # this refers to the first arg.
 
-###############################################################################
-# Add human readable protocol names to any flow message
-- segment: protomap
-
 ###############################################################################
 # Print flowdump, this is a predefined format that is not configurable at this
 # point.
diff --git a/examples/localkafka/write.yml b/examples/localkafka/write.yml
index 379e748..29d588e 100644
--- a/examples/localkafka/write.yml
+++ b/examples/localkafka/write.yml
@@ -1,4 +1,9 @@
 ---
+# Note that this write.yml could just as well be a real goflow2 instance, as no
+# additional processing takes place. Thus, using goflow2 directly might be
+# preferable.
+
+
 ###############################################################################
 # Receive Netflow v9 using Goflow on a local port.
 - segment: goflow
From f3efc9c95bc1181ee3b1b719889f1523c9613825 Mon Sep 17 00:00:00 2001
From: Georg Eisenhart
Date: Wed, 2 Feb 2022 12:56:38 +0100
Subject: [PATCH 5/5] refine some examples and docs

---
 CONFIGURATION.md                              |  4 +++-
 examples/README.md                            |  6 +++---
 examples/flowdump/{config.yml => bpf.yml}     |  0
 examples/flowdump/csv.yml                     | 22 +++++++++++++++++++
 .../config.yml => flowdump/highlight.yml}     |  0
 examples/flowdump/json.yml                    | 19 ++++++++++++++++
 examples/flowdump/kafkaflowdump.yml           | 20 +++++++++++++++++
 7 files changed, 67 insertions(+), 4 deletions(-)
 rename examples/flowdump/{config.yml => bpf.yml} (100%)
 create mode 100644 examples/flowdump/csv.yml
 rename examples/{highlighted_flowdump/config.yml => flowdump/highlight.yml} (100%)
 create mode 100644 examples/flowdump/json.yml
 create mode 100644 examples/flowdump/kafkaflowdump.yml

diff --git a/CONFIGURATION.md b/CONFIGURATION.md
index 8015910..2a81ab4 100644
--- a/CONFIGURATION.md
+++ b/CONFIGURATION.md
@@ -354,7 +354,7 @@ this can be configured using the fields parameter.
     fields: "SrcAddr,DstAddr,SamplerAddress"
 ```
 
-[any additional links](https://bwnet.belwue.de)
+[CryptoPan module](https://github.com/Yawning/cryptopan)
 [godoc](https://pkg.go.dev/github.com/bwNetFlow/flowpipeline/segments/modify/anonymize)
 [examples using this segment](https://github.com/search?q=%22segment%3A+anonymize%22+extension%3Ayml+repo%3AbwNetFlow%2Fflowpipeline%2Fexamples&type=Code)
 
@@ -529,6 +529,8 @@ sequence to export to different places.
 The `csv` segment provides an CSV output option. It uses stdout by default,
 but can be instructed to write to file using the filename parameter. The
 fields parameter can be used to limit which fields will be exported.
+If no filename is provided, or it is empty, output goes to stdout.
+By default, all fields are exported; to reduce them, provide a comma-separated list of fields.
 
 ```
 - segment: csv
diff --git a/examples/README.md b/examples/README.md
index e48d444..3ecc72d 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -17,7 +17,7 @@
 This segment accesses local network interfaces using raw sockets, as for instance tcpdump does.
 
 Relevant examples are:
-* [./flowdump](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump) -- create a tcpdump style view with custom filtering from CLI using local
+* [./flowdump/bpf.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump/bpf.yml) -- create a tcpdump style view with custom filtering from CLI using local
   interfaces
 
 
@@ -33,8 +33,8 @@ This segment accesses streams of flows generated by another pipeline using
 `kafkaproducer` or [goflow2](https://github.com/netsampler/goflow2).
 
 Relevant examples are:
-* [./kafkaflowdump](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/kafkaflowdump) -- create a tcpdump style view with custom filtering from CLI
-* [./highlighted_flowdump](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/highlighted_flowdump) -- create a tcpdump style view but use the filtering conditional to highlight desired flows instead of dropping undesired flows
+* [./flowdump/kafkaflowdump.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump/kafkaflowdump.yml) -- create a tcpdump style view with custom filtering from CLI
+* [./flowdump/highlight.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump/highlight.yml) -- create a tcpdump style view but use the filtering conditional to highlight desired flows instead of dropping undesired flows
 * [./enricher](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/enricher) -- enrich flows with various bits of data and store them back in Kafka
 * [./reducer](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/reducer) -- strip flows of fields and store them back in Kafka
 * [./splitter](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/splitter) -- distribute flows to multiple Kafka topics based on a field
diff --git a/examples/flowdump/config.yml b/examples/flowdump/bpf.yml
similarity index 100%
rename from examples/flowdump/config.yml
rename to examples/flowdump/bpf.yml
diff --git a/examples/flowdump/csv.yml b/examples/flowdump/csv.yml
new file mode 100644
index 0000000..ae7030c
--- /dev/null
+++ b/examples/flowdump/csv.yml
@@ -0,0 +1,22 @@
+---
+###############################################################################
+# Consume flow messages; it's best to use an enriched topic, as flowdump
+# printing involves interface descriptions.
+- segment: kafkaconsumer
+  config:
+    server: kafka01.example.com:9093
+    topic: flow-messages-enriched
+    group: myuser-flowdump
+    user: myuser
+    pass: $KAFKA_SASL_PASS
+
+###############################################################################
+# CSV output with the given fields. If no filename is configured, output
+# is redirected to stdout.
+#
+# An example list for the fields parameter may look like:
+# "TimeFlowStart,TimeFlowEnd,Bytes,Packets,SrcAddr,SrcPort,FlowDirection,DstAddr,DstPort,Proto"
+- segment: csv
+  config:
+    filename: ""
+    fields: ""
\ No newline at end of file
diff --git a/examples/highlighted_flowdump/config.yml b/examples/flowdump/highlight.yml
similarity index 100%
rename from examples/highlighted_flowdump/config.yml
rename to examples/flowdump/highlight.yml
diff --git a/examples/flowdump/json.yml b/examples/flowdump/json.yml
new file mode 100644
index 0000000..2016141
--- /dev/null
+++ b/examples/flowdump/json.yml
@@ -0,0 +1,19 @@
+---
+###############################################################################
+# Consume flow messages; it's best to use an enriched topic, as flowdump
+# printing involves interface descriptions.
+- segment: kafkaconsumer
+  config:
+    server: kafka01.example.com:9093
+    topic: flow-messages-enriched
+    group: myuser-flowdump
+    user: myuser
+    pass: $KAFKA_SASL_PASS
+
+###############################################################################
+# JSON output of flow messages. If no filename is configured, output
+# is redirected to stdout.
+#
+- segment: json
+  config:
+    filename: ""
\ No newline at end of file
diff --git a/examples/flowdump/kafkaflowdump.yml b/examples/flowdump/kafkaflowdump.yml
new file mode 100644
index 0000000..24c97dc
--- /dev/null
+++ b/examples/flowdump/kafkaflowdump.yml
@@ -0,0 +1,20 @@
+---
+###############################################################################
+# Consume flow messages; it's best to use an enriched topic, as flowdump
+# printing involves interface descriptions.
+- segment: kafkaconsumer
+  config:
+    server: kafka01.example.com:9093
+    topic: flow-messages-enriched
+    group: myuser-flowdump
+    user: myuser
+    pass: $KAFKA_SASL_PASS
+
+###############################################################################
+# tcpdump-style output of flows to stdout
+- segment: printflowdump
+  # the lines below are optional and set to default
+  config:
+    useprotoname: true
+    verbose: false
+    highlight: false
\ No newline at end of file
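As the example README points out, the input segments are interchangeable: any of the pipelines above can read from `goflow` instead of `kafkaconsumer` (or from `bpf`) without touching the rest of the configuration. A minimal sketch of such a swap for the tcpdump-style flowdump pipeline; the `goflow` segment's own parameters are omitted here and would need to be taken from CONFIGURATION.md:

```yaml
---
###############################################################################
# Receive flows directly with the goflow segment instead of consuming them
# from Kafka; its configuration is omitted in this sketch.
- segment: goflow

###############################################################################
# Filter flows via the first commandline argument, as in the other examples.
- segment: flowfilter
  config:
    filter: $0

###############################################################################
# tcpdump-style output of flows to stdout.
- segment: printflowdump
```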