diff --git a/CONFIGURATION.md b/CONFIGURATION.md index b89c30f..2a81ab4 100644 --- a/CONFIGURATION.md +++ b/CONFIGURATION.md @@ -349,12 +349,12 @@ this can be configured using the fields parameter. ``` - segment: anonymize config: - encryptionkey: "abcdef" + key: "abcdef" # the lines below are optional and set to default fields: "SrcAddr,DstAddr,SamplerAddress" ``` -[any additional links](https://bwnet.belwue.de) +[CryptoPan module](https://github.com/Yawning/cryptopan) [godoc](https://pkg.go.dev/github.com/bwNetFlow/flowpipeline/segments/modify/anonymize) [examples using this segment](https://github.com/search?q=%22segment%3A+anonymize%22+extension%3Ayml+repo%3AbwNetFlow%2Fflowpipeline%2Fexamples&type=Code) @@ -529,6 +529,8 @@ sequence to export to different places. The `csv` segment provides an CSV output option. It uses stdout by default, but can be instructed to write to file using the filename parameter. The fields parameter can be used to limit which fields will be exported. +If no filename is provided or empty, the output goes to stdout. +By default all fields are exported. To reduce them, use a valid comma separated list of fields. ``` - segment: csv diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000..3ecc72d --- /dev/null +++ b/examples/README.md @@ -0,0 +1,41 @@ +# Flowpipeline Example Collection + +This collection of example configs is supposed to help users get started using +different use cases. A grouped and alphabetically sorted +[reference](https://github.com/bwNetFlow/flowpipeline/blob/master/CONFIGURATION.md) +might be the best resource when trying to achieve a specific outcome; this +short guide however tries to give new users some idea of what is possible with +this tool and present existing users with additional options. + +The most natural way to group these examples is to list them by which input +segment they use, aka "where they get flows from". 
Note that these input +segments can be freely interchanged for one another, and all examples work with +all inputs. + + +## `bpf` +This segment accesses local network interfaces using raw sockets, as for instance tcpdump does. + +Relevant examples are: +* [./flowdump/bpf.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump/bpf.yml) -- create a tcpdump style view with custom filtering from CLI using local + interfaces + + +## `goflow` +This segment allows listening for raw IPFIX, Netflow, or sFlow by using goflow2's API. + +Relevant examples are: +* [./localkafka/write.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/localkafka) -- emulate plain goflow2 and write flows to a Kafka topic for the following section to use + + +## `kafkaconsumer` +This segment accesses streams of flows generated by another pipeline using +`kafkaproducer` or [goflow2](https://github.com/netsampler/goflow2). + +Relevant examples are: +* [./flowdump/kafkaflowdump.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump/kafkaflowdump.yml) -- create a tcpdump style view with custom filtering from CLI +* [./flowdump/highlight.yml](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/flowdump/highlight.yml) -- create a tcpdump style view but use the filtering conditional to highlight desired flows instead of dropping undesired flows +* [./enricher](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/enricher) -- enrich flows with various bits of data and store them back in Kafka +* [./reducer](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/reducer) -- strip flows of fields and store them back in Kafka +* [./splitter](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/splitter) -- distribute flows to multiple Kafka topics based on a field +* [./anonymizer](https://github.com/bwNetFlow/flowpipeline/tree/master/examples/anonymizer) -- anonymize IP addresses using Crypto PAn diff --git 
a/examples/flowdump/config.yml b/examples/flowdump/bpf.yml similarity index 55% rename from examples/flowdump/config.yml rename to examples/flowdump/bpf.yml index 2c59fdd..e5333c0 100644 --- a/examples/flowdump/config.yml +++ b/examples/flowdump/bpf.yml @@ -1,14 +1,11 @@ --- ############################################################################### -# Consume flow messages, it's best to use an enriched topic as flowdump -# printing involves interface descriptions. -- segment: kafkaconsumer +# Generate flow messages from packet headers. This uses an eBPF program and a +# custom flow cache. Requires root or the correct capabilities as specified by +# the project's README.md +- segment: bpf config: - server: kafka01.example.com:9093 - topic: flow-messages-enriched - group: myuser-flowdump - user: myuser - pass: $KAFKA_SASL_PASS + device: eth0 ############################################################################### # Filter flows dynamically, i.e. via commandline argument. This makes for a @@ -17,10 +14,6 @@ config: filter: $0 # this refers to the first arg. -############################################################################### -# Add human readable protocol names to any flow message -- segment: protomap - ############################################################################### # Print flowdump, this is a predefined format that is not configurable at this # point. diff --git a/examples/flowdump/csv.yml b/examples/flowdump/csv.yml new file mode 100644 index 0000000..ae7030c --- /dev/null +++ b/examples/flowdump/csv.yml @@ -0,0 +1,22 @@ +--- +############################################################################### +# Consume flow messages, it's best to use an enriched topic as flowdump +# printing involves interface descriptions. 
+- segment: kafkaconsumer + config: + server: kafka01.example.com:9093 + topic: flow-messages-enriched + group: myuser-flowdump + user: myuser + pass: $KAFKA_SASL_PASS + +############################################################################### +# CSV output with given fields. If no filename is configured output +# is redirected to stdout. +# +# Example list for fields may look like +# "TimeFlowStart,TimeFlowEnd,Bytes,Packets,SrcAddr,SrcPort,FlowDirection,DstAddr,DstPort,Proto" +- segment: csv + config: + filename: "" + fields: "" \ No newline at end of file diff --git a/examples/highlighted_flowdump/config.yml b/examples/flowdump/highlight.yml similarity index 100% rename from examples/highlighted_flowdump/config.yml rename to examples/flowdump/highlight.yml diff --git a/examples/flowdump/json.yml b/examples/flowdump/json.yml new file mode 100644 index 0000000..2016141 --- /dev/null +++ b/examples/flowdump/json.yml @@ -0,0 +1,19 @@ +--- +############################################################################### +# Consume flow messages, it's best to use an enriched topic as flowdump +# printing involves interface descriptions. +- segment: kafkaconsumer + config: + server: kafka01.example.com:9093 + topic: flow-messages-enriched + group: myuser-flowdump + user: myuser + pass: $KAFKA_SASL_PASS + +############################################################################### +# JSON output of flow messages. If no filename is configured output +# is redirected to stdout. +# +- segment: json + config: + filename: "" \ No newline at end of file diff --git a/examples/flowdump/kafkaflowdump.yml b/examples/flowdump/kafkaflowdump.yml new file mode 100644 index 0000000..24c97dc --- /dev/null +++ b/examples/flowdump/kafkaflowdump.yml @@ -0,0 +1,20 @@ +--- +############################################################################### +# Consume flow messages, it's best to use an enriched topic as flowdump +# printing involves interface descriptions. 
+- segment: kafkaconsumer + config: + server: kafka01.example.com:9093 + topic: flow-messages-enriched + group: myuser-flowdump + user: myuser + pass: $KAFKA_SASL_PASS + +############################################################################### +# tcpdump-style output of flows to stdout +- segment: printflowdump + # the lines below are optional and set to default + config: + useprotoname: true + verbose: false + highlight: false \ No newline at end of file diff --git a/examples/localkafka/write.yml b/examples/localkafka/write.yml index 379e748..29d588e 100644 --- a/examples/localkafka/write.yml +++ b/examples/localkafka/write.yml @@ -1,4 +1,9 @@ --- +# Note that this write.yml could just as well be a real goflow2 instance as no +# additional processing takes place. Thus, using goflow2 directly might be +# preferable. + + ############################################################################### # Receive Netflow v9 using Goflow on a local port. - segment: goflow diff --git a/segments/print/printflowdump/printflowdump.go b/segments/print/printflowdump/printflowdump.go index 8c2c353..4fe7bfa 100644 --- a/segments/print/printflowdump/printflowdump.go +++ b/segments/print/printflowdump/printflowdump.go @@ -80,6 +80,16 @@ func (segment PrintFlowdump) format_flow(flowmsg *flow.FlowMessage) string { src := net.IP(flowmsg.SrcAddr) dst := net.IP(flowmsg.DstAddr) router := net.IP(flowmsg.SamplerAddress) + + var srcas, dstas string + if segment.Verbose { + if flowmsg.SrcAS != 0 { + srcas = fmt.Sprintf("AS%d/", flowmsg.SrcAS) + } + if flowmsg.DstAS != 0 { + dstas = fmt.Sprintf("AS%d/", flowmsg.DstAS) + } + } var proto string if segment.UseProtoname { if flowmsg.ProtoName != "" { @@ -195,9 +205,10 @@ func (segment PrintFlowdump) format_flow(flowmsg *flow.FlowMessage) string { color = "\033[0m" } - return fmt.Sprintf("%s%s: %s:%d -> %s:%d [%s → %s@%s → %s], %s, %ds, %s, %s", - color, timestamp, src, flowmsg.SrcPort, dst, flowmsg.DstPort, - srcIfDesc, statusString, 
router, dstIfDesc, proto, duration, + return fmt.Sprintf("%s%s: %s%s:%d → %s%s:%d [%s → %s@%s → %s], %s, %ds, %s, %s", + color, timestamp, srcas, src, flowmsg.SrcPort, dstas, dst, + flowmsg.DstPort, srcIfDesc, statusString, router, dstIfDesc, + proto, duration, humanize.SI(float64(flowmsg.Bytes*8/duration), "bps"), humanize.SI(float64(flowmsg.Packets/duration), "pps")) }