#!/usr/bin/python
"""
sleuth performs query operations on JSON-formatted flow objects; see sleuth --help for more details
*
* Copyright (c) 2017 Cisco Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* Neither the name of the Cisco Systems, Inc. nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
"""
import sys
import argparse
sys.path.append("/usr/local/lib/python")
try:
    from sleuth import *
except ImportError:
    # If in source code directory
    from sleuth_pkg import *
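
# Note: an installed copy of the sleuth package is looked up under
# /usr/local/lib/python (appended to sys.path above); the sleuth_pkg
# fallback lets this script run straight from a source checkout.
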
def attach_iterators(source, args):
    """Wrap the flow source in the enrich/filter/stitch/select/normalize
    iterators requested on the command line, and return the wrapped source."""
    source = DNSLinkedFlowEnrichIterator(source)
    if args.tls_sec:
        source = DictStreamEnrichIterator(source, "tls_sec", enrich_tls,
                                          policy_file=args.policy_file, unknowns=args.unknowns,
                                          compliance=args.compliance, failure_threshold=args.failure_threshold)
    if args.fplist:
        source = DictStreamEnrichIntoIterator(source, "inferences", tls_inference)
    if args.filter:
        source = DictStreamFilterIterator(source, SleuthPredicate(args.filter))
    if not args.no_stitch:
        source = FlowStitchIterator(source)
    if args.selection:
        source = DictStreamSelectIterator(source, args.selection)
    if args.norm_keys:
        source = DictStreamNormalizeIterator(source, args.norm_keys)
    return source
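
# Note on the ordering in attach_iterators(): --where filtering sees
# individual flows before stitching, while --select and --normalize operate
# on the records that survive filtering and stitching.
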
#
# Main Processing Pipeline
#
def pipeline():
    parser = argparse.ArgumentParser(
        description='Interrogate JSON flow data and print out matching flows, selected fields, or stats.'
    )
    parser.add_argument("input", nargs='*', default=sys.stdin,
                        help="Input (json or pcap) file(s).")
    parser.add_argument("--select", dest="selection",
                        help="Select key(s) to output.")
    parser.add_argument("--where", dest="filter",
                        help="Filter flows according to the provided key/value.")
    parser.add_argument("--normalize", dest="norm_keys",
                        help="Set values of selected key(s) to Null.")
    parser.add_argument("--groupby", dest="group_keys",
                        help='Split processing into separate pipelines, grouping by each ' +
                             'unique value of the provided key(s).')
    parser.add_argument("--dist", dest='dist', action="store_true",
                        help="Compute the distribution of unique values for the key(s) specified in --select.")
    parser.add_argument("--pretty", dest='pretty', action="store_true",
                        help="Pretty-print JSON output.")
    parser.add_argument("--no_stitch", dest='no_stitch', action="store_true",
                        help="Turn off stitching together successive flows separated by active timeouts.")
    parser.add_argument("--sum", dest="sumvars",
                        help="Compute the sum over the selected element(s).")
    parser.add_argument("--fingerprint", dest="fplist", action="store_true",
                        help="Enrich with inferences about fingerprint(s).")
    parser.add_argument("--tls_sec", dest='tls_sec', action="store_true",
                        help="Report the security level of TLS sessions.")
    parser.add_argument("--sec_policy_file", dest='policy_file',
                        default="res_tls_policy.json",
                        help="File containing the seclevel policy (default: res_tls_policy.json).")
    parser.add_argument("--sec_failure_threshold", dest='failure_threshold',
                        type=int, default=None,
                        help="Integer defining a custom failure-reporting threshold. " +
                             "Adjust the 'default_failure_threshold' value in the policy JSON file " +
                             "to change the default.")
    parser.add_argument("--sec_unknowns", dest='unknowns',
                        choices=['report', 'ignore'], default="report",
                        help="Report or ignore unknown ciphersuites, extensions, etc.; defaults to 'report'.")
    parser.add_argument("--sec_compliance", nargs="*", dest='compliance',
                        help="List of policies to soft-check selected cipher-suite compliance against, " +
                             "e.g. 'fips_140' as defined in res_tls_compliance.json.")

    # Parse the command line and check the arguments
    args = parser.parse_args()
    if args.pretty:
        json_indent = 3
    else:
        json_indent = None
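
    # The indent value is handed to every processor constructed below;
    # --pretty selects 3-space indentation, otherwise output stays compact.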

    #
    # Select flow processor
    #
    fp = DictStreamProcessor(indent=json_indent)
    if args.group_keys:
        fp = DictStreamGroupProcessor(fp, args.group_keys)
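
    # With --groupby, the group processor splits processing into a separate
    # sub-pipeline for each unique value of the given key(s).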

    #
    # Select post-processor
    #
    if args.dist:
        postproc = DictStreamDistributionProcessor(indent=json_indent)
    elif args.sumvars:
        postproc = DictStreamSumProcessor(args.sumvars, indent=json_indent)
    else:
        postproc = DictStreamProcessor(indent=json_indent)
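
    # Note: --dist takes precedence over --sum when both are given; with
    # neither, a plain pass-through processor handles post-processing.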

    #
    # Process all files, with pre- and post-processing
    #
    fp.pre_process()
    if args.input is sys.stdin:
        #
        # Use standard input as source
        #
        flow_source = FlowIteratorFromFile(stdin=sys.stdin)
        # Add iteration-behavior to transform flows
        flow_source = attach_iterators(flow_source, args)
        # Process all flows from source
        try:
            for flow in flow_source:
                fp.main_process(flow)
        except KeyboardInterrupt:
            sys.exit()
    else:
        #
        # Use files as source, looping over each
        #
        for x in args.input:
            flow_source = FlowIteratorFromFile(file_name=x)
            # Add iteration-behavior to transform flows
            flow_source = attach_iterators(flow_source, args)
            # Process all flows from source
            try:
                for flow in flow_source:
                    fp.main_process(flow)
            except KeyboardInterrupt:
                sys.exit()
    fp.post_process(postproc)
"""
Script entry point
"""
if __name__ == '__main__':
pipeline()