[Application Options]
; APIHost = http://changeuser:changepass@localhost:8123/
; Only send 1 / N log lines
; SampleRate = 1
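; Example (illustrative value only; keeps roughly 1 in 20 lines):
; SampleRate = 20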
; Number of concurrent connections to open to Honeycomb
; NumSenders = 10
; How frequently to flush batches
; BatchFrequencyMs = 10000
; Maximum number of messages to put in a batch
; BatchSize = 1000000
; Print debugging output
; Debug = false
; How frequently, in seconds, to print out summary info
; StatusInterval = 60
; Configure clicktail to ingest old data in order to backfill Honeycomb. Sets the correct values for --backoff, --tail.read_from, and --tail.stop
; Backfill = false
; When parsing a timestamp that has no time zone, assume it is in the same timezone as localhost instead of UTC (the default)
; Localtime = false
; When parsing a timestamp use this time zone instead of UTC (the default). Must be specified in TZ format as seen here: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
; Timezone =
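; Example (illustrative; any tz database name from the list above is valid):
; Timezone = America/New_York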
; For the field listed, apply a one-way hash to the field content. May be specified multiple times
; ScrubFields =
; Do not send the field to Honeycomb. May be specified multiple times
; DropFields =
; Add the field to every event. Field should be key=val. May be specified multiple times
; AddFields =
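; Example (hypothetical key and value; any key=val pair works):
; AddFields = environment=production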
; Identify a field that contains an HTTP request of the form 'METHOD /path HTTP/1.x' or just the request path. Break apart that field into subfields that contain components. May be specified multiple times. Defaults to 'request' when using the nginx parser
; RequestShape =
; Prefix to use on fields generated from request_shape to prevent field collision
; ShapePrefix =
; A pattern for the request path on which to base the derived request_shape. May be specified multiple times. Patterns are considered in order; first match wins.
; RequestPattern =
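; Example (hypothetical path; ':id' is assumed to mark a variable path segment):
; RequestPattern = /api/v1/books/:id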
; How to parse the request query parameters. 'whitelist' means only extract listed query keys. 'all' means to extract all query parameters as individual columns
; RequestParseQuery = whitelist
; Request query parameter key names to extract, when request_parse_query is 'whitelist'. May be specified multiple times.
; RequestQueryKeys =
; When rate limited by the API, back off and retry sending failed events. Otherwise failed events are dropped. When --backfill is set, this option is overridden to true
; BackOff = false
; Pass a regex to this flag to strip the matching prefix from the line before handing it to the parser. Useful when log aggregation prepends a line header. Use named groups to extract fields into the event.
; PrefixRegex =
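; Example (illustrative; strips a hypothetical "host program:" prefix and keeps the host as a field named syslog_host):
; PrefixRegex = ^(?P<syslog_host>\S+)\s+\S+:\s*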
; Enable dynamic sampling using the field listed in this option. May be specified multiple times; fields will be concatenated to form the dynsample key. WARNING: increases CPU utilization dramatically over normal sampling
; DynSample =
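; Example (hypothetical field names; both fields are concatenated to form the sample key):
; DynSample = status_code
; DynSample = request_method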
; Measurement window size for the dynsampler, in seconds
; DynWindowSec = 30
; If the rate of traffic falls below this, the dynsampler won't sample
; MinSampleRate = 1
[Required Options]
; Parser module to use. Use --list to list available options.
; ParserName =
; Log file(s) to parse. Use '-' for STDIN, use this flag multiple times to tail multiple files, or use a glob (/path/to/foo-*.log)
; LogFiles =
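; Example (illustrative paths; repeat the option to tail more than one file):
; LogFiles = /var/log/nginx/access.log
; LogFiles = /var/log/nginx/api-*.log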
; Name of the dataset
; Dataset =
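; Example (hypothetical dataset name):
; Dataset = nginx-access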
[Other Modes]
; Show this help message
; Help = false
; List available parsers
; ListParsers = false
; Show version
; Version = false
[Tail Options]
; Location in the file from which to start reading. Values: beginning, end, last. 'last' picks up where it left off if the file has not been rotated; otherwise it reads from the beginning. When --backfill is set, this option is overridden to beginning
; ReadFrom = last
; Stop reading the file after reaching the end rather than continuing to tail. When --backfill is set, this option is overridden to true
; Stop = false
; Use poll instead of inotify to tail files
; Poll = false
; File in which to store the last read position. Defaults to a file in /tmp named $logfile.leash.state. If tailing multiple files, the default is always used.
; StateFile =
[JSON Parser Options]
; Name of the field that contains a timestamp
; TimeFieldName =
; Format of the timestamp found in timefield (supports strftime and Golang time formats)
; TimeFieldFormat =
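; Example (illustrative; a hypothetical "timestamp" field holding an RFC3339 time, expressed as a Go reference-time layout):
; TimeFieldName = timestamp
; TimeFieldFormat = 2006-01-02T15:04:05Z07:00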
[KeyVal Parser Options]
; Name of the field that contains a timestamp
; TimeFieldName =
; Format of the timestamp found in timefield (supports strftime and Golang time formats)
; TimeFieldFormat =
; A regular expression that will filter the input stream and only parse lines that match
; FilterRegex =
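; Example (illustrative; only parse lines whose status key holds a 4xx or 5xx value):
; FilterRegex = status=(4|5)\d\d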
; Change the filter_regex to only process lines that do *not* match
; InvertFilter = false
[MongoDB Parser Options]
; Send what was successfully parsed from a line (only if the error occurred in the log line's message).
; LogPartials = false
[MySQL Parser Options]
; MySQL host in the format (address:port)
; Host =
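; Example (illustrative; address:port form, using MySQL's default port):
; Host = 127.0.0.1:3306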
; MySQL username
; User =
; MySQL password
; Pass =
; Interval for querying the MySQL DB, in seconds
; QueryInterval = 30
[Nginx Parser Options]
; Path to Nginx config file
; ConfigFile =
; Log format name to look for in the Nginx config file
; LogFormatName =
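; Example (illustrative; assumes the nginx config at the standard path uses the predefined 'combined' format):
; ConfigFile = /etc/nginx/nginx.conf
; LogFormatName = combined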
; Name of the field that contains a timestamp
; TimeFieldName =
; Timestamp format to use (strftime and Golang time.Parse supported)
; TimeFieldFormat =
[PostgreSQL Parser Options]
; Format string for PostgreSQL log line prefix
; LogLinePrefix =
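; Example (illustrative; must mirror the log_line_prefix setting in postgresql.conf: %t=time, %p=PID, %l=session line number, %u=user, %d=database):
; LogLinePrefix = %t [%p-%l] %u@%d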
[Regex Parser Options]
; Regular expression with named capture groups representing the fields you want parsed (RE2 syntax). You can enter multiple regexes to match (--regex.line_regex="(?P<foo>re)" --regex.line_regex="(?P<bar>...)"). Parses using the first regex to match a line, so list them in most-to-least-specific order.
; LineRegex =
; Name of the field that contains a timestamp
; TimeFieldName =
; Timestamp format to use (strftime and Golang time.Parse supported)
; TimeFieldFormat =
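; Example (illustrative; parses hypothetical lines like "2019-01-02T15:04:05Z GET /health 200" with RE2 named groups):
; LineRegex = (?P<time>\S+) (?P<method>\S+) (?P<path>\S+) (?P<status>\d+)
; TimeFieldName = time
; TimeFieldFormat = 2006-01-02T15:04:05Z07:00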