###################### Filebeat Configuration Example #########################

# This file is an example configuration file highlighting only the most common
# options. The filebeat.reference.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/filebeat/index.html

# For more available modules and options, please see the filebeat.reference.yml sample
# configuration file.

#=========================== Filebeat inputs =============================

filebeat.inputs:

# Each - is an input. Most options can be set at the input level, so
# you can use different inputs for various configurations.
# Below are the input specific configurations.

- type: log

  # Change to true to enable this input configuration.
  enabled: true

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
    - /var/log/system.log
    - /var/log/wifi.log
    #- c:\programdata\elasticsearch\logs\*

  # Exclude lines. A list of regular expressions to match. It drops the lines that are
  # matching any regular expression from the list.
  #exclude_lines: ['^DBG']

  # Include lines. A list of regular expressions to match. It exports the lines that are
  # matching any regular expression from the list.
  #include_lines: ['^ERR', '^WARN']

  # Exclude files. A list of regular expressions to match. Filebeat drops the files that
  # are matching any regular expression from the list. By default, no files are dropped.
  #exclude_files: ['.gz$']

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering
  #fields:
  #  level: debug
  #  review: 1

  ### Multiline options

  # Multiline can be used for log messages spanning multiple lines. This is common
  # for Java Stack Traces or C-Line Continuation

  # The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
  #multiline.pattern: ^\[

  # Defines if the pattern set under pattern should be negated or not. Default is false.
  #multiline.negate: false

  # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
  # that was (not) matched before or after, or as long as a pattern is not matched based on negate.
  # Note: After is the equivalent to previous and before is the equivalent to next in Logstash
  #multiline.match: after

#============================= Filebeat modules ===============================

filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml

  # Set to true to enable config reloading
  reload.enabled: false

  # Period on which files under path should be checked for changes
  #reload.period: 10s

#==================== Elasticsearch template setting ==========================

setup.template.settings:
  index.number_of_shards: 3
  #index.codec: best_compression
  #_source.enabled: false

#================================ General =====================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
#  env: staging


#============================== Dashboards =====================================
# These settings control loading the sample dashboards to the Kibana index. Loading
# the dashboards is disabled by default and can be enabled either by setting the
# options here, or by using the `-setup` CLI flag or the `setup` command.
#setup.dashboards.enabled: false

# The URL from where to download the dashboards archive. By default this URL
# has a value which is computed based on the Beat name and version. For released
# versions, this URL points to the dashboard archive on the artifacts.elastic.co
# website.
#setup.dashboards.url:

#============================== Kibana =====================================

# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
setup.kibana:

  # Kibana Host
  # Scheme and port can be left out and will be set to the default (http and 5601)
  # In case you specify an additional path, the scheme is required: http://localhost:5601/path
  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
  host: "13.56.27.197:5601"

  # Basic auth credentials for the Kibana endpoint.
  # SECURITY NOTE(review): plaintext credentials are committed in this file. Store
  # them in the Filebeat keystore instead (`filebeat keystore add kibana_password`)
  # and reference them as "${kibana_password}"; rotate this password, since it has
  # been exposed in version control.
  username: "kibanaadmin"
  password: "zelda1221"

  # Kibana Space ID
  # ID of the Kibana Space into which the dashboards should be loaded. By default,
  # the Default Space will be used.
  #space.id:

#============================= Elastic Cloud ==================================

# These settings simplify using filebeat with the Elastic Cloud (https://cloud.elastic.co/).

# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:

# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:

#================================ Outputs =====================================

# Configure what output to use when sending the data collected by the beat.
# Note: only one output may be enabled at a time; the Logstash output is active below.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Enabled ilm (beta) to use index lifecycle management instead daily indices.
  #ilm.enabled: false

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["13.56.27.197:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

#================================ Processors =====================================

# Configure processors to enhance or manipulate events generated by the beat.

processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~

#================================ Logging =====================================

# Sets log level. The default log level is info.
# Available log levels are: error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]

#============================== Xpack Monitoring ===============================
# filebeat can export internal metrics to a central Elasticsearch monitoring
# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The
# reporting is disabled by default.

# Set to true to enable the monitoring reporter.
#xpack.monitoring.enabled: false

# Uncomment to send the metrics to Elasticsearch. Most settings from the
# Elasticsearch output are accepted here as well. Any setting that is not set is
# automatically inherited from the Elasticsearch output configuration, so if you
# have the Elasticsearch output configured, you can simply uncomment the
# following line.
#xpack.monitoring.elasticsearch: