ELK--mysql slow.log

Posted by monkeybron

tags:

篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了ELK--mysql slow.log相关的知识,希望对你有一定的参考价值。

思路:Beats -> Logstash -> Elasticsearch

# Filebeat input: tail the MySQL slow log and group each slow-query entry
# (which spans multiple physical lines) into a single event via multiline.
filebeat.inputs:
- type: log
  # Change to true to enable this input configuration.
  enabled: true
  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /data/dblogs/mysql3306/slowlogs/mysql_slow.log
    #- c:\programdata\elasticsearch\logs\*
  # Exclude lines. A list of regular expressions to match. It drops the lines that are
  # matching any regular expression from the list.
  # NOTE: plain ASCII single quotes are required here; typographic quotes (‘…‘)
  # are not YAML quoting, so the pattern would silently never match.
  exclude_lines: ['^# Time']
  # Include lines. A list of regular expressions to match. It exports the lines that are
  # matching any regular expression from the list.
  #include_lines: ['^ERR', '^WARN']
  # Exclude files. A list of regular expressions to match. Filebeat drops the files that
  # are matching any regular expression from the list. By default, no files are dropped.
  #exclude_files: ['.gz$']
  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering
  fields:
    # Routing tag consumed by the Logstash pipeline's [fields][type] conditionals.
    type: mysql-slow-log
  #  level: debug
  #  review: 1
  ### Multiline options
  # Multiline can be used for log messages spanning multiple lines. This is common
  # for Java Stack Traces or C-Line Continuation
  # The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
  multiline.pattern: "^# User@Host:"
  # Defines if the pattern set under pattern should be negated or not. Default is false.
  multiline.negate: true
  # Match can be set to "after" or "before". It is used to define if lines should be append to a pattern
  # that was (not) matched before or after or as long as a pattern is not matched based on negate.
  # Note: After is the equivalent to previous and before is the equivalent to to next in Logstash
  multiline.match: after
# Module configuration loading. Filebeat variable expansion requires braces:
# a bare "$path.config" is NOT expanded and the glob would match nothing.
filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml
  # Set to true to enable config reloading
  reload.enabled: false
  # Period on which files under path should be checked for changes
  #reload.period: 10s
# Settings applied to the Elasticsearch index template that Filebeat installs.
setup.template.settings:
  # One primary shard for the index created from this template.
  index.number_of_shards: 1
  #index.codec: best_compression
  #_source.enabled: false
# Kibana endpoint used for dashboard loading (all options left commented out).
setup.kibana:
  # Kibana Host
  # Scheme and port can be left out and will be set to the default (http and 5601)
  # In case you specify an additional path, the scheme is required: http://localhost:5601/path
  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
  #host: "localhost:5601"
  # Kibana Space ID
  # ID of the Kibana Space into which the dashboards should be loaded. By default,
  # the Default Space will be used.
  #space.id:
  # NOTE(review): the hosts/protocol/username/password lines below look like they were
  # copied from the stock output.elasticsearch section; under setup.kibana they would
  # have no effect even if uncommented — confirm intent, then move or delete them.
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]
  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"
# Ship events to Logstash (Beats -> Logstash -> Elasticsearch, per the pipeline
# described at the top of this post). Port 5044 matches the beats input below.
output.logstash:
  # The Logstash hosts
  #hosts: ["localhost:5044"]
  hosts: ["192.168.31.6:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"
  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"
# Enrich every event with host metadata and (when running on a cloud VM)
# cloud-provider metadata before shipping.
processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~

 

 

 

 

 

# Logstash pipeline: receive slow-log events from Filebeat, parse them with
# grok, shift @timestamp to local time (+8h), and index into Elasticsearch.
# NOTE(review): the original post lost every '{'/'}' character to the blog
# scraper (block braces, grok %{...} patterns, the index sprintf). They are
# restored here; the Ruby string quotes are reconstructed as well — without
# them event.set(timestamp, ...) is not valid Ruby.
input {
  beats {
    port => 5044
  }
}

filter {
  if [fields][type] == "mysql-slow-log" {
    # Collapse the multiline event into a single line before grokking.
    mutate {
      gsub => ["message", "\\n", ""]
    }
    # Extract user, client host/ip, timings, row counts and the query text.
    grok {
      match => [ "message", "^#\s+User@Host:\s+%{USER:user}\[[^\]]+\]\s+@\s+(?:(?<clienthost>\S*) )?\[(?:%{IP:clientip})?\]\s+Id:\s+%{NUMBER:id}\s*# Query_time: %{NUMBER:query_time}\s+Lock_time: %{NUMBER:lock_time}\s+Rows_sent: %{NUMBER:rows_sent}\s+Rows_examined: %{NUMBER:rows_examined}\s*SET\s+timestamp=%{NUMBER:timestamp_mysql};\s*(?<query>[\s\S]*);" ]
    }
    # Shift the event time by +8 hours (UTC -> CST); done via a temp field.
    ruby {
      code => "event.set('timestamp', event.get('@timestamp').time.localtime + 8*60*60)"
    }
    ruby {
      code => "event.set('@timestamp', event.get('timestamp'))"
    }
    mutate {
      remove_field => ["timestamp"]
    }
    # Drop Beats bookkeeping fields that are not useful in the index.
    mutate {
      remove_field => ["ecs","input","flags","message","host","tags","timestamp_mysql","@version"]
    }
  }
}

output {
  # stdout { codec => rubydebug }
  if [fields][type] == "mysql-slow-log" {
    elasticsearch {
      # NOTE(review): host differs from the Filebeat target (192.168.31.6) —
      # presumably a separate ES node; confirm against the deployment.
      hosts => ["192.168.0.1:9200"]
      # Monthly index, e.g. mysql-slow-log-2020-01.
      index => "mysql-slow-log-%{+YYYY-MM}"
    }
  }
}

 

以上是关于ELK--mysql slow.log的主要内容,如果未能解决你的问题,请参考以下文章

ELK+MySQL出现大量重复记录问题处理