Repository: geerlingguy/ansible-role-logstash Branch: master Commit: c8aaeb69f766 Files: 30 Total size: 18.1 KB Directory structure: gitextract_t1sc7g26/ ├── .ansible-lint ├── .github/ │ ├── FUNDING.yml │ └── workflows/ │ ├── ci.yml │ ├── release.yml │ └── stale.yml ├── .gitignore ├── .yamllint ├── LICENSE ├── README.md ├── defaults/ │ └── main.yml ├── files/ │ └── filters/ │ ├── 10-syslog.conf │ ├── 11-nginx.conf │ ├── 12-apache.conf │ ├── 14-solr.conf │ └── 15-drupal.conf ├── handlers/ │ └── main.yml ├── meta/ │ └── main.yml ├── molecule/ │ └── default/ │ ├── converge.yml │ ├── molecule.yml │ └── requirements.yml ├── tasks/ │ ├── config.yml │ ├── main.yml │ ├── plugins.yml │ ├── setup-Debian.yml │ ├── setup-RedHat.yml │ └── ssl.yml └── templates/ ├── 01-beats-input.conf.j2 ├── 02-local-syslog-input.conf.j2 ├── 30-elasticsearch-output.conf.j2 └── logstash.repo.j2 ================================================ FILE CONTENTS ================================================ ================================================ FILE: .ansible-lint ================================================ skip_list: - 'yaml' - 'role-name' ================================================ FILE: .github/FUNDING.yml ================================================ # These are supported funding model platforms --- github: geerlingguy patreon: geerlingguy ================================================ FILE: .github/workflows/ci.yml ================================================ --- name: CI 'on': pull_request: push: branches: - master schedule: - cron: "30 4 * * 3" defaults: run: working-directory: 'geerlingguy.logstash' jobs: lint: name: Lint runs-on: ubuntu-latest steps: - name: Check out the codebase. uses: actions/checkout@v4 with: path: 'geerlingguy.logstash' - name: Set up Python 3. uses: actions/setup-python@v5 with: python-version: '3.13' # Can't go to 3.14+ until Ansible 13.x - name: Install test dependencies. run: pip3 install yamllint - name: Lint code. run: | yamllint . 
molecule: name: Molecule runs-on: ubuntu-latest strategy: matrix: distro: - rockylinux10 - ubuntu2404 steps: - name: Check out the codebase. uses: actions/checkout@v4 with: path: 'geerlingguy.logstash' - name: Set up Python 3. uses: actions/setup-python@v5 with: python-version: '3.13' # Can't go to 3.14+ until Ansible 13.x - name: Install test dependencies. run: pip3 install ansible molecule molecule-plugins[docker] docker - name: Run Molecule tests. run: molecule test env: PY_COLORS: '1' ANSIBLE_FORCE_COLOR: '1' MOLECULE_DISTRO: ${{ matrix.distro }} ================================================ FILE: .github/workflows/release.yml ================================================ --- # This workflow requires a GALAXY_API_KEY secret present in the GitHub # repository or organization. # # See: https://github.com/marketplace/actions/publish-ansible-role-to-galaxy # See: https://github.com/ansible/galaxy/issues/46 name: Release 'on': push: tags: - '*' defaults: run: working-directory: 'geerlingguy.logstash' jobs: release: name: Release runs-on: ubuntu-latest steps: - name: Check out the codebase. uses: actions/checkout@v4 with: path: 'geerlingguy.logstash' - name: Set up Python 3. uses: actions/setup-python@v5 with: python-version: '3.13' # Can't go to 3.14+ until Ansible 13.x - name: Install Ansible. run: pip3 install ansible-core - name: Trigger a new import on Galaxy. 
run: >- ansible-galaxy role import --api-key ${{ secrets.GALAXY_API_KEY }} $(echo ${{ github.repository }} | cut -d/ -f1) $(echo ${{ github.repository }} | cut -d/ -f2) ================================================ FILE: .github/workflows/stale.yml ================================================ --- name: Close inactive issues 'on': schedule: - cron: "55 2 * * 0" # semi-random time jobs: close-issues: runs-on: ubuntu-latest permissions: issues: write pull-requests: write steps: - uses: actions/stale@v8 with: days-before-stale: 120 days-before-close: 60 exempt-issue-labels: bug,pinned,security,planned exempt-pr-labels: bug,pinned,security,planned stale-issue-label: "stale" stale-pr-label: "stale" stale-issue-message: | This issue has been marked 'stale' due to lack of recent activity. If there is no further activity, the issue will be closed in another 60 days. Thank you for your contribution! Please read [this blog post](https://www.jeffgeerling.com/blog/2020/enabling-stale-issue-bot-on-my-github-repositories) to see the reasons why I mark issues as stale. close-issue-message: | This issue has been closed due to inactivity. If you feel this is in error, please reopen the issue or file a new issue with the relevant details. stale-pr-message: | This pr has been marked 'stale' due to lack of recent activity. If there is no further activity, the issue will be closed in another 60 days. Thank you for your contribution! Please read [this blog post](https://www.jeffgeerling.com/blog/2020/enabling-stale-issue-bot-on-my-github-repositories) to see the reasons why I mark issues as stale. close-pr-message: | This pr has been closed due to inactivity. If you feel this is in error, please reopen the issue or file a new issue with the relevant details. 
repo-token: ${{ secrets.GITHUB_TOKEN }} ================================================ FILE: .gitignore ================================================ *.retry */__pycache__ *.pyc .cache ================================================ FILE: .yamllint ================================================ --- extends: default rules: line-length: max: 120 level: warning ignore: | .github/workflows/stale.yml ================================================ FILE: LICENSE ================================================ The MIT License (MIT) Copyright (c) 2017 Jeff Geerling Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ # Ansible Role: Logstash [![CI](https://github.com/geerlingguy/ansible-role-logstash/actions/workflows/ci.yml/badge.svg)](https://github.com/geerlingguy/ansible-role-logstash/actions/workflows/ci.yml) An Ansible Role that installs Logstash on RedHat/CentOS or Debian/Ubuntu. 
Note that this role installs a syslog grok pattern by default; if you want to add more filters, please add them inside the `/etc/logstash/conf.d/` directory. As an example, you could create a file named `13-myapp.conf` with the appropriate grok filter and restart logstash to start using it. Test your grok regex using the [Grok Debugger](http://grokdebug.herokuapp.com/). ## Requirements Though other methods are possible, this role is made to work with Elasticsearch as a backend for storing log messages. ## Role Variables Available variables are listed below, along with default values (see `defaults/main.yml`): logstash_version: '7.x' The major version of Logstash to install. logstash_package: logstash The specific package to be installed. You can specify a version of the package using the correct syntax for your platform and package manager by changing the package name. logstash_listen_port_beats: 5044 The port over which Logstash will listen for beats. logstash_elasticsearch_hosts: - http://localhost:9200 The hosts where Logstash should ship logs to Elasticsearch. logstash_dir: /usr/share/logstash The directory inside which Logstash is installed. logstash_ssl_dir: /etc/pki/logstash logstash_ssl_certificate_file: logstash-forwarder-example.crt logstash_ssl_key_file: logstash-forwarder-example.key Local paths to the SSL certificate and key files, which will be copied into the `logstash_ssl_dir`. See [Generating a self-signed certificate](#generating-a-self-signed-certificate) for information about generating and using self-signed certs with Logstash and Filebeat. logstash_local_syslog_path: /var/log/syslog logstash_monitor_local_syslog: true Whether configuration for local syslog file (defined as `logstash_local_syslog_path`) should be added to logstash. Set this to `false` if you are monitoring the local syslog differently, or if you don't care about the local syslog file. 
Other local logs can be added by your own configuration files placed inside `/etc/logstash/conf.d`. logstash_enabled_on_boot: true Set this to `false` if you don't want logstash to run on system startup. logstash_install_plugins: - logstash-input-beats - logstash-filter-multiline A list of Logstash plugins that should be installed. logstash_setup_default_config: true Set this to `false` if you don't want to add the default config files shipped with this role (inside the `files/filters` directory). You can add your own configuration files inside `/etc/logstash/conf.d`. ## Generating a Self-signed certificate For utmost security, you should use your own valid certificate and keyfile, and update the `logstash_ssl_*` variables in your playbook to use your certificate. To generate a self-signed certificate/key pair, you can use the command: $ openssl req -x509 -batch -nodes -days 3650 -newkey rsa:2048 -keyout logstash.key -out logstash.crt -subj '/CN=example.com' Note that Filebeat and Logstash may not work correctly with self-signed certificates unless you also have the full chain of trust (including the Certificate Authority for your self-signed cert) added on your server. See: https://github.com/elastic/logstash/issues/4926#issuecomment-203936891 Newer versions of Filebeat and Logstash also require a pkcs8-formatted private key, which can be generated by converting the key generated earlier, e.g.: openssl pkcs8 -in logstash.key -topk8 -nocrypt -out logstash.p8 ## Other Notes If you are seeing high CPU usage from one of the `logstash` processes, and you're using Logstash along with another application running on port 80 on a platform like Ubuntu with upstart, the `logstash-web` process may be stuck in a loop trying to start on port 80, failing, and trying to start again, due to the `restart` flag being present in `/etc/init/logstash-web.conf`. 
To avoid this problem, either change that line to add a `limit` to the respawn statement, or set the `logstash-web` service to `enabled=no` in your playbook, e.g.: - name: Ensure logstash-web process is stopped and disabled. service: name=logstash-web state=stopped enabled=no ## Example Playbook - hosts: search pre_tasks: - name: Use Java 8 on Debian/Ubuntu. set_fact: java_packages: - openjdk-8-jdk when: ansible_facts.os_family == 'Debian' roles: - geerlingguy.java - geerlingguy.elasticsearch - geerlingguy.logstash ## License MIT / BSD ## Author Information This role was created in 2014 by [Jeff Geerling](https://www.jeffgeerling.com/), author of [Ansible for DevOps](https://www.ansiblefordevops.com/). ================================================ FILE: defaults/main.yml ================================================ --- logstash_version: '7.x' logstash_package: logstash logstash_listen_port_beats: 5044 logstash_elasticsearch_hosts: - http://localhost:9200 logstash_local_syslog_path: /var/log/syslog logstash_monitor_local_syslog: true logstash_dir: /usr/share/logstash logstash_ssl_dir: /etc/pki/logstash logstash_ssl_certificate_file: "" logstash_ssl_key_file: "" logstash_enabled_on_boot: true logstash_install_plugins: - logstash-input-beats - logstash-filter-multiline logstash_setup_default_config: true ================================================ FILE: files/filters/10-syslog.conf ================================================ filter { if [type] == "syslog" { if [message] =~ /last message repeated [0-9]+ times/ { drop { } } grok { match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" } add_field => [ "received_at", "%{@timestamp}" ] add_field => [ "received_from", "%{host}" ] } syslog_pri { } date { match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ] } } } ================================================ FILE: 
files/filters/11-nginx.conf ================================================ filter { if [type] == "nginx" { grok { match => { "message" => "%{COMBINEDAPACHELOG}" } } } } ================================================ FILE: files/filters/12-apache.conf ================================================ filter { if [type] == "apache" { grok { match => { "message" => "%{COMBINEDAPACHELOG}"} } date { match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] } } } ================================================ FILE: files/filters/14-solr.conf ================================================ filter { if [type] == "solr" { if [message] =~ /org\.apache\.solr\.core\.SolrCore execute/ { drop { } } grok { match => { "message" => "<%{POSINT:priority}>%{SYSLOGLINE}"} } multiline { pattern => "(([^\s]+)Exception.+)|(at:.+)" stream_identity => "%{logsource}.%{@type}" what => "previous" } } } ================================================ FILE: files/filters/15-drupal.conf ================================================ filter { if [type] == "drupal" { grok { match => ["message", "%{SYSLOGTIMESTAMP} %{HOSTNAME} %{WORD}: %{URI:drupal_vhost}\|%{NUMBER:drupal_timestamp}\|(?[^\|]*)\|%{IP:drupal_ip}\|(?[^\|]*)\|(?[^\|]*)\|(?[^\|]*)\|(?[^\|]*)\|%{GREEDYDATA:drupal_message}" ] } } } ================================================ FILE: handlers/main.yml ================================================ --- - name: restart logstash service: name=logstash state=restarted ================================================ FILE: meta/main.yml ================================================ --- dependencies: [] galaxy_info: role_name: logstash author: geerlingguy description: Logstash for Linux. 
company: "Midwestern Mac, LLC" license: "license (BSD, MIT)" min_ansible_version: 2.10 platforms: - name: Debian versions: - all - name: Ubuntu versions: - all galaxy_tags: - web - system - monitoring - logging - logs - elk - logstash ================================================ FILE: molecule/default/converge.yml ================================================ --- - name: Converge hosts: all #become: true vars: logstash_enabled_on_boot: false pre_tasks: - name: Update apt cache. apt: update_cache=true cache_valid_time=600 when: ansible_facts.os_family == 'Debian' - name: Use Java 8 on Debian/Ubuntu. set_fact: java_packages: - openjdk-8-jdk when: ansible_facts.os_family == 'Debian' roles: - geerlingguy.java - geerlingguy.elasticsearch - geerlingguy.logstash ================================================ FILE: molecule/default/molecule.yml ================================================ --- role_name_check: 1 dependency: name: galaxy options: ignore-errors: true driver: name: docker platforms: - name: instance image: "geerlingguy/docker-${MOLECULE_DISTRO:-rockylinux9}-ansible:latest" command: ${MOLECULE_DOCKER_COMMAND:-""} volumes: - /sys/fs/cgroup:/sys/fs/cgroup:rw cgroupns_mode: host privileged: true pre_build_image: true provisioner: name: ansible playbooks: converge: ${MOLECULE_PLAYBOOK:-converge.yml} ================================================ FILE: molecule/default/requirements.yml ================================================ --- - src: geerlingguy.java - src: geerlingguy.elasticsearch ================================================ FILE: tasks/config.yml ================================================ --- - name: Create Logstash configuration files. template: src: "{{ item }}.j2" dest: "/etc/logstash/conf.d/{{ item }}" owner: root group: root mode: 0644 with_items: - 01-beats-input.conf - 30-elasticsearch-output.conf when: logstash_setup_default_config notify: restart logstash - name: Create Logstash filters. 
copy: src: "filters/{{ item }}" dest: "/etc/logstash/conf.d/{{ item }}" owner: root group: root mode: 0644 with_items: - 10-syslog.conf - 11-nginx.conf - 12-apache.conf - 14-solr.conf - 15-drupal.conf when: logstash_setup_default_config notify: restart logstash - name: Create Logstash configuration file for local syslog. template: src: 02-local-syslog-input.conf.j2 dest: /etc/logstash/conf.d/02-local-syslog-input.conf owner: root group: root mode: 0644 when: logstash_monitor_local_syslog notify: restart logstash - name: Ensure configuration for local syslog is absent if disabled. file: path: /etc/logstash/conf.d/02-local-syslog-input.conf state: absent when: not logstash_monitor_local_syslog notify: restart logstash ================================================ FILE: tasks/main.yml ================================================ --- - name: Include OS Specific setup tasks include_tasks: setup-{{ ansible_facts.os_family }}.yml - include_tasks: config.yml - include_tasks: ssl.yml - include_tasks: plugins.yml - name: Ensure Logstash is started and enabled on boot. service: name: logstash state: started enabled: "{{ logstash_enabled_on_boot }}" ================================================ FILE: tasks/plugins.yml ================================================ --- - name: Get list of installed plugins. command: > ./bin/logstash-plugin list chdir={{ logstash_dir }} register: logstash_plugins_list changed_when: false - name: Install configured plugins. command: > ./bin/logstash-plugin install {{ item }} chdir={{ logstash_dir }} with_items: "{{ logstash_install_plugins }}" when: "item not in logstash_plugins_list.stdout" notify: restart logstash ================================================ FILE: tasks/setup-Debian.yml ================================================ --- - name: Ensure required dependencies are installed. apt: name: - apt-transport-https - gnupg2 state: present - name: Add Elasticsearch apt key. 
apt_key: url: https://artifacts.elastic.co/GPG-KEY-elasticsearch state: present - name: Add Logstash repository. apt_repository: repo: 'deb https://artifacts.elastic.co/packages/{{ logstash_version }}/apt stable main' state: present update_cache: true - name: Install Logstash. apt: name: '{{ logstash_package }}' state: present - name: Add Logstash user to adm group (Debian). user: name: logstash group: logstash groups: adm notify: restart logstash ================================================ FILE: tasks/setup-RedHat.yml ================================================ --- - name: Add Elasticsearch GPG key. rpm_key: key: https://artifacts.elastic.co/GPG-KEY-elasticsearch state: present - name: Add Logstash repository. template: src: logstash.repo.j2 dest: /etc/yum.repos.d/logstash.repo mode: 0644 - name: Install Logstash. package: name: '{{ logstash_package }}' state: present ================================================ FILE: tasks/ssl.yml ================================================ --- - name: Ensure Logstash SSL key pair directory exists. file: path: "{{ logstash_ssl_dir }}" state: directory mode: 0755 when: logstash_ssl_key_file | length > 0 - name: Copy SSL key and cert for logstash-forwarder. 
copy: src: "{{ item }}" dest: "{{ logstash_ssl_dir }}/{{ item | basename }}" mode: 0644 with_items: - "{{ logstash_ssl_key_file }}" - "{{ logstash_ssl_certificate_file }}" notify: restart logstash when: logstash_ssl_key_file | length > 0 ================================================ FILE: templates/01-beats-input.conf.j2 ================================================ input { beats { port => {{ logstash_listen_port_beats }} {% if logstash_ssl_certificate_file and logstash_ssl_key_file %} ssl => true ssl_certificate => "{{ logstash_ssl_dir }}/{{ logstash_ssl_certificate_file | basename }}" ssl_key => "{{ logstash_ssl_dir }}/{{ logstash_ssl_key_file | basename }}" ssl_verify_mode => "peer" ssl_certificate_authorities => "{{ logstash_ssl_dir }}/{{ logstash_ssl_certificate_file | basename }}" {% endif %} } } ================================================ FILE: templates/02-local-syslog-input.conf.j2 ================================================ input { file { path => "{{ logstash_local_syslog_path }}" } } ================================================ FILE: templates/30-elasticsearch-output.conf.j2 ================================================ output { elasticsearch { hosts => {{ logstash_elasticsearch_hosts | to_json }} index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}" } } ================================================ FILE: templates/logstash.repo.j2 ================================================ [logstash-{{ logstash_version }}] name=Elastic repository for {{ logstash_version }} packages baseurl=https://artifacts.elastic.co/packages/{{ logstash_version }}/yum gpgcheck=1 gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch enabled=1 autorefresh=1 type=rpm-md