Update to 2021-12-01 19:13

This commit is contained in:
Daniel Berteaud
2021-12-01 19:13:34 +01:00
commit 4c4556c660
2153 changed files with 60999 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
---
# Default variables for the Elasticsearch role.
# Override per host/group inventory as needed.
# Name of the Elasticsearch cluster
es_cluster_name: elasticsearch
# Name of this ES node (defaults to the inventory hostname)
es_node_name: "{{ inventory_hostname }}"
# Port on which ES will bind
es_port: 9200
# List of IP/CIDR which will have access to es_port (if iptables_manage == True)
# Empty list means the firewall rule is removed (no remote access)
es_src_ip: []
# Path where ES will store its data
es_data_dir: /opt/elasticsearch/data
# Path where ES will store snapshots for backups (created by pre-backup, removed by post-backup)
es_backup_dir: /opt/elasticsearch/dumps

View File

@@ -0,0 +1,4 @@
---
# Restart Elasticsearch when configuration or systemd unit files change.
# Triggered by tasks using "notify: restart elasticsearch".
- name: restart elasticsearch
  service:
    name: elasticsearch
    state: restarted

View File

@@ -0,0 +1,5 @@
---
# Role dependencies, run before this role:
# - repo_elasticsearch: presumably configures the Elasticsearch package repo — verify in that role
# - mkdir: presumably provides directory-creation helpers — verify in that role
dependencies:
- role: repo_elasticsearch
- role: mkdir

View File

@@ -0,0 +1,18 @@
---
# Register a filesystem snapshot repository named "lbkp" in Elasticsearch.
# It points at es_backup_dir, which the elasticsearch.yml template lists in
# path.repo. The pre/post backup scripts create and delete snapshots in it.
- name: Declare repo in ElasticSearch
  uri:
    url: "http://localhost:{{ es_port }}/_snapshot/lbkp"
    method: PUT
    body:
      type: fs
      settings:
        compress: true
        location: "{{ es_backup_dir }}"
    body_format: json
  register: es_lbkp
  # ES may still be starting up right after (re)start, so retry until the
  # API answers successfully
  until: es_lbkp is not failed
  retries: 10
  delay: 10
  tags: es

View File

@@ -0,0 +1,9 @@
---
# Deploy Elasticsearch configuration files from templates; any change
# triggers a service restart through the handler.
- name: Deploy configuration
  template:
    src: "{{ item }}.j2"
    dest: "/etc/elasticsearch/{{ item }}"
    group: elasticsearch
    # 0660: readable/writable by owner (root) and the elasticsearch group only
    mode: "0660"
  loop:
    - elasticsearch.yml
    - log4j2.properties
  notify: restart elasticsearch
  tags: es

View File

@@ -0,0 +1,14 @@
---
# Create the data and backup directories with restrictive permissions.
- name: Ensure the data dir exists
  file:
    path: "{{ es_data_dir }}"
    state: directory
  tags: es

# We do it in two steps, so that parent dirs aren't created with restrictive permissions
- name: Restrict permissions on data dir
  file:
    path: "{{ es_data_dir }}"
    state: directory
    owner: elasticsearch
    group: elasticsearch
    mode: "0750"
  tags: es

- name: Create backup dir
  file:
    path: "{{ es_backup_dir }}"
    state: directory
    owner: elasticsearch
    group: elasticsearch
    mode: "0700"
  tags: es

View File

@@ -0,0 +1,42 @@
---
# Install Elasticsearch (OSS build) and its Java runtime, deploy the
# pre/post backup hook scripts, and harden the systemd unit.
- name: Install needed packages
  yum:
    name:
      - elasticsearch-oss
      - java-1.8.0-openjdk-headless
  tags: es

- name: Deploy pre and post backup script
  template:
    src: "{{ item }}-backup.j2"
    dest: "/etc/backup/{{ item }}.d/es"
    mode: "0750"
  loop:
    - pre
    - post
  tags: es

- name: Create systemd unit snippet dir
  file:
    path: /etc/systemd/system/elasticsearch.service.d
    state: directory
  tags: es

# Drop-in snippet: sandbox the service and override ExecStart.
# The empty "ExecStart=" line clears the vendor ExecStart before redefining it.
- name: Customize systemd unit
  copy:
    content: |
      [Service]
      ProtectSystem=full
      PrivateDevices=yes
      ProtectHome=yes
      NoNewPrivileges=yes
      SyslogIdentifier=elasticsearch
      Restart=on-failure
      ExecStart=
      ExecStart=/usr/share/elasticsearch/bin/elasticsearch -p ${PID_DIR}/elasticsearch.pid
    dest: /etc/systemd/system/elasticsearch.service.d/ansible.conf
  register: es_unit
  notify: restart elasticsearch
  tags: es

# systemd must re-read unit files before the restart handler runs
- name: Reload systemd
  systemd:
    daemon_reload: true
  when: es_unit is changed
  tags: es

View File

@@ -0,0 +1,13 @@
---
# Open the Elasticsearch HTTP port to the configured source networks.
# When es_src_ip is empty the rule is removed (state: absent).
# NOTE(review): iptables_raw is a third-party module, not part of
# ansible-core — presumably provided elsewhere in this setup; verify.
- name: Handle Elasticsearch port
  iptables_raw:
    name: "{{ item.name }}"
    state: "{{ (item.src_ip | length > 0) | ternary('present','absent') }}"
    rules: "-A INPUT -m state --state NEW -p tcp --dport {{ item.port }} -s {{ item.src_ip | join(',') }} -j ACCEPT"
  loop:
    - port: "{{ es_port }}"
      name: es_port
      src_ip: "{{ es_src_ip }}"
  tags: firewall,es

View File

@@ -0,0 +1,10 @@
---
# Entry point: install packages, create directories, deploy configuration,
# optionally manage the firewall, start the service, then register the
# backup snapshot repository (requires a running service).
# import_tasks replaces the deprecated bare "include" (removed in recent
# ansible-core); the when: condition is applied to every imported task,
# matching the original static-include behaviour.
- import_tasks: install.yml
- import_tasks: directories.yml
- import_tasks: conf.yml
- import_tasks: iptables.yml
  when: iptables_manage | default(True)
- import_tasks: services.yml
- import_tasks: backup.yml

View File

@@ -0,0 +1,6 @@
---
# Ensure Elasticsearch is running now and enabled at boot.
- name: Start and enable the service
  service:
    name: elasticsearch
    state: started
    enabled: true
  tags: es
tags: es

View File

@@ -0,0 +1,11 @@
cluster.name: {{ es_cluster_name }}
# Listen on all interfaces; access is restricted by the firewall rule
# managed in tasks/iptables.yml (es_src_ip)
network.host: 0.0.0.0
http.port: {{ es_port }}
node.name: {{ es_node_name }}
path.data: {{ es_data_dir }}
path.logs: /var/log/elasticsearch
# Must include es_backup_dir so the "lbkp" snapshot repository can be registered
path.repo: [ {{ es_backup_dir }} ]
# Indices must be created explicitly by clients
action.auto_create_index: false
{% if es_major_version is defined and es_major_version is version('7','>=') %}
# ES >= 7 requires explicit discovery configuration even for a single node
discovery.type: single-node
{% endif %}

View File

@@ -0,0 +1,28 @@
# Log4j2 configuration for Elasticsearch: everything goes to the console,
# which the systemd unit forwards to the journal (SyslogIdentifier=elasticsearch).
# Internal log4j status messages: only report errors
status = error
# log action execution errors for easier debugging
logger.action.name = org.elasticsearch.action
logger.action.level = debug
# Single console appender shared by all loggers below
appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%-5p][%-25c{1.}] %m%n
rootLogger.level = info
rootLogger.appenderRef.console.ref = console
# Deprecation warnings (additivity off so they are not logged twice via root)
logger.deprecation.name = org.elasticsearch.deprecation
logger.deprecation.level = warn
logger.deprecation.appenderRef.console.ref = console
logger.deprecation.additivity = false
# Search slowlog
logger.index_search_slowlog_rolling.name = index.search.slowlog
logger.index_search_slowlog_rolling.level = trace
logger.index_search_slowlog_rolling.appenderRef.console.ref = console
logger.index_search_slowlog_rolling.additivity = false
# Indexing slowlog
logger.index_indexing_slowlog.name = index.indexing.slowlog.index
logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.console.ref = console
logger.index_indexing_slowlog.additivity = false

View File

@@ -0,0 +1,5 @@
#!/bin/bash -e
# Post-backup hook: clean up what the pre-backup script created.
# NOTE(review): without curl -f, an HTTP error from the DELETE does not fail
# the script — snapshot removal is effectively best-effort; confirm intended.
curl -X DELETE http://localhost:{{ es_port }}/_snapshot/lbkp/lbkp
# Release the read-only bind mount set up by the pre-backup script
umount /home/lbkp/es
# Trim freed blocks on all mounted filesystems supporting discard
fstrim -a -v

View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Pre-backup hook: snapshot Elasticsearch and expose the snapshot directory
# to the backup tool as a read-only bind mount.
#
# Must run under bash: "set -o pipefail" is a bash feature, not POSIX sh —
# with the original #!/bin/sh shebang, dash aborts on the set line (because
# of -e) and the hook always fails.
set -eo pipefail
# Expose es_backup_dir read-only under the backup staging area
mkdir -p /home/lbkp/es
mount -o bind,ro {{ es_backup_dir }} /home/lbkp/es
# Create the "lbkp" snapshot synchronously; -f makes curl exit non-zero on
# HTTP errors so a failed snapshot aborts the backup run (set -e).
curl -f -X PUT http://localhost:{{ es_port }}/_snapshot/lbkp/lbkp?wait_for_completion=true