MacOS: Python: Removing duplicate emails with the Elasticsearch/Kibana Python API (Version V3)

It turns out that among the 200,000 emails I probably have duplicates… so I am going to take advantage of the export to Elasticsearch/Kibana to find them. An email with the same size and the same MD5 checksum will be considered a duplicate.

So here is version V3 (without the actual file deletion: the os.unlink(path) call stays commented out).

#!/usr/bin/env python3

import email
import plistlib
import hashlib
import re
import glob, os
import string
from datetime import datetime
from email.utils import parsedate_to_datetime
from email.header import Header, decode_header, make_header
from elasticsearch import Elasticsearch 

class Emlx(object):
    """Minimal reader for Apple Mail .emlx files: a byte-count line,
    the raw RFC 822 message, then an XML plist with Mail metadata."""

    def __init__(self):
        super(Emlx, self).__init__()
        self.bytecount = 0
        self.msg_data = None
        self.msg_plist = None

    def parse(self, filename_path):
        with open(filename_path, "rb") as f:
            # The first line holds the size of the message part in bytes.
            self.bytecount = int(f.readline().strip())
            self.msg_data = email.message_from_bytes(f.read(self.bytecount))
            # Whatever remains is the Mail metadata plist.
            self.msg_plist = plistlib.loads(f.read())
        return self.msg_data, self.msg_plist

def md5(fname):
    # Hash the file in 4 KB chunks so large attachments are never fully loaded in memory.
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()

if __name__ == '__main__':
    msg = Emlx()
    nb_parse = 0
    nb_error = 0
    save_space = 0      # bytes that could be reclaimed by deleting duplicates
    list_email = []     # MD5 checksums already seen
    printable = set(string.printable)
    path_mail = "/Users/MonLogin/Library/Mail/V6/"
    es_keys = "mail"
    es = Elasticsearch([{'host': 'localhost', 'port': 9200}])
    for root, dirs, files in os.walk(path_mail):
        for file in files:
            if file.endswith(".emlx"):
                file_full = os.path.join(root, file)
                # Same MD5 as an already-seen file => duplicate.
                my_check = md5(file_full)
                my_count = list_email.count(my_check)
                list_email.append(my_check)
                message, plist = msg.parse(file_full)
                statinfo = os.stat(file_full)
                if my_count > 0:
                    save_space += int(statinfo.st_size)
                    #os.unlink(file_full)
                my_date = message['Date']
                my_id = message['Message-ID']
                my_server = message['Received']
                my_date_str = ""
                if my_date is not None and not isinstance(my_date, Header):
                    try:
                        my_date_str = datetime.fromtimestamp(parsedate_to_datetime(my_date).timestamp()).strftime('%Y-%m-%dT%H:%M:%S')
                    except Exception:
                        my_date_str = ""
                # Decode the From header (RFC 2047) before extracting name and domain.
                my_email = str(make_header(decode_header(str(message['From']))))
                my_domain = re.search(r"@[\w.\-]+", my_email)
                my_name = re.search(r"[\w.\-]+@", my_email)
                json = '{"checksum":"' + my_check + '","count":"' + str(my_count) + '","size":' + str(statinfo.st_size)
                if my_domain is not None and my_name is not None:
                    json = json + ',"name":"' + my_name.group().lower() + '","domain":"' + my_domain.group().lower() + '"'
                else:
                    # No parsable address: keep an ASCII-only, cleaned-up From header.
                    my_email = my_email.replace(",", "").replace('"', '')
                    my_email = re.sub(r'[^\x00-\x7f]', r'', my_email).lower()
                    json = json + ',"name":"' + my_email + '","domain":"None"'
                if my_date is not None and len(my_date_str) > 1:
                    json = json + ',"date":"' + my_date_str + '","id":' + str(nb_parse)
                else:
                    json = json + ',"id":' + str(nb_parse)
                if my_server is not None and not isinstance(my_server, Header):
                    # First IPv4 address found in the Received header.
                    ip = re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(my_server))
                    if ip is not None:
                        json = json + ',"ip":"' + ip.group() + '"'
                if my_id is not None and not isinstance(my_id, Header):
                    try:
                        my_id = my_id.strip()
                        json = json + ',"Message-ID":"' + my_id + '","file":"' + file + '"}'
                    except Exception:
                        json = json + ',"file":"' + file + '"}'
                else:
                    json = json + ',"file":"' + file + '"}'
                print(json)
                try:
                    res = es.index(index=es_keys, doc_type='emlx', id=nb_parse, body=json)
                except Exception:
                    nb_error += 1
                nb_parse += 1
                #print(plist)
    print(nb_parse)
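
Once the messages are indexed, the duplicates can be listed straight from Elasticsearch with a terms aggregation on the checksum field. This is a minimal sketch, assuming the "mail" index created above and the default dynamic mapping (which adds a .keyword sub-field to strings):

#!/usr/bin/env python3
# Sketch: list the MD5 checksums that appear more than once in the "mail" index.
from elasticsearch import Elasticsearch

es = Elasticsearch([{'host': 'localhost', 'port': 9200}])
query = {
    "size": 0,
    "aggs": {
        "duplicates": {
            "terms": {
                "field": "checksum.keyword",   # keyword sub-field from the default mapping
                "min_doc_count": 2,            # only checksums seen at least twice
                "size": 100
            }
        }
    }
}
res = es.search(index="mail", body=query)
for bucket in res["aggregations"]["duplicates"]["buckets"]:
    print(bucket["key"], bucket["doc_count"])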

To be continued with V4!

OSMC/Raspberry: Adding ELK (Elasticsearch / Logstash / Kibana / Beats / Nginx)

The goal is to install ELK on an OSMC/Raspberry that is already up and running… so as not to have to buy a new Raspberry Pi.

For the OSMC installation, see « Installation un media-center avec OSMC sur un Raspberry Pi 3 Model B »: https://www.cyber-neurones.org/2016/09/installation-un-media-center-avec-osmc-sur-un-raspberry-pi-3-model-b/

The first step is to open a terminal and SSH to the box's IP with the login osmc and the password osmc (if it has not been changed):
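
For example (192.168.0.28 being the OSMC IP used later in this post):

ssh osmc@192.168.0.28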

Some information about the system before the installations:

# hostnamectl 
   Static hostname: osmc
         Icon name: computer
           Chassis: n/a
        Machine ID: 1671b5b617884fdf85a465a5ac731b8f
           Boot ID: aa6d496d3a6f4261bc752f881dec919b
  Operating System: Open Source Media Center
            Kernel: Linux 4.4.16-4-osmc
      Architecture: arm
# cat /proc/cpuinfo 
processor	: 0
model name	: ARMv7 Processor rev 4 (v7l)
BogoMIPS	: 38.40
Features	: half thumb fastmult vfp edsp neon vfpv3 tls vfpv4 idiva idivt vfpd32 lpae evtstrm crc32 
CPU implementer	: 0x41
CPU architecture: 7
CPU variant	: 0x0
CPU part	: 0xd03
CPU revision	: 4

processor	: 1
model name	: ARMv7 Processor rev 4 (v7l)
BogoMIPS	: 38.40
Features	: half thumb fastmult vfp edsp neon vfpv3 tls vfpv4 idiva idivt vfpd32 lpae evtstrm crc32 
CPU implementer	: 0x41
CPU architecture: 7
CPU variant	: 0x0
CPU part	: 0xd03
CPU revision	: 4

processor	: 2
model name	: ARMv7 Processor rev 4 (v7l)
BogoMIPS	: 38.40
Features	: half thumb fastmult vfp edsp neon vfpv3 tls vfpv4 idiva idivt vfpd32 lpae evtstrm crc32 
CPU implementer	: 0x41
CPU architecture: 7
CPU variant	: 0x0
CPU part	: 0xd03
CPU revision	: 4

processor	: 3
model name	: ARMv7 Processor rev 4 (v7l)
BogoMIPS	: 38.40
Features	: half thumb fastmult vfp edsp neon vfpv3 tls vfpv4 idiva idivt vfpd32 lpae evtstrm crc32 
CPU implementer	: 0x41
CPU architecture: 7
CPU variant	: 0x0
CPU part	: 0xd03
CPU revision	: 4

Hardware	: BCM2709
Revision	: a22082
Serial		: 00000000bf2140a5
# uptime 
 17:36:15 up  1:42,  1 user,  load average: 0,04, 0,26, 0,22

Step 1: Installing Java (nobody is perfect)

# sudo apt-get install software-properties-common
# sudo add-apt-repository ppa:webupd8team/java
# sudo apt-get update
# sudo apt-get install oracle-java8-installer
-> Error: Unable to locate package oracle-java8-installer
# sudo add-apt-repository ppa:openjdk-r/ppa
# sudo apt-get update
# sudo apt-get install openjdk-8-jdk
-> Error: Package 'openjdk-8-jdk' has no installation candidate
-->W: Failed to fetch http://ppa.launchpad.net/openjdk-r/ppa/ubuntu/dists/jessie/main/binary-armhf/Packages  404  Not Found
-->W: Failed to fetch http://ppa.launchpad.net/webupd8team/java/ubuntu/dists/jessie/main/binary-armhf/Packages  404  Not Found

Java misery…

# sudo -s
# echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" \
    | tee /etc/apt/sources.list.d/webupd8team-java.list
# echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" \
    | tee -a /etc/apt/sources.list.d/webupd8team-java.list
# apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys EEA14886
# apt-get update
# apt-get install oracle-java8-installer
exit

I really dislike Java… quick test:

# java -version
java version "1.8.0_181"
Java(TM) SE Runtime Environment (build 1.8.0_181-b13)
Java HotSpot(TM) Client VM (build 25.181-b13, mixed mode)

But I think the cleaner approach would have been:

# sudo apt-get install default-jre
# java -version
java version "1.8.0_181"
Java(TM) SE Runtime Environment (build 1.8.0_181-b13)
Java HotSpot(TM) Client VM (build 25.181-b13, mixed mode)

With that command we would have had the default JRE…

Step 2: Installing the tools.

# apt-get install -y wget apt-transport-https

Step 3: Installing Elasticsearch

# wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
# echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elk.list
# apt-get update
# apt-get install -y elasticsearch
--> Error: Unable to locate package elasticsearch

Let's try plan B:

# sudo mkdir /usr/share/elasticsearch
# cd /usr/share/elasticsearch
# wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
# apt-get update
# apt-get install elasticsearch
--> Error: Unable to locate package elasticsearch

Let's try plan C:

# echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-6.x.list
# sudo apt-get update && sudo apt-get install elasticsearch

Let's try plan D:

# sudo wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.5.2.deb
# sudo dpkg -i elasticsearch-5.5.2.deb

Edit the file /etc/elasticsearch/elasticsearch.yml:

# cat /etc/elasticsearch/elasticsearch.yml | grep -v "#"
network.host: 127.0.0.1
http.port: 9200

Start it up:

# sudo service elasticsearch start
# sudo service elasticsearch status
● elasticsearch.service - Elasticsearch
   Loaded: loaded (/usr/lib/systemd/system/elasticsearch.service; disabled)
   Active: failed (Result: exit-code) since mer. 2018-09-19 18:07:39 UTC; 2s ago
     Docs: http://www.elastic.co
  Process: 5873 ExecStart=/usr/share/elasticsearch/bin/elasticsearch -p ${PID_DIR}/elasticsearch.pid --quiet -Edefault.path.logs=${LOG_DIR} -Edefault.path.data=${DATA_DIR} -Edefault.path.conf=${CONF_DIR} (code=exited, status=1/FAILURE)
  Process: 5869 ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec (code=exited, status=0/SUCCESS)
 Main PID: 5873 (code=exited, status=1/FAILURE)

sept. 19 18:07:39 osmc elasticsearch[5873]: Error occurred during initialization of VM
sept. 19 18:07:39 osmc elasticsearch[5873]: Could not reserve enough space for 2097152KB object heap
sept. 19 18:07:39 osmc systemd[1]: elasticsearch.service: main process exited, code=exited, status=1/FAILURE
sept. 19 18:07:39 osmc systemd[1]: Unit elasticsearch.service entered failed state.

Misery… Java is starting to wear me down… Edit the file /etc/elasticsearch/jvm.options to shrink the heap:

# cat /etc/elasticsearch/jvm.options  | grep Xm
## -Xms4g
## -Xmx4g
# Xms represents the initial size of total heap space
# Xmx represents the maximum size of total heap space
#-Xms2g
-Xms200m
#-Xmx2g
-Xmx500m

New test:

# sudo service elasticsearch start
# sudo service elasticsearch status
● elasticsearch.service - Elasticsearch
   Loaded: loaded (/usr/lib/systemd/system/elasticsearch.service; disabled)
   Active: active (running) since mer. 2018-09-19 18:11:26 UTC; 3s ago
     Docs: http://www.elastic.co
  Process: 5940 ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec (code=exited, status=0/SUCCESS)
 Main PID: 5944 (java)
   CGroup: /system.slice/elasticsearch.service
           └─5944 /usr/bin/java -Xms200m -Xmx500m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+AlwaysPreTouch -server -Xss1m -Djava....
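
A quick check that it really answers on port 9200 (wget was installed in Step 2):

# wget -qO- "http://127.0.0.1:9200/"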

Step 4: Installing Logstash:

# sudo wget https://artifacts.elastic.co/downloads/logstash/logstash-5.5.2.deb
# sudo dpkg -i logstash-5.5.2.deb
Sélection du paquet logstash précédemment désélectionné.
(Lecture de la base de données... 26506 fichiers et répertoires déjà installés.)
Préparation du dépaquetage de logstash-5.5.2.deb ...
Dépaquetage de logstash (1:5.5.2-1) ...
Paramétrage de logstash (1:5.5.2-1) ...
Using provided startup.options file: /etc/logstash/startup.options
Java HotSpot(TM) Client VM warning: TieredCompilation is disabled in this release.
io/console on JRuby shells out to stty for most operations
/usr/share/logstash/vendor/bundle/jruby/1.9/gems/pleaserun-0.0.30/lib/pleaserun/installer.rb:46 warning: executable? does not in this environment and will return a dummy value
Successfully created system startup script for Logstash

Step 5: Installing JFFI (the native FFI library that Logstash's JRuby needs on ARM):

# sudo apt-get install ant
# sudo apt-get install git
# sudo git clone https://github.com/jnr/jffi.git
# cd jffi
# sudo ant jar
Buildfile: /root/jffi/build.xml

-pre-init:

-init-vars:
    [mkdir] Created dir: /root/jffi/build/jni

-post-init:

-init:

-pre-jar:

-pre-compile:

-do-compile:
    [mkdir] Created dir: /root/jffi/build/classes
    [javac] Compiling 42 source files to /root/jffi/build/classes
    [javac] warning: [options] bootstrap class path not set in conjunction with -source 1.6
    [javac] /root/jffi/src/main/java/com/kenai/jffi/MemoryIO.java:847: warning: Unsafe is internal proprietary API and may be removed in a future release
    [javac]         protected static sun.misc.Unsafe unsafe = sun.misc.Unsafe.class.cast(getUnsafe());
    [javac]                                  ^
    [javac] /root/jffi/src/main/java/com/kenai/jffi/MemoryIO.java:847: warning: Unsafe is internal proprietary API and may be removed in a future release
    [javac]         protected static sun.misc.Unsafe unsafe = sun.misc.Unsafe.class.cast(getUnsafe());
    [javac]                                                           ^
    [javac] Note: /root/jffi/src/main/java/com/kenai/jffi/ClosureMagazine.java uses or overrides a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.
    [javac] 3 warnings

-generate-version-source:
     [echo] Generating Version.java
    [mkdir] Created dir: /root/jffi/build/java/com/kenai/jffi

-generate-version:
    [javac] Compiling 1 source file to /root/jffi/build/classes
    [javac] warning: [options] bootstrap class path not set in conjunction with -source 1.6
    [javac] 1 warning

-compile-java:

-generate-native-headers:

-build-native-library:

BUILD FAILED
/root/jffi/build.xml:344: Execute failed: java.io.IOException: Cannot run program "make": error=2, Aucun fichier ou dossier de ce type
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048)
	at java.lang.Runtime.exec(Runtime.java:620)
	at org.apache.tools.ant.taskdefs.launcher.Java13CommandLauncher.exec(Java13CommandLauncher.java:58)
	at org.apache.tools.ant.taskdefs.Execute.launch(Execute.java:428)
	at org.apache.tools.ant.taskdefs.Execute.execute(Execute.java:442)
	at org.apache.tools.ant.taskdefs.ExecTask.runExecute(ExecTask.java:628)
	at org.apache.tools.ant.taskdefs.ExecTask.runExec(ExecTask.java:669)
	at org.apache.tools.ant.taskdefs.ExecTask.execute(ExecTask.java:495)
	at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:292)
	at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
	at org.apache.tools.ant.Task.perform(Task.java:348)
	at org.apache.tools.ant.Target.execute(Target.java:435)
	at org.apache.tools.ant.Target.performTasks(Target.java:456)
	at org.apache.tools.ant.Project.executeSortedTargets(Project.java:1393)
	at org.apache.tools.ant.Project.executeTarget(Project.java:1364)
	at org.apache.tools.ant.helper.DefaultExecutor.executeTargets(DefaultExecutor.java:41)
	at org.apache.tools.ant.Project.executeTargets(Project.java:1248)
	at org.apache.tools.ant.Main.runBuild(Main.java:851)
	at org.apache.tools.ant.Main.startAnt(Main.java:235)
	at org.apache.tools.ant.launch.Launcher.run(Launcher.java:280)
	at org.apache.tools.ant.launch.Launcher.main(Launcher.java:109)
Caused by: java.io.IOException: error=2, Aucun fichier ou dossier de ce type
	at java.lang.UNIXProcess.forkAndExec(Native Method)
	at java.lang.UNIXProcess.(UNIXProcess.java:247)
	at java.lang.ProcessImpl.start(ProcessImpl.java:134)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
	... 23 more

Total time: 11 seconds

Let's try a plan B: add the ARM libjffi directly into jruby.jar.

# sudo apt-get install zip
# cd /usr/share/logstash/vendor/jruby/lib
# sudo zip -g jruby.jar jni/arm-Linux/libjffi-1.2.so
updating: jni/arm-Linux/libjffi-1.2.so
	zip warning: Local Entry CRC does not match CD: jni/arm-Linux/libjffi-1.2.so
 (deflated 63%)

Fingers crossed… start it:

# sudo service logstash start
# sudo service logstash status
● logstash.service - logstash
   Loaded: loaded (/etc/systemd/system/logstash.service; disabled)
   Active: active (running) since mer. 2018-09-19 18:33:29 UTC; 9s ago
 Main PID: 6431 (java)
   CGroup: /system.slice/logstash.service
           └─6431 /usr/bin/java -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+DisableExplicitGC -Djava.awt.headless=...

Step 6: Installing Kibana:

# sudo wget https://artifacts.elastic.co/downloads/kibana/kibana-5.5.2-linux-x86.tar.gz
# sudo tar -xzf kibana-5.5.2-linux-x86.tar.gz
# sudo mkdir /opt/kibana/
# sudo mv kibana-5.5.2-linux-x86/ /opt/kibana/
# sudo wget https://nodejs.org/download/release/v6.10.2/node-v6.10.2-linux-armv6l.tar.gz
# tar -xzf node-v6.10.2-linux-armv6l.tar.gz
# sudo cp node-v6.10.2-linux-armv6l/bin/node /usr/local/bin/node
# sudo cp node-v6.10.2-linux-armv6l/bin/npm /usr/local/bin/npm
# apt-get install tree
# sudo mv /opt/kibana/kibana-5.5.2-linux-x86/node/bin/node  /opt/kibana/kibana-5.5.2-linux-x86/node/bin/node.orig
# sudo mv /opt/kibana/kibana-5.5.2-linux-x86/node/bin/npm  /opt/kibana/kibana-5.5.2-linux-x86/node/bin/npm.orig
# sudo ln -s /usr/local/bin/node /opt/kibana/kibana-5.5.2-linux-x86/node/bin/node
# sudo ln -s /usr/local/bin/npm /opt/kibana/kibana-5.5.2-linux-x86/node/bin/npm
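
A quick way to check that Kibana will now run the ARM build of Node:

# /opt/kibana/kibana-5.5.2-linux-x86/node/bin/node --version

It should report v6.10.2; the originally bundled x86 binary simply cannot execute on the ARM CPU, which is why it was swapped out.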

Edit the configuration files /opt/kibana/kibana-5.5.2-linux-x86/config/kibana.yml and /etc/systemd/system/kibana.service:

# cat /opt/kibana/kibana-5.5.2-linux-x86/config/kibana.yml | grep -v '^#' | grep -v '^$'
server.port: 5601
server.host: "127.0.0.1"
elasticsearch.url: "http://127.0.0.1:9200"
# cat /etc/systemd/system/kibana.service
[Unit]
Description=Kibana

[Service]
ExecStart=/opt/kibana/kibana-5.5.2-linux-x86/bin/kibana
StandardOutput=null

[Install]
WantedBy=multi-user.target
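
Since the unit file was created by hand, systemd usually has to re-read its configuration before the first start (and the service can be enabled so it comes back after a reboot):

# sudo systemctl daemon-reload
# sudo systemctl enable kibana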

Start the service:

# sudo service kibana start
# sudo service kibana status
● kibana.service - Kibana
   Loaded: loaded (/etc/systemd/system/kibana.service; disabled)
   Active: active (running) since mer. 2018-09-19 18:50:09 UTC; 2s ago
 Main PID: 7396 (node)
   CGroup: /system.slice/kibana.service
           └─7396 /opt/kibana/kibana-5.5.2-linux-x86/bin/../node/bin/node --no-warnings /opt/kibana/kibana-5.5.2-linux-x86/bin/../src/cli

Step 7: Installing Nginx:

# sudo apt-get install nginx apache2-utils
# sudo htpasswd -c /etc/nginx/htpasswd.users kibana_admin
New password: 
Re-type new password: 
Adding password for user kibana_admin

Edit /etc/nginx/sites-available/default:
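
As an illustration, a minimal reverse-proxy block in front of Kibana, protected by the htpasswd.users file created above, looks roughly like this (the server_name and the listen port are assumptions):

server {
    listen 80;
    server_name osmc.local;

    auth_basic "Restricted Access";
    auth_basic_user_file /etc/nginx/htpasswd.users;

    location / {
        proxy_pass http://127.0.0.1:5601;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}

A quick sudo nginx -t followed by sudo service nginx restart applies the change.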

Step 8: Starting all the services:

root@osmc:~# sudo service logstash restart && sudo service elasticsearch restart && sudo service kibana restart && sudo service nginx start
root@osmc:~# sudo service logstash status

● logstash.service - logstash
   Loaded: loaded (/etc/systemd/system/logstash.service; disabled)
   Active: active (running) since mer. 2018-09-19 18:56:55 UTC; 1min 25s ago
 Main PID: 7933 (java)
   CGroup: /system.slice/logstash.service
           └─7933 /usr/bin/java -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+DisableExplicitGC -Djava.awt.headless=...
root@osmc:~# 
root@osmc:~# sudo service elasticsearch status
● elasticsearch.service - Elasticsearch
   Loaded: loaded (/usr/lib/systemd/system/elasticsearch.service; disabled)
   Active: failed (Result: signal) since mer. 2018-09-19 18:58:30 UTC; 49s ago
     Docs: http://www.elastic.co
  Process: 7960 ExecStart=/usr/share/elasticsearch/bin/elasticsearch -p ${PID_DIR}/elasticsearch.pid --quiet -Edefault.path.logs=${LOG_DIR} -Edefault.path.data=${DATA_DIR} -Edefault.path.conf=${CONF_DIR} (code=killed, signal=KILL)
  Process: 7956 ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec (code=exited, status=0/SUCCESS)
 Main PID: 7960 (code=killed, signal=KILL)

sept. 19 18:58:30 osmc systemd[1]: elasticsearch.service: main process exited, code=killed, status=9/KILL
sept. 19 18:58:30 osmc systemd[1]: Unit elasticsearch.service entered failed state.
root@osmc:~# sudo service kibana status
● kibana.service - Kibana
   Loaded: loaded (/etc/systemd/system/kibana.service; disabled)
   Active: active (running) since mer. 2018-09-19 18:56:55 UTC; 2min 40s ago
 Main PID: 7985 (node)
   CGroup: /system.slice/kibana.service
           └─7985 /opt/kibana/kibana-5.5.2-linux-x86/bin/../node/bin/node --no-warnings /opt/kibana/kibana-5.5.2-linux-x86/bin/../src/cli
root@osmc:~# sudo service nginx status
● nginx.service - A high performance web server and a reverse proxy server
   Loaded: loaded (/lib/systemd/system/nginx.service; enabled)
   Active: active (running) since mer. 2018-09-19 18:54:47 UTC; 4min 59s ago
 Main PID: 7783 (nginx)
   CGroup: /system.slice/nginx.service
           ├─7783 nginx: master process /usr/sbin/nginx -g daemon on; master_process on;
           ├─7784 nginx: worker process
           ├─7785 nginx: worker process
           ├─7786 nginx: worker process
           └─7787 nginx: worker process

One of the four is not working… Misery.

root@osmc:~# sudo service elasticsearch start
root@osmc:~# sudo service elasticsearch status
● elasticsearch.service - Elasticsearch
   Loaded: loaded (/usr/lib/systemd/system/elasticsearch.service; disabled)
   Active: active (running) since mer. 2018-09-19 19:00:04 UTC; 43s ago
     Docs: http://www.elastic.co
  Process: 8208 ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec (code=exited, status=0/SUCCESS)
 Main PID: 8213 (java)
   CGroup: /system.slice/elasticsearch.service
           └─8213 /usr/bin/java -Xms200m -Xmx500m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+AlwaysPreTouch -server -Xss1m -Djava...

Long live Java… and then it crashes, without leaving many logs:

root@osmc:~# tail -f /var/log/elasticsearch/elasticsearch.log 
[2018-09-19T19:00:44,349][INFO ][o.e.n.Node               ] initialized
[2018-09-19T19:00:44,350][INFO ][o.e.n.Node               ] [feSXsTX] starting ...
[2018-09-19T19:00:45,591][INFO ][o.e.t.TransportService   ] [feSXsTX] publish_address {127.0.0.1:9300}, bound_addresses {127.0.0.1:9300}
[2018-09-19T19:00:45,699][WARN ][o.e.b.BootstrapChecks    ] [feSXsTX] initial heap size [209715200] not equal to maximum heap size [524288000]; this can cause resize pauses and prevents mlockall from locking the entire heap
[2018-09-19T19:00:45,700][WARN ][o.e.b.BootstrapChecks    ] [feSXsTX] system call filters failed to install; check the logs and fix your configuration or disable system call filters at your own risk
[2018-09-19T19:00:48,977][INFO ][o.e.c.s.ClusterService   ] [feSXsTX] new_master {feSXsTX}{feSXsTXeQw-AEPi_pWmySw}{FlzLJ3stTwO--_vZD3nxLw}{127.0.0.1}{127.0.0.1:9300}, reason: zen-disco-elected-as-master ([0] nodes joined)
[2018-09-19T19:00:49,201][INFO ][o.e.h.n.Netty4HttpServerTransport] [feSXsTX] publish_address {127.0.0.1:9200}, bound_addresses {127.0.0.1:9200}
[2018-09-19T19:00:49,202][INFO ][o.e.n.Node               ] [feSXsTX] started
[2018-09-19T19:00:50,662][INFO ][o.e.g.GatewayService     ] [feSXsTX] recovered [1] indices into cluster_state
[2018-09-19T19:00:54,270][INFO ][o.e.c.r.a.AllocationService] [feSXsTX] Cluster health status changed from [RED] to [YELLOW] (reason: [shards started [[.kibana][0]] ...]).

So I made a change to the memory settings:

[2018-09-19T19:08:50,943][INFO ][o.e.n.Node               ] JVM arguments [-Xms100m, -Xmx300m, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -Djdk.io.permissionsUseCanonicalPath=true, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j.skipJansi=true, -XX:+HeapDumpOnOutOfMemoryError, -Des.path.home=/usr/share/elasticsearch]
[2018-09-19T19:08:59,377][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [aggs-matrix-stats]
[2018-09-19T19:08:59,378][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [ingest-common]
[2018-09-19T19:08:59,379][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [lang-expression]
[2018-09-19T19:08:59,380][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [lang-groovy]
[2018-09-19T19:08:59,381][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [lang-mustache]
[2018-09-19T19:08:59,382][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [lang-painless]
[2018-09-19T19:08:59,383][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [parent-join]
[2018-09-19T19:08:59,384][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [percolator]
[2018-09-19T19:08:59,384][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [reindex]
[2018-09-19T19:08:59,385][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [transport-netty3]
[2018-09-19T19:08:59,386][INFO ][o.e.p.PluginsService     ] [feSXsTX] loaded module [transport-netty4]
[2018-09-19T19:08:59,389][INFO ][o.e.p.PluginsService     ] [feSXsTX] no plugins loaded
[2018-09-19T19:09:10,792][INFO ][o.e.d.DiscoveryModule    ] [feSXsTX] using discovery type [zen]
[2018-09-19T19:09:14,675][INFO ][o.e.n.Node               ] initialized
[2018-09-19T19:09:14,677][INFO ][o.e.n.Node               ] [feSXsTX] starting ...
[2018-09-19T19:09:15,785][INFO ][o.e.t.TransportService   ] [feSXsTX] publish_address {127.0.0.1:9300}, bound_addresses {127.0.0.1:9300}
[2018-09-19T19:09:15,878][WARN ][o.e.b.BootstrapChecks    ] [feSXsTX] initial heap size [104857600] not equal to maximum heap size [314572800]; this can cause resize pauses and prevents mlockall from locking the entire heap
[2018-09-19T19:09:15,879][WARN ][o.e.b.BootstrapChecks    ] [feSXsTX] system call filters failed to install; check the logs and fix your configuration or disable system call filters at your own risk
[2018-09-19T19:09:19,189][INFO ][o.e.c.s.ClusterService   ] [feSXsTX] new_master {feSXsTX}{feSXsTXeQw-AEPi_pWmySw}{GJAcwscZQNacEta1vC5mPA}{127.0.0.1}{127.0.0.1:9300}, reason: zen-disco-elected-as-master ([0] nodes joined)
[2018-09-19T19:09:19,320][INFO ][o.e.h.n.Netty4HttpServerTransport] [feSXsTX] publish_address {127.0.0.1:9200}, bound_addresses {127.0.0.1:9200}
[2018-09-19T19:09:19,321][INFO ][o.e.n.Node               ] [feSXsTX] started
[2018-09-19T19:09:20,504][INFO ][o.e.g.GatewayService     ] [feSXsTX] recovered [1] indices into cluster_state
[2018-09-19T19:09:21,932][INFO ][o.e.c.r.a.AllocationService] [feSXsTX] Cluster health status changed from [RED] to [YELLOW] (reason: [shards started [[.kibana][0]] ...]).

Or even the full history of JVM arguments used:

# grep "JVM arguments " /var/log/elasticsearch/elasticsearch.log
[2018-09-19T18:11:34,304][INFO ][o.e.n.Node               ] JVM arguments [-Xms200m, -Xmx500m, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -Djdk.io.permissionsUseCanonicalPath=true, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j.skipJansi=true, -XX:+HeapDumpOnOutOfMemoryError, -Des.path.home=/usr/share/elasticsearch]
[2018-09-19T18:57:06,655][INFO ][o.e.n.Node               ] JVM arguments [-Xms200m, -Xmx500m, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -Djdk.io.permissionsUseCanonicalPath=true, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j.skipJansi=true, -XX:+HeapDumpOnOutOfMemoryError, -Des.path.home=/usr/share/elasticsearch]
[2018-09-19T19:00:14,095][INFO ][o.e.n.Node               ] JVM arguments [-Xms200m, -Xmx500m, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -Djdk.io.permissionsUseCanonicalPath=true, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j.skipJansi=true, -XX:+HeapDumpOnOutOfMemoryError, -Des.path.home=/usr/share/elasticsearch]
[2018-09-19T19:08:50,943][INFO ][o.e.n.Node               ] JVM arguments [-Xms100m, -Xmx300m, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -XX:+AlwaysPreTouch, -Xss1m, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djna.nosys=true, -Djdk.io.permissionsUseCanonicalPath=true, -Dio.netty.noUnsafe=true, -Dio.netty.noKeySetOptimization=true, -Dio.netty.recycler.maxCapacityPerThread=0, -Dlog4j.shutdownHookEnabled=false, -Dlog4j2.disable.jmx=true, -Dlog4j.skipJansi=true, -XX:+HeapDumpOnOutOfMemoryError, -Des.path.home=/usr/share/elasticsearch]

So I still need to understand why Elasticsearch keeps crashing…

Note what I get when I open http://192.168.0.28/ (the OSMC IP):

But then it gets more complicated:

To be continued… after a long while, Kibana finally became reachable (but with Status: Red):
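
To track down the Red status, it is often quicker to ask Elasticsearch directly; its health status field reports the same red/yellow/green as Kibana:

# wget -qO- "http://127.0.0.1:9200/_cluster/health?pretty"
# wget -qO- "http://127.0.0.1:9200/_cat/indices?v"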

I have just modified /etc/logstash/jvm.options as well:

-Xms256m
-Xmx400m