# Vector pipeline that collects logs and sends them to Elasticsearch

[sources.es_logs_from_file]
type = "file"
include = ["/var/log/elasticsearch/console-cluster.log"]

# Parse the timestamp prefix and message out of each raw log line
[transforms.parse_es_logs]
type = "remap"
inputs = ["es_logs_from_file"]
source = '''
# Keep the source file path before the event is rebuilt
source_file = .file

# Split the line into a bracketed timestamp and the remaining message
parsed, err = parse_regex(.message, r'(?P<timestamp>\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2},\d*\])(?P<message>.*)')
message = parsed.message

# Drop events that do not match the expected format
if err != null {
  abort
}

# Rebuild the event with only the fields we need
. = {}
.timestamp = now()
.message = message
.file = source_file
'''

[sinks.es_logs_to_es]
type = "elasticsearch"
inputs = ["parse_es_logs"]
compression = "none"
healthcheck = true
auth.strategy = "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"
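
# Optional debugging aid (illustrative sketch, not part of the original pipeline):
# a console sink that prints the parsed events to stdout so the remap regex can
# be checked locally. The sink name "debug_console" is arbitrary. Uncomment the
# block below and run `vector --config <this file>` to see the output, or run
# `vector validate <this file>` to check the configuration first.
#
# [sinks.debug_console]
# type = "console"
# inputs = ["parse_es_logs"]
# encoding.codec = "json"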