# Vector pipeline that collects nginx logs and sends them to Elasticsearch.
# NOTE: the "{{ ... }}" placeholders are NOT TOML interpolation (TOML has none);
# they are rendered by the deploying templating tool before Vector reads this file.

[sources.nginx_logs_from_file]
type = "file"
include = [
    "/var/log/armaconsole/nginx.error.log",
    "/var/log/armaconsole/nginx.access.log",
]

# Strip the original nginx timestamp from each line and replace the event with
# { timestamp, message, file }; Vector supplies its own timestamp via now().
[transforms.parse_nginx_logs]
type = "remap"
inputs = ["nginx_logs_from_file"]
source = '''
source_file = .file
if ends_with(.file, "nginx.error.log") {
    # error.log lines start with "YYYY/MM/DD hh:mm:ss "; capture the remainder
    # as the message. (Named groups previously read "(?P...)" with the <name>
    # stripped, which is invalid regex and left parsed.message undefined.)
    parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}) (?P<message>.*)')
    message = parsed.message
} else if ends_with(.file, "nginx.access.log") {
    # access.log embeds " [DD/Mon/YYYY:hh:mm:ss +0000]" mid-line; capture it
    # so it can be cut out of the message below.
    parsed, err = parse_regex(.message, r'(?P<timestamp>\s\[\d{2}/\D+/\d{4}:\d{2}:\d{2}:\d{2} \+\d{4}\])')
    message = replace(.message, parsed.timestamp, "") ?? ""
}
if err != null {
    abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''

[sinks.nginx_logs_to_es]
type = "elasticsearch"
inputs = ["parse_nginx_logs"]
compression = "none"
healthcheck = true
auth.strategy = "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
# NOTE(review): `normal.index` / `id_key` option names vary across Vector sink
# versions (newer releases use `bulk.index`) — confirm against the deployed
# Vector version's elasticsearch sink reference.
normal.index = "system-logs"
id_key = "event_uuid"