# Vector pipeline that collects PostgreSQL logs and sends them to Elasticsearch

[sources.postgresql_logs_from_file]
type = "file"
include = [
  "/var/log/postgresql/postgresql-11-main.log",
]

# Parse each log line: extract the timestamp and the message body,
# then rebuild the event with only the fields we want to ship.
[transforms.parse_postgresql_logs]
type = "remap"
inputs = [ "postgresql_logs_from_file" ]
source = '''
source_file = .file
parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d*) [^ ]* (?P<message>.*)')
# Drop events that do not match the expected log-line format.
if err != null {
  abort
}
message = parsed.message
. = {}
.timestamp = now()
.message = message
.file = source_file
# The sink below uses `event_uuid` as the document id, so generate one here.
.event_uuid = uuid_v4()
'''

[sinks.postgresql_logs_to_es]
type = "elasticsearch"
inputs = [ "parse_postgresql_logs" ]
compression = "none"
healthcheck = true
auth.strategy = "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"
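
# For reference, a sketch of what the parse step produces. Given a PostgreSQL
# log line such as (the sample line below is illustrative, not from this pipeline):
#
#   2019-07-01 12:34:56.789 UTC [1234] LOG:  database system is ready to accept connections
#
# the transform above emits an event shaped like:
#
#   { "timestamp": "<ingest time from now()>",
#     "message": "[1234] LOG:  database system is ready to accept connections",
#     "file": "/var/log/postgresql/postgresql-11-main.log",
#     "event_uuid": "<random UUID>" }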
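
# A minimal sketch for local debugging, assuming you want to inspect the
# transform output before shipping it to Elasticsearch: uncomment to print
# each event as JSON to stdout (the sink name `debug_console` is illustrative).
#
# [sinks.debug_console]
# type = "console"
# inputs = [ "parse_postgresql_logs" ]
# encoding.codec = "json"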