# Vector pipeline that collects Celery logs and sends them to Elasticsearch.
# Flow: file source -> remap transform (strip timestamp prefix) -> ES sink.

[sources.celery_logs_from_file]
type = "file"
include = [
    "/var/log/armaconsole/celeryd.log",
    "/var/log/armaconsole/celerybeat.log",
]

# Strip the leading "[YYYY-MM-DD HH:MM:SS,mmm: ..." prefix that Celery writes
# at the start of each line, then rebuild the event with a fresh timestamp.
[transforms.parse_celery_logs]
type = "remap"
inputs = ["celery_logs_from_file"]
source = '''
source_file = .file
# Initialize before the branches: VRL requires variables to be defined on
# every path, and these defaults keep the event intact should the filename
# match neither suffix (cannot happen with the current `include` list).
err = null
message = .message
# NOTE(review): both branches currently use the identical pattern; they are
# kept separate in case celeryd/celerybeat formats diverge later.
if ends_with(.file, "celerybeat.log") {
    parsed, err = parse_regex(.message, r'\[(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*:\s)[^ ]* (?P<message>.*)')
    message = replace(.message, parsed.timestamp, "") ?? ""
} else if ends_with(.file, "celeryd.log") {
    parsed, err = parse_regex(.message, r'\[(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*:\s)[^ ]* (?P<message>.*)')
    message = replace(.message, parsed.timestamp, "") ?? ""
}
# Drop lines that do not match the expected Celery log layout.
if err != null {
    abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''

[sinks.celery_logs_to_es]
type = "elasticsearch"
inputs = ["parse_celery_logs"]
compression = "none"
healthcheck = true
auth.strategy = "basic"
# The {{ ... }} values are filled in by an external templating step
# (e.g. Ansible) before Vector reads this file; TOML itself does not
# perform any interpolation.
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
# Deduplicate events in Elasticsearch by their UUID field.
id_key = "event_uuid"