Commit 4fedf8df authored by Domenico Giordano

adding fluentd image

parent 67eaa134
@@ -64,7 +64,7 @@ job_build_compose_pipelines_image:
   before_script:
     - export DOCKERFILE=$CI_PROJECT_DIR/docker-images/compose-pipelines/Dockerfile
     - export CONTEXT=$CI_PROJECT_DIR
-    - export DESTINATIONS="--destination $CI_REGISTRY_IMAGE/compose-pipelines:latest --destination $CI_REGISTRY_IMAGE/compose-pipelines:cimaster-${CI_COMMIT_SHA:0:8}"
+    - export DESTINATIONS="--destination $CI_REGISTRY_IMAGE/compose-pipelines:latest --destination $CI_REGISTRY_IMAGE/compose-pipelines:ci-${CI_COMMIT_BRANCH}-${CI_COMMIT_SHA:0:8}"
   <<: *template_build_image
   only:
     changes:
@@ -72,6 +72,18 @@ job_build_compose_pipelines_image:
   #refs:
   #  - master

+job_build_fluentd_image:
+  stage: build-images
+  before_script:
+    - export DOCKERFILE=$CI_PROJECT_DIR/docker-images/fluentd/Dockerfile
+    - export CONTEXT=$CI_PROJECT_DIR
+    - export DESTINATIONS="--destination $CI_REGISTRY_IMAGE/fluentd:latest --destination $CI_REGISTRY_IMAGE/fluentd:ci-${CI_COMMIT_BRANCH}-${CI_COMMIT_SHA:0:8}"
+  <<: *template_build_image
+  only:
+    changes:
+      - docker-images/fluentd/*
+  #refs:
+  #  - master

 #-------------------------------------------------------------------------------------
 # Build image that runs swan spark notebook with the data-analytics libraries installed
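Both build jobs expand the template_build_image anchor, which is defined elsewhere in .gitlab-ci.yml and is not part of this diff. Since DOCKERFILE, CONTEXT and DESTINATIONS map onto kaniko's --dockerfile, --context and --destination flags, a minimal sketch of that anchor could look like the following (the executor image and the omitted registry authentication are assumptions):

.template_build_image: &template_build_image
  image:
    name: gcr.io/kaniko-project/executor:debug   # assumed executor image
    entrypoint: [""]
  script:
    # registry authentication (writing /kaniko/.docker/config.json) omitted
    - /kaniko/executor --dockerfile $DOCKERFILE --context $CONTEXT $DESTINATIONS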
docker-images/fluentd/Dockerfile
FROM fluent/fluentd
# Install the Elasticsearch output plugin used by the <match> block below
# (--no-document replaces the gem flags --no-rdoc/--no-ri removed in RubyGems 3)
RUN ["gem", "install", "fluent-plugin-elasticsearch", "--no-document"]
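# Fluentd configuration shipped with the image. The <source> below accepts
# records over the forward protocol, the transport used by Docker's fluentd
# logging driver (hence port 24224 on all interfaces).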
<source>
  @type forward
  port 24224
  bind 0.0.0.0
</source>
# Store Data in Elasticsearch
#<match *.**>
#  @type copy
#  <store>
#    @type elasticsearch
#    host elasticsearch
#    port 9200
#    include_tag_key true
#    tag_key @log_name
#    logstash_format true
#    flush_interval 10s
#  </store>
#</match>
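# Keep only records whose "log" field contains the string RESULT; everything
# else is dropped. An illustrative line (not from the source) that survives
# both this filter and the JSON parser below:
#   {"RESULT": {"score": 0.93}}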
<filter **>
  @type grep
  <regexp>
    key log
    pattern /RESULT/
  </regexp>
</filter>
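# Parse the "log" field as JSON and merge the resulting keys into the record:
# reserve_data keeps the other fields, remove_key_name_field drops the raw
# "log" string after parsing. With the illustrative line above, the record
# gains a top-level "RESULT" object.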
<filter **>
  @type parser
  key_name log
  reserve_data true
  remove_key_name_field true
  <parse>
    @type json
  </parse>
</filter>
#<filter **>
#  @type record_transformer
#  enable_ruby
#  <record>
#    @timestamp ${Time.at(time).strftime('%Y-%m-%dT%H:%M:%S')}
#  </record>
#</filter>
#<filter **>
#  @type record_transformer
#  enable_ruby
#  <record>
#    category ${record["log"].gsub(/: {.*/, '')}
#    content ${record["log"].sub(/^[^:]*: /, '')}
#  </record>
#</filter>
#<filter **>
#  @type parser
#  key_name content
#  reserve_data true
#  remove_key_name_field true
#  <parse>
#    @type json
#  </parse>
#</filter>
# Match all and print to stdout (debugging)
#<match **>
#  @type stdout
#</match>
# Store Data in Elasticsearch
<match *.**>
  @type copy
  <store>
    @type elasticsearch
    host elasticsearch
    port 9200
    index_name algo_results
    type_name fluentd
    include_tag_key true
    tag_key @log_name
    include_timestamp true
    flush_interval 3s
  </store>
</match>
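For these settings to work at runtime, a container reachable as elasticsearch must expose port 9200 and producers must ship logs to the forward input. A minimal docker-compose sketch of such a deployment (service names, image tags and the app service are illustrative assumptions, not part of this commit):

version: "3"
services:
  fluentd:
    image: "<registry>/fluentd:latest"   # image produced by job_build_fluentd_image
    ports:
      - "24224:24224"
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.17.0   # assumed version
    environment:
      - discovery.type=single-node
  app:
    image: "<your-pipeline-image>"       # hypothetical producer of RESULT lines
    logging:
      driver: fluentd
      options:
        fluentd-address: localhost:24224
        tag: pipeline.app
    depends_on:
      - fluentd

Matching records then land in the algo_results index; note that type_name has no effect on recent Elasticsearch versions, where mapping types were removed.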