diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5226965c3afd91758fe061757c4186de5187c528..4e3845a61291008a27291b11b0eb04042a939f19 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,7 +22,7 @@ build:
 # but we use Maven POMs to maintain metadata and interact with gitflow
 .kanikoSetup: &kanikoSetup
   image: 
-      name: gitlab-registry.cern.ch/ci-tools/docker-image-builder
+      name: gitlab-registry.cern.ch/ci-tools/docker-image-builder:latest
       entrypoint: [""]
   before_script:
     - POM_VERSION=`cat .POM_VERSION`
@@ -46,5 +46,13 @@ push tagged version:
   only: 
     - tags
     
+push stable version:
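+  # Deploy job: rebuilds the image from master and publishes it as the moving :stable tag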
+  <<: *kanikoSetup
+  stage: deploy
+  script:
+    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --build-arg=SPARK_NXCALS_URL=${SPARK_NXCALS_URL} --build-arg=FROM=${FROM} --destination $CI_REGISTRY_IMAGE:stable
+  only: 
+    - master
 
     
diff --git a/Dockerfile b/Dockerfile
index 4018f08c4eec3b494c8e0d75f3414395d6be533b..434c94e6f25efc21db1261b77ec1f8519669a193 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -3,7 +3,10 @@ FROM $FROM
 
 ARG SPARK_NXCALS_URL=http://photons-resources.cern.ch/downloads/nxcals_pro/spark/spark-nxcals.zip
 
-RUN yum update -y && yum install -y python3 python3-pip python3-virtualenv git unzip bsdtar rsync CERN-CA-certs which && \
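+# Register the InfluxData upstream yum repo (influxdb.centos7.repo, added below) so the influxdb package can be installed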
+COPY influxdb.centos7.repo /etc/yum.repos.d/influxdb.repo
+
+RUN yum update -y && yum install -y python3 python3-cython python3-pip python3-virtualenv git unzip bsdtar rsync CERN-CA-certs which influxdb && \
     yum clean all
 
 ENV SPARK_HOME=/opt/nxcals-spark
@@ -25,10 +28,12 @@ USER etlworker
 RUN cd $SPARK_HOME && \
     yes | source $SPARK_HOME/source-me.sh && \
     source $SPARK_HOME/nxcals-python3-env/bin/activate && \
-    pip3 list && pip3 install pyspark==2.3.2 pandas scipy influxdb pyarrow 
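+    # the [sql] extra pulls in pandas/pyarrow integration; pyarrow is pinned to 1.0.0 (assumption: compatibility pin for this Spark 2.3 stack)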
+    pip3 list && pip3 install pyspark[sql]==2.3.2 pandas scipy influxdb pyarrow==1.0.0
 
-RUN source $SPARK_HOME/nxcals-python3-env/bin/activate && \
-    pip install git+https://gitlab.cern.ch/industrial-controls/services/dash/dash-etl.git@master
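+# Install the dash-etl package into the NXCALS python env (now tracking the develop branch)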
+RUN source $SPARK_HOME/nxcals-python3-env/bin/activate && \
+    pip3 install git+https://gitlab.cern.ch/industrial-controls/services/dash/dash-etl.git@develop
 
 VOLUME /auth
 
diff --git a/README.md b/README.md
index 7878b976ba458b8ad55793a79b862021a3e6dc8c..6c8d4248dd1d165c39e794bc0fdb13d2fe5844f5 100644
--- a/README.md
+++ b/README.md
@@ -4,41 +4,36 @@ Docker image for a Dashboard ETL worker - contains software performing NXCALS ex
 
 # How to use
 
-The entrypoint triggers a mvn call.
-Mount your python script folder as a volume to /work and start up the docker image to run py-spark
-You can pass arguments directly on the command line :
-```bash
-    docker run -ti --rm -v `pwd`:/work gitlab-registry.cern.ch/industrial-controls/services/dash/worker:latest my-script.py
-```
-You can also mount `/opt/nxcals-spark/work` as a persistent volume if you wish to collect the output of your build.
-
-You can run a bash session in the container too :
-
-```bash
-   docker run -it -e KPRINCIPAL=bcopy -v ~/nxcals.keytab:/auth/private.keytab -v `pwd`/scripts:/opt/nxcals-spark/work:z etlworker bash
-
-
-# How to use
-
-
 * Generate a keytab with :
 ```
    cern-get-keytab --user --keytab nxcals.keytab
 ```
 * Provide Influxdb connectivity env variables
 * Provide parameters to your extraction script
-* Run :a
+* Run:
 
 ```bash
-docker run -e KPRINCIPAL=$USER -v `pwd`/nxcals.keytab:/auth/private.keytab -v `pwd`/myscript.py:/opt/nxcals-spark/work/script.py etlworker
+docker run --net=host -e KPRINCIPAL=$USER -v `pwd`/nxcals.keytab:/auth/private.keytab -v `pwd`/myscript.py:/opt/nxcals-spark/work/script.py etlworker
 ``` 
 
-You can also mount `/opt/nxcals-spark/work` as a persistent volume if you wish to collect the output of your build.
+* You can also mount `/opt/nxcals-spark/work` as a persistent volume if you wish to collect the output of your script, for example:
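+
+```bash
+# Sketch, assuming ./work holds the script.py that the image runs by default (as in the examples above)
+docker run --net=host -e KPRINCIPAL=$USER -v `pwd`/nxcals.keytab:/auth/private.keytab -v `pwd`/work:/opt/nxcals-spark/work:z etlworker
+```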
+* You can also specify a password instead of a keytab via the environment variable `KPASSWORD`:
+
+```bash
+# Read your password into a local shell variable (no echo)
+read -s YOUR_PASSWORD
+docker run --net=host -e KPRINCIPAL=$USER -e KPASSWORD="$YOUR_PASSWORD" -v `pwd`/myscript.py:/opt/nxcals-spark/work/script.py etlworker
+```
 
-You can run a bash session in the container too :
+* You can also run a bash session in the container:
 
 ```bash
-   docker run -it -e KPRINCIPAL=$USER -v ~/nxcals.keytab:/auth/private.keytab -v `pwd`/scripts:/opt/nxcals-spark/work:z etlworker bash
+docker run -it --net=host -e KPRINCIPAL=$USER -v ~/nxcals.keytab:/auth/private.keytab -v `pwd`/scripts:/opt/nxcals-spark/work:z etlworker bash
 ```
 
 # Development-related instructions
diff --git a/entrypoint.sh b/entrypoint.sh
index f0f4fc0f075796916f7d75951dffb34744c1f064..470851d285e5a5c336c118ec6abe4f71dd1163c5 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -3,15 +3,15 @@ source $SPARK_HOME/nxcals-python3-env/bin/activate
  
 export PYTHONPATH=$SPARK_HOME/nxcals-python3-env/lib/python3.6/site-packages:$PYTHONPATH
 
-x
 if [ -z "$KPRINCIPAL" ]
 then
     echo "Please provide Kerberos Principal with ENV KPRINCIPAL"
     exit 1
 else
     if [[ $KPASSWORD ]]; then
-      echo "Authenticating with provided password for principal $KPRINCIPAL"
-      echo "${KPASSWORD}" | kinit ${KPRINCIPAL}@CERN.CH
+      # echo "Authenticating with provided password for principal $KPRINCIPAL"
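+      # Discard kinit's stdout (the password prompt); errors still surface on stderr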
+      echo "${KPASSWORD}" | kinit ${KPRINCIPAL}@CERN.CH 1> /dev/null
     else
       if [ -f /auth/private.keytab ]
       then
@@ -24,5 +24,5 @@ else
     fi
 fi
 
-echo "Now running $@"
-exec "$@"
\ No newline at end of file
+# echo "Now running $@"
+exec "$@"
diff --git a/influxdb.centos7.repo b/influxdb.centos7.repo
new file mode 100644
index 0000000000000000000000000000000000000000..e57289e6d25e83bd386822ddd41fa94ed7c8d896
--- /dev/null
+++ b/influxdb.centos7.repo
@@ -0,0 +1,7 @@
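+# InfluxData package repository for RHEL/CentOS 7; provides the influxdb package installed by the Dockerfile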
+[influxdb]
+name = InfluxDB Repository - RHEL 7
+baseurl = https://repos.influxdata.com/rhel/7/x86_64/stable
+enabled = 1
+gpgcheck = 1
+gpgkey = https://repos.influxdata.com/influxdb.key