diff --git a/.ci/create-artifacts.py b/.ci/create-artifacts.py
deleted file mode 100755
index c7d8e4bf9941d6ac1a3611afb35b4569770e9929..0000000000000000000000000000000000000000
--- a/.ci/create-artifacts.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-import subprocess
-import shutil
-import os
-from os.path import dirname, isdir, isfile, islink, join, relpath
-import tempfile
-
-subdir = "linux-64"
-cvmfs_base = "/cvmfs/lhcbdev.cern.ch/conda"
-pkgs_dir = join(cvmfs_base, "pkgs", subdir)
-envs_dir = join(cvmfs_base, "envs")
-output_dir = join(os.getcwd(), 'build-artifacts')
-TAR_CMD = ["tar", "--zstd", "--hard-dereference", "--recursive", "--create"]
-
-if isdir(output_dir):
-    shutil.rmtree(output_dir)
-os.makedirs(output_dir)
-
-tarball_paths = set()
-
-# Package cache
-for fn in os.listdir(pkgs_dir):
-    if fn in ["cache", "urls"]:
-        continue
-    fn = join(pkgs_dir, fn)
-    if not islink(fn) and not isdir(fn):
-        print("Adding:", fn)
-        tarball_paths.add(fn)
-
-# Environments
-for dirpath, dirnames, filenames in os.walk(envs_dir, topdown=True):
-    print("Trying", dirpath)
-    if ".cvmfscatalog" in filenames:
-        print("Found environment:", dirpath)
-        tarball_paths.add(dirpath)
-        yaml_fn = dirpath + ".yaml"
-        if not isfile(yaml_fn):
-            raise RuntimeError("Failed to find yaml file" + repr(yaml_fn))
-        tarball_paths.add(yaml_fn)
-        dirnames[:] = []
-
-if not tarball_paths:
-    raise RuntimeError("Failed to find anything to upload!")
-
-with tempfile.NamedTemporaryFile(mode="wt") as fp:
-    fp.write("\n".join([relpath(fn, cvmfs_base) for fn in tarball_paths]))
-    fp.flush()
-    tar_fn = join(output_dir, f"{os.environ['CI_PIPELINE_ID']}.tar.ztd")
-    print("Creating", tar_fn)
-    subprocess.run(TAR_CMD + [
-        f"--directory={cvmfs_base}",
-        f"--file={tar_fn}",
-        "--files-from",
-        fp.name
-    ], check=True)
diff --git a/.ci/create_environments.sh b/.ci/create_environments.sh
index 1ceb54364ce759b71d064b5436e8bba49ad5f786..64c441427a0e8b48c3fada807cddd93ad512023a 100755
--- a/.ci/create_environments.sh
+++ b/.ci/create_environments.sh
@@ -1,20 +1,28 @@
 #!/usr/bin/env bash
-source /cvmfs/lhcbdev.cern.ch/conda/miniconda/linux-64/prod/bin/activate
+yum install -y bzip2
+export MAMBA_ROOT_PREFIX=$(mktemp -d)
+curl -L --silent https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+eval "$(./bin/micromamba shell hook -s bash -p "$PWD/micromamba")"
+micromamba create --yes --prefix /tmp/prefix -c conda-forge python git gitpython mamba
+micromamba activate /tmp/prefix
+
 set -euo pipefail
 IFS=$'\n\t'
 
-for ENV_NAME in $(.ci/find_new_environments.py); do
-    PREFIX=/cvmfs/lhcbdev.cern.ch/conda/envs/${ENV_NAME}/$(date -u '+%Y-%m-%d_%H-%M')/linux-64
-    echo "Creating environment for ${ENV_NAME} in ${PREFIX}"
-    mamba env create --prefix "${PREFIX}/" --file "environments/${ENV_NAME}.yaml"
-    touch "${PREFIX}/.cvmfscatalog"
-    conda env export --prefix "${PREFIX}/" | tee "${PREFIX}.yaml"
-done
+rm -rf environments-to-deploy/
+mkdir -p environments-to-deploy/
 
-# Create the artifact tarballs and upload them
-python .ci/create-artifacts.py
-du -h build-artifacts/*
+for env_name in $(.ci/find_new_environments.py); do
+    subdir=linux-64
+    timestamp=$(date -u '+%Y-%m-%d_%H-%M')
 
-if [ -n "${REQUEST_DEPLOY_URL_TOKEN+x}" ]; then
-    python .ci/upload-artifacts.py;
-fi
+    test_prefix="/tmp/${env_name}/${timestamp}/${subdir}"
+    echo "Creating environment for ${test_prefix}"
+    # Need these to be released/fixed before we can use micromamba here
+    #     https://github.com/mamba-org/mamba/pull/1422
+    #     https://github.com/mamba-org/mamba/pull/1442
+    #     micromamba env export returns an invalid file
+    mamba env create --prefix "${test_prefix}" --file "environments/${env_name}.yaml"
+    mkdir -p "environments-to-deploy/${env_name}/${timestamp}"
+    mamba env export --prefix "${test_prefix}" |  tee "environments-to-deploy/${env_name}/${timestamp}/${subdir}.yaml"
+done
diff --git a/.ci/deploy_environments.sh b/.ci/deploy_environments.sh
index 3391987f6e97f1431505417d6375fad22b5e1ae2..926f81e9c39e575da44ed209c9f0a68a0ac6b291 100755
--- a/.ci/deploy_environments.sh
+++ b/.ci/deploy_environments.sh
@@ -2,16 +2,33 @@
 set -euo pipefail
 IFS=$'\n\t'
 
-TASK_UUID=$(curl --silent -L -X PUT  -H "Authorization: Bearer ${REQUEST_DEPLOY_INSTALL_TOKEN}" "https://lhcb-core-tasks.web.cern.ch/hooks/conda-deploy/deploy/${CI_PIPELINE_ID}/" | cut -d '"' -f 2)
-echo "Sucessfully sent task with ID ${TASK_UUID}"
-while true; do
-    CURRENT_STATUS=$(curl --silent -L "https://lhcb-core-tasks.web.cern.ch/tasks/status/$TASK_UUID" | cut -d '"' -f 2);
-    echo "$(date -u): Status is ${CURRENT_STATUS}";
-    if [[ "${CURRENT_STATUS}" == "SUCCESS" ]]; then
-        exit 0;
-    elif [[ "${CURRENT_STATUS}" == "FAILURE" ]]; then
-        exit 1;
-    else
-        sleep 30;
-    fi
-done
+
+function do_deploy() {
+    yaml_path="$1"
+    name=$(dirname "${yaml_path}")
+    subdir=$(basename "${yaml_path}")
+    subdir=${subdir%.*}
+
+    echo "Deploying ${name} for ${subdir} from ${yaml_path}"
+    task_uuid=$(curl -L -X PUT \
+        -H "Authorization: Bearer ${REQUEST_DEPLOY_INSTALL_TOKEN}" \
+        --data-binary "@${yaml_path}" \
+        "https://lhcb-core-tasks.web.cern.ch/hooks/conda-deploy/deploy/v2/${name}/?subdir=${subdir}")
+    echo "Successfully sent task with ID ${task_uuid}"
+
+    while true; do
+        current_status=$(curl --silent -L "https://lhcb-core-tasks.web.cern.ch/tasks/status/$task_uuid" | cut -d '"' -f 2);
+        echo "$(date -u): Status is ${current_status}";
+        if [[ "${current_status}" == "SUCCESS" ]]; then
+            exit 0;
+        elif [[ "${current_status}" == "FAILURE" ]]; then
+            exit 1;
+        else
+            sleep 30;
+        fi
+    done
+}
+export -f do_deploy
+
+cd "environments-to-deploy/"
+find * -name '*.yaml' -type f -exec bash -c "do_deploy '{}'" \;
diff --git a/.ci/setup_lhcbdev_test_cvmfs.sh b/.ci/setup_lhcbdev_test_cvmfs.sh
deleted file mode 100755
index 2eca95de3d8d48e394c63b256f64382cdea9c5d6..0000000000000000000000000000000000000000
--- a/.ci/setup_lhcbdev_test_cvmfs.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-# Install CVMFS
-yum -y install https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-latest.noarch.rpm
-yum install -y cvmfs
-# TODO: Stop using the test instance of lhcbdev
-echo "CVMFS_SERVER_URL=http://cvmfs-lhcbdev.s3.cern.ch/cvmfs/lhcbdev.cern.ch" > /etc/cvmfs/config.d/lhcbdev.cern.ch.conf
-echo "CVMFS_HTTP_PROXY=DIRECT" >> /etc/cvmfs/default.conf
-mkdir /real_cvmfs
-mount -t cvmfs lhcbdev.cern.ch /real_cvmfs
-# Prepare mocked CVMFS
-mkdir -p "/cvmfs/lhcbdev.cern.ch/conda/envs"
-mkdir -p "/cvmfs/lhcbdev.cern.ch/conda/pkgs/linux-64"
-mkdir -p "/cvmfs/lhcbdev.cern.ch/conda/miniconda/linux-64/"
-# Symlink the main miniconda installation
-ln -s "/real_cvmfs/conda/miniconda/linux-64/prod" "/cvmfs/lhcbdev.cern.ch/conda/miniconda/linux-64/"
-ln -s "/real_cvmfs/conda/miniconda/linux-64/$(basename "$(readlink /real_cvmfs/conda/miniconda/linux-64/prod)")" "/cvmfs/lhcbdev.cern.ch/conda/miniconda/linux-64/"
-ln -s "/real_cvmfs/conda/.condarc" "/cvmfs/lhcbdev.cern.ch/conda/"
-# Activate miniconda
-source /cvmfs/lhcbdev.cern.ch/conda/miniconda/linux-64/prod/bin/activate
-# Add the existing CVMFS packages directory to the search list
-conda config --add pkgs_dirs /real_cvmfs/conda/pkgs/linux-64/
diff --git a/.ci/upload-artifacts.py b/.ci/upload-artifacts.py
deleted file mode 100755
index c0a8471728ec294aaa0d497984b9b8dccc7761f4..0000000000000000000000000000000000000000
--- a/.ci/upload-artifacts.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-import os
-
-import requests
-from requests_toolbelt import MultipartEncoder
-
-pipeline_id = os.environ["CI_PIPELINE_ID"]
-response = requests.get(
-    f"https://lhcb-core-tasks.web.cern.ch/hooks/conda-deploy/artifact_url/{pipeline_id}/",
-    headers={"Authorization": f"Bearer {os.environ['REQUEST_DEPLOY_URL_TOKEN']}"}
-)
-response.raise_for_status()
-data = response.json()
-
-with open(f"build-artifacts/{pipeline_id}.tar.ztd", "rb") as fp:
-    files = {"file": (f"{pipeline_id}.tar.ztd", fp, 'application/octet-stream')}
-    m = MultipartEncoder(fields=dict(**data["fields"], **files))
-    response = requests.post(data["url"], data=m, headers={'content-type': m.content_type})
-response.raise_for_status()
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8b6b6911f7fcffc71d564f9b4d1d77127d2063d3..e31762b1929d977377d4bc34c1f9cca056857984 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,19 +2,21 @@ stages:
   - build
   - deploy
 
-create conda environments:
-  tags:
-    - docker-privileged-xl
+create environments:
   image: centos:7
   stage: build
-  before_script:
-    - .ci/setup_lhcbdev_test_cvmfs.sh
   script:
     - source .ci/create_environments.sh
+  artifacts:
+    expose_as: 'Environments to deploy'
+    paths:
+      - environments-to-deploy/
 
-deploy conda environments:
+deploy environments:
   stage: deploy
   only:
     - master@lhcb-core/conda-environments
   script:
     - source .ci/deploy_environments.sh
+  dependencies:
+    - create environments
diff --git a/environments/DPA/analysis-productions.yaml b/environments/DPA/analysis-productions.yaml
index a86882f629590665fb7912e3b6897a361bcf05a0..8c54dc746933d66fa072303be62e88e2d6214b0b 100644
--- a/environments/DPA/analysis-productions.yaml
+++ b/environments/DPA/analysis-productions.yaml
@@ -1,12 +1,12 @@
 name: analysis-productions
 channels:
-  - conda-forge/label/lhcbdirac_rc
   - conda-forge
-  - defaults
+  - diracgrid
+  - nodefaults
 dependencies:
   - python =3.9
   - pip
-  - lhcbdirac
+  - lhcbdirac >=10.4
   - lhcbdirac-config >=1
   - ipython
   - backports.tempfile
@@ -21,6 +21,8 @@ dependencies:
   - git
   - gitpython
   - python-kubernetes
+  - requests-toolbelt
+  - python-gssapi
   - m2crypto >=0.36
   - singularity
   - LbAPCommon