Commit 17b99735 authored by Manuel Guth's avatar Manuel Guth Committed by Alexander Froch
Browse files

Adding version and new test ci preprocess location

parent 122baa7f
......@@ -26,5 +26,10 @@ python_install/
# ignoring institute dependent parameter files
Preprocessing-parameters-*.yaml
# ignoring preprocessing integration test folders
preprocessing_*/
test_*_model*/
test_preprocessing_*/
test_train_*/
# ignoring any test directory
test-*/
jobs/
install_*/
*_batch/
\ No newline at end of file
......@@ -7,7 +7,7 @@ sample_path: &sample_path <path_palce_holder>/hybrids/
# Path where the merged and ready-to-train samples are saved
file_path: &file_path <path_palce_holder>/preprocessed/
# Name of the output file from the preprocessing
# Name of the output file from the preprocessing (has to be a .h5 file, no folder)
.outfile_name: &outfile_name <path_palce_holder>/PFlow-hybrid.h5
# List of variables for training (yaml)
......
......@@ -32,7 +32,7 @@ test_preprocessing_dips_count:
<<: *artifact_template
paths:
- plots/
- preprocessing_dips/
- test_preprocessing_dips/
- coverage_files/
test_preprocessing_dl1r_count:
......@@ -45,7 +45,7 @@ test_preprocessing_dl1r_count:
<<: *artifact_template
paths:
- plots/
- preprocessing_dl1r/
- test_preprocessing_dl1r/
- coverage_files/
test_preprocessing_umami_count:
......@@ -58,7 +58,7 @@ test_preprocessing_umami_count:
<<: *artifact_template
paths:
- plots/
- preprocessing_umami/
- test_preprocessing_umami/
- coverage_files/
test_preprocessing_dips_pdf:
......@@ -71,7 +71,7 @@ test_preprocessing_dips_pdf:
<<: *artifact_template
paths:
- plots/
- preprocessing_dips_pdf/
- test_preprocessing_dips_pdf/
- coverage_files/
test_preprocessing_dl1r_pdf:
......@@ -84,7 +84,7 @@ test_preprocessing_dl1r_pdf:
<<: *artifact_template
paths:
- plots/
- preprocessing_dl1r_pdf/
- test_preprocessing_dl1r_pdf/
- coverage_files/
test_preprocessing_umami_pdf:
......@@ -97,7 +97,7 @@ test_preprocessing_umami_pdf:
<<: *artifact_template
paths:
- plots/
- preprocessing_umami_pdf/
- test_preprocessing_umami_pdf/
- coverage_files/
test_preprocessing_dips_weighting:
......@@ -110,7 +110,7 @@ test_preprocessing_dips_weighting:
<<: *artifact_template
paths:
- plots/
- preprocessing_dips_weighting/
- test_preprocessing_dips_weighting/
- coverage_files/
test_preprocessing_dl1r_weighting:
......@@ -123,7 +123,7 @@ test_preprocessing_dl1r_weighting:
<<: *artifact_template
paths:
- plots/
- preprocessing_dl1r_weighting/
- test_preprocessing_dl1r_weighting/
- coverage_files/
test_preprocessing_umami_weighting:
......@@ -136,7 +136,7 @@ test_preprocessing_umami_weighting:
<<: *artifact_template
paths:
- plots/
- preprocessing_umami_weighting/
- test_preprocessing_umami_weighting/
- coverage_files/
test_train_dips:
......
......@@ -10,7 +10,7 @@ from setuptools import setup
setup(
name="umami",
version="0.0.0", # Also change in module
version="0.5", # Also change in module
packages=[
"umami",
"umami.configuration",
......@@ -25,16 +25,6 @@ setup(
"umami.metrics",
"umami.models",
],
# install_requires=[
# "h5py",
# "numpy",
# "matplotlib",
# "seaborn",
# "tables",
# "pandas",
# "tensorflow",
# "keras"
# ],
include_package_data=True,
test_suite="umami.tests",
scripts=[
......
"""Umami framework used in ATLAS FTAG for dataset preparation and tagger training."""
__version__ = "0.5"
......@@ -104,7 +104,13 @@ class Configuration:
if option is None and iteration is None:
return self.outfile_name
out_file = self.outfile_name
idx = out_file.index(".h5")
try:
idx = out_file.index(".h5")
except ValueError as error:
raise ValueError(
"Your specified `outfile_name` has to be a .h5 file. "
f"You defined in the preprocessing config {out_file}"
) from error
if iteration is None:
if option is None:
......
......@@ -188,7 +188,7 @@ def runPreprocessing(config: dict, tagger: str, method: str) -> bool:
"Test failed: preprocessing.py --to_records."
) from Error
tagger_path = f"./preprocessing_{tagger}/"
tagger_path = f"./test_preprocessing_{tagger}/"
if not os.path.isdir(tagger_path):
run(["mkdir", tagger_path], check=True)
......
......@@ -72,11 +72,11 @@ def prepareConfig(
config = os.path.join(test_dir, os.path.basename(config_source))
preprocessing_config_source = os.path.join(
f"./preprocessing_{preprocess_files}/preprocessing/",
f"./test_preprocessing_{preprocess_files}/preprocessing/",
os.path.basename(data["test_preprocessing"]["config"]),
)
preprocessing_config_paths_source = os.path.join(
f"./preprocessing_{preprocess_files}/preprocessing/",
f"./test_preprocessing_{preprocess_files}/preprocessing/",
os.path.basename(data["test_preprocessing"]["config_paths"]),
)
preprocessing_config = os.path.join(
......@@ -87,7 +87,7 @@ def prepareConfig(
)
var_dict_source = os.path.join(
f"./preprocessing_{preprocess_files}/preprocessing/",
f"./test_preprocessing_{preprocess_files}/preprocessing/",
os.path.basename(data["test_preprocessing"][f"var_dict_{preprocess_files}"]),
)
var_dict = os.path.join(test_dir, os.path.basename(var_dict_source))
......@@ -96,13 +96,14 @@ def prepareConfig(
logger.info("Retrieving files from preprocessing...")
train_file = os.path.join(
f"./preprocessing_{preprocess_files}/preprocessing/",
f"./test_preprocessing_{preprocess_files}/preprocessing/",
"PFlow-hybrid_70-test-resampled_scaled_shuffled.h5",
)
test_file_ttbar = os.path.join(test_dir, "ci_ttbar_testing.h5")
test_file_zprime = os.path.join(test_dir, "ci_zpext_testing.h5")
scale_dict = os.path.join(
f"./preprocessing_{preprocess_files}/preprocessing/", "PFlow-scale_dict.json"
f"./test_preprocessing_{preprocess_files}/preprocessing/",
"PFlow-scale_dict.json",
)
# prepare config files by modifying local copies of config files
......@@ -183,7 +184,7 @@ def prepareConfig(
if useTFRecords is True:
config_file["train_file"] = os.path.join(
f"./preprocessing_{preprocess_files}/preprocessing/",
f"./test_preprocessing_{preprocess_files}/preprocessing/",
"PFlow-hybrid_70-test-resampled_scaled_shuffled",
)
config_file["model_name"] = data["test_dips"]["model_name"] + "_tfrecords"
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment