diff --git a/.gitmodules b/.gitmodules
index d75866031082d2ea38242046f99b498755c547e2..2b47352b03c5e482d622ad909967a74d506ce230 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -10,3 +10,6 @@
 [submodule "src/exts/influxdb-cpp"]
 	path = src/exts/influxdb-cpp
 	url = https://github.com/orca-zhang/influxdb-cpp.git
+[submodule "src/exts/json-schema-validator"]
+	path = src/exts/json-schema-validator
+	url = https://github.com/pboettch/json-schema-validator.git
diff --git a/scripts/update_config.py b/scripts/update_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b4033f95ea0666979a48c1330be30593a38e013
--- /dev/null
+++ b/scripts/update_config.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python3
+
+#
+# Script to update labRemote JSON configurations (c.f. !168).
+#
+# author: Daniel Joseph Antrim
+# e-mail: dantrim AT lbl DOT gov
+# date: February 2021
+#
+
+import sys
+from argparse import ArgumentParser
+
+import json
+import jq # needed for updating the fields
+from typing import Dict
+
+from pathlib import Path
+
+
+FIELDS_TO_UPDATE = ["devices", "channels", "datastreams", "datasinks"]
+
+def update_path(path : str, json_config : Dict) -> Dict :
+    """
+    Update the JSON object and/or value located at the
+    given path within the input (potentially arbitrarily
+    deeply nested) JSON configuration.
+
+    If no node with the provided path appears in the input
+    JSON configuration, an exception is raised.
+
+    The update replaces the JSON objects located under the
+    pointed-to node with a list of the same objects, each
+    given a "name" field whose value is the key that
+    originally indexed the object.
+    For example, providing the path "labA.devices" 
+    along with the following JSON config,
+
+    {
+        "labA" : {
+            "devices" : {
+                "PS1" : {
+                    "foo" : 123,
+                    "bar" : "/dev/ttyAMC0"
+                },
+                "PS2" : {
+                    "foo" : 456,
+                    "bar" : "/dev/ttyAMC1"
+                }
+            }
+        },
+        "labB" : {
+            "devices" : {
+                "PS3" : {
+                    "foo" : 200,
+                    "bar" : "/dev/ttyAMC0"
+                },
+                "PS4" : {
+                    "foo" : 800,
+                    "bar" : "/dev/ttyAMC1"
+                }
+            }
+        }
+    }
+
+    would produce the following updated JSON config as an output:
+
+    {
+        "labA" : {
+            "devices" : [
+                {
+                    "name" : "PS1",
+                    "foo" : 123,
+                    "bar" : "/dev/ttyAMC0"
+                },
+                {
+                    "name" : "PS2",
+                    "foo" : 456,
+                    "bar" : "/dev/ttyAMC1"
+                }
+            ]
+        },
+        "labB" : {
+            "devices" : {
+                "PS3" : {
+                    "foo" : 200,
+                    "bar" : "/dev/ttyAMC0"
+                },
+                "PS4" : {
+                    "foo" : 800,
+                    "bar" : "/dev/ttyAMC1"
+                }
+            }
+        }
+    }
+
+    Notice that the "devices" node under the "labB" parent node
+    is unaffected since the provided path was "labA.devices".
+
+    Parameters
+    ----------
+    path: str
+        Indicates the path to a JSON node that should be updated.
+        For example, a path "labA.foo.bar.devices" will update
+        the JSON objects appearing under ["labA"]["foo"]["bar"]["devices"]
+        in Python dict notation.
+
+    json_config: Dict
+        The JSON configuration to be updated.
+            
+    """
+
+    keys = [x.strip() for x in path.strip().split('.')]
+    final_node = json_config
+    for key in keys :
+        if key not in final_node :
+            raise ValueError(f"ERROR: Key \"{key}\" does not appear in specified path \"{path}\" in provided JSON config")
+        final_node = final_node[key]
+
+    # the keys appearing in the JSON objects under the final node
+    # specified by 'path' are now values to "name" fields of
+    # JSON objects that will be grouped into a list
+    updated_node = []
+    for key, val in final_node.items() :
+        new_object = {}
+        new_object["name"] = key
+        for sub_key, sub_val in val.items() :
+            new_object[sub_key] = sub_val
+        updated_node.append(new_object)
+
+    # use jq to update the arbitrarily nested node
+    jq_path = '.' + path.strip()
+
+    # serialize the updated node with json.dumps so that jq receives valid
+    # JSON (Python's repr would emit single quotes and True/False/None)
+    compile_string = f'{jq_path} = {json.dumps(updated_node)}'
+    updated_string = jq.compile(compile_string).input(json_config).text()
+    updated_config = json.loads(updated_string)
+    return updated_config
+
+
+def update_config(input_filename : str) -> None :
+    """
+    Inspect the input JSON file and update any of
+    the labRemote JSON objects that need updating.
+
+    The updates are not made in place; rather,
+    a new file is created whose name carries
+    a suffix indicating that it has been
+    updated.
+
+    Parameters
+    ----------
+    input_filename: str
+        The input file to open and parse
+
+    Returns
+    ----------
+    None
+    """
+
+    global FIELDS_TO_UPDATE
+
+    with open(input_filename, "r") as infile :
+
+        try :
+            input_config = json.load(infile)
+        except json.JSONDecodeError as e:
+            print(f"ERROR: failed to parse JSON from input: {e}")
+            sys.exit(1)
+
+    def find_path_to_field(element_to_look_for, node, path = '', paths = []) :
+        """
+        Find the paths to any child nodes whose name is "element_to_look_for".
+        """ 
+        if element_to_look_for in node :
+            path = path + element_to_look_for
+            paths.append(path)
+        for key, val in node.items() :
+            if isinstance(val, dict) :
+                find_path_to_field(element_to_look_for, val, path + key + ".", paths)
+
+    # update those fields indicated in the global list
+    # (can handle cases where a JSON config has multiple
+    # paths leading to nodes whose names appear in the
+    # global list)
+    for field_to_update in FIELDS_TO_UPDATE :
+        found_paths = []
+        find_path_to_field(field_to_update, input_config, '', found_paths)
+        for found_path in found_paths :
+            input_config = update_path(found_path, input_config)
+
+    # create the output filename based on adding a suffix
+    # to the input file's name
+    input_path = Path(input_filename)
+    output_filename = input_path.stem + "_updated.json"
+
+    # here we don't worry about overwriting an existing output
+    # file, since applying this script multiple times is not
+    # recommended anyway (e.g. running it over an already-updated
+    # config may cause issues)
+    print(f"Storing updated configuration: {Path(output_filename).absolute()}")
+    with open(output_filename, "w", encoding = "utf-8") as ofile :
+        json.dump(input_config, ofile, ensure_ascii = False, indent = 4)
+
+def main() :
+
+    parser = ArgumentParser(description = "Update a labRemote JSON configuration file!")
+    parser.add_argument("input", type = str,
+            help = "The JSON file to inspect and update"
+    )
+    args = parser.parse_args()
+
+    path = Path(args.input)
+    if not path.is_file() :
+        print(f"ERROR: bad input \"{args.input}\"")
+        sys.exit(1)
+
+    # update the config and save the updated one as a new file
+    update_config(args.input)
+
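+# Example usage (illustrative):
+#
+#   python scripts/update_config.py path/to/old_config.json
+#
+# The updated configuration is written to old_config_updated.json in the
+# current working directory; the original file is left untouched.
+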
+if __name__ == "__main__" :
+    main()
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index e700dbbe48e2be247da5dc56ed15960bd0295594..1d63b5a3adfc8f75f7c4606ef769aa472fcfabf3 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -55,6 +55,9 @@ else()
   set(JSON_Install OFF CACHE INTERNAL "")
   add_subdirectory(exts/nlohmann_json EXCLUDE_FROM_ALL)
   set(JSON_LIBS "nlohmann_json::nlohmann_json")
+  set(BUILD_SHARED_LIBS ON) # must be set since the json-schema-validator library builds static by default
+  set(BUILD_TESTS OFF)
+  add_subdirectory(exts/json-schema-validator EXCLUDE_FROM_ALL)
 endif()
 
 # Find influxdb-cpp or use integrated one
@@ -72,6 +75,15 @@
   message(STATUS "Disabling linux-GPIB code due to missing libgpib (LIBGPIB_FOUND = ${LIBGPIB_FOUND})")
 endif()
 
+#
+# copy resource files to the specified location(s) (as written, this needs to happen before the add_subdirectory calls below)
+#
+set(LABREMOTE_SCHEMA_FILENAME "labremote_config_schema.json")
+file(COPY schema/${LABREMOTE_SCHEMA_FILENAME} DESTINATION ${CMAKE_BINARY_DIR}/share/schema)
+install(FILES schema/${LABREMOTE_SCHEMA_FILENAME} DESTINATION ${CMAKE_INSTALL_PREFIX}/share/labRemote/schema)
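+# (the same LABREMOTE_SCHEMA_FILENAME value is baked into libUtils via its
+#  configure_file(FileDefs.h.in ...) call, so the C++ code can locate the schema at runtime)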
+
 #
 # Add libraries
 add_subdirectory(libPS)
diff --git a/src/configs/input-hw.json b/src/configs/input-hw.json
index 4b9aba47905a0ab23ee8d32c5edc2b31de15280e..3a77039100f7d04aeb695770b9486aa4c23022b4 100644
--- a/src/configs/input-hw.json
+++ b/src/configs/input-hw.json
@@ -1,81 +1,92 @@
 {
     "version": "1.0",
-
     "options" : {
     },
-    "datasinks":{
-	"Console":{
+    "datasinks": [
+        {
+            "name": "Console",
             "sinktype": "ConsoleSink"
         },
-	"File":{
+        {
+            "name": "File",
             "sinktype": "CSVSink",
             "directory": "myOutputData"
         },
-	"db": {
+        {
+            "name": "db",
             "sinktype": "InfluxDBSink",
             "host": "127.0.0.1",
             "port": 8086,
             "database": "dcsDB",
             "username": "userName",
             "password": "password",
-	    "precision": 5
+            "precision": 5
         }
-    },
-    "datastreams":{
-	"PowerSupplies":{
-	    "sinks": ["Console", "db"]
-	},
-	"Climate":{
-	    "sinks": ["Console"]
-	}
-    },
-    "devices": {
-	"PS" : {
-	    "hw-type": "PS",
-	    "hw-model": "RigolDP832",
-	    "communication": {
-		"protocol" : "CharDeviceCom",
-		"port" : "/dev/usbtmc0"
-	    }
-	},
-	"PS2": {
-	    "hw-type": "PS",
-	    "hw-model": "AgilentPs",
-	    "communication": {
-		"protocol" : "GPIBSerialCom",
-		"port" : "/dev/ttyUSB1",
-		"gpib_addr": 3
-	    }
-	}
-    },
-    "channels": {
-	"low-voltage" : {
-	    "hw-type" : "PS",
-	    "device": "PS",
-	    "channel" : 1,
-	    "program": {
-		"maxvoltage": 5
-	    }
-	},
-	"peltier" : {
-	    "hw-type" : "PS",
-	    "device": "PS",
-	    "channel" : 2
-	},
-	"low-voltage_modA": {
-	    "hw-type" : "PS",	    
-	    "device": "PS2",
-	    "channel" : 1
-	},
-	"low-voltage_modB": {
-	    "hw-type" : "PS",
-	    "device": "PS2",
-	    "channel" : 2
-	},
-	"low-voltage_modC": {
-	    "hw-type" : "PS",	    
-	    "device": "PS2",
-	    "channel" : 3
-	}
-    }	    
+    ],
+    "datastreams": [
+        {
+            "name": "PowerSupplies",
+            "sinks": ["Console", "db"]
+        },
+        {
+            "name": "Climate",
+            "sinks": ["Console"]
+        }
+    ],
+    "devices": [
+        {
+            "name": "PS",
+            "hw-type": "PS",
+            "hw-model": "RigolDP832",
+            "communication": {
+                "protocol": "CharDeviceCom",
+                "port": "/dev/usbtmc0"
+            }
+        },
+        {
+            "name": "PS2",
+            "hw-type": "PS",
+            "hw-model": "AgilentPs",
+            "communication": {
+                "protocol": "GPIBSerialCom",
+                "port": "/dev/ttyUSB1",
+                "gpib_addr": 3
+            }
+        }
+    ],
+    "channels": [
+        {
+            "name": "low-voltage",
+            "hw-type" : "PS",
+            "device": "PS",
+            "channel" : 1,
+            "program": {
+                "maxvoltage": 5
+            }
+        },
+        {
+            "name": "peltier",
+            "hw-type" : "PS",
+            "device": "PS",
+            "channel" : 2
+        },
+        {
+            "name": "low-voltage_modA",
+            "hw-type" : "PS",       
+            "device": "PS2",
+            "channel" : 1
+        },
+        {
+            "name": "low-voltage_modB",
+            "hw-type" : "PS",
+            "device": "PS2",
+            "channel" : 2
+        },
+        {
+            "name": "low-voltage_modC",
+            "hw-type" : "PS",       
+            "device": "PS2",
+            "channel" : 3
+        }
+    ]
 }
diff --git a/src/exts/json-schema-validator b/src/exts/json-schema-validator
new file mode 160000
index 0000000000000000000000000000000000000000..1519c845c2b1bbf35021feb11c23625de603ca1e
--- /dev/null
+++ b/src/exts/json-schema-validator
@@ -0,0 +1 @@
+Subproject commit 1519c845c2b1bbf35021feb11c23625de603ca1e
diff --git a/src/exts/nlohmann_json b/src/exts/nlohmann_json
index b7be613b6ec6269c829144ff1cc8a633876d3092..db78ac1d7716f56fc9f1b030b715f872f93964e4 160000
--- a/src/exts/nlohmann_json
+++ b/src/exts/nlohmann_json
@@ -1 +1 @@
-Subproject commit b7be613b6ec6269c829144ff1cc8a633876d3092
+Subproject commit db78ac1d7716f56fc9f1b030b715f872f93964e4
diff --git a/src/labRemote/CMakeLists.txt b/src/labRemote/CMakeLists.txt
index be02947409d691f1e856ff4dbf6ea65ce2c8e90e..5147d2df2d0343e7dfc245865f30d80ee9dd41d0 100644
--- a/src/labRemote/CMakeLists.txt
+++ b/src/labRemote/CMakeLists.txt
@@ -4,22 +4,22 @@ file(GLOB REGISTER_PYTHON_MODULE_SRC
           )
 
 set(python_module_name _labRemote)
-set(labremote_libs Com Utils PS EquipConf DataSink DevCom Chiller)
+set(labremote_libs Com Utils PS EquipConf DataSink DevCom Chiller nlohmann_json_schema_validator)
 
 pybind11_add_module(${python_module_name} module.cpp ${REGISTER_PYTHON_MODULE_SRC})
 target_link_libraries(${python_module_name} PRIVATE ${labremote_libs} pybind11_json)
 
 if (SKBUILD)
-  # Set the RPATH
-  if (APPLE)
-    set(rpath "@loader_path/../../..")
-  else()
-    set(rpath "$ORIGIN/../../..")
-  endif()
-  message("${rpath}")
-  set_target_properties(${python_module_name} PROPERTIES INSTALL_RPATH ${rpath})
-endif()
+    # install the compiled python module in src/labRemote to be picked up by the python wheels setup
+    install(TARGETS ${python_module_name} LIBRARY DESTINATION src/labRemote)
+    # co-locate all dependent shared libraries alongside where we put the python wrapper
+    install(TARGETS ${labremote_libs} LIBRARY DESTINATION src/labRemote)
 
-set(PYLABREMOTE_INSTALL_PATH ${CMAKE_INSTALL_LIBDIR}/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/site-packages/labRemote)
-install(TARGETS ${python_module_name} LIBRARY DESTINATION ${PYLABREMOTE_INSTALL_PATH} )
-install(FILES __init__.py com.py ps.py ec.py DESTINATION ${PYLABREMOTE_INSTALL_PATH} )
+    # Set the RPATH
+    if (APPLE)
+      set(rpath "@loader_path")
+    else()
+      set(rpath "$ORIGIN")
+    endif()
+    set_target_properties(${python_module_name} ${labremote_libs} PROPERTIES INSTALL_RPATH ${rpath})
+endif()
diff --git a/src/libDataSink/CMakeLists.txt b/src/libDataSink/CMakeLists.txt
index dfdf32df34253315a358f472b7e90de0764194fb..ee39ea8c412997ea990cedd96fd5a39874ebde39 100644
--- a/src/libDataSink/CMakeLists.txt
+++ b/src/libDataSink/CMakeLists.txt
@@ -9,7 +9,7 @@ target_sources(DataSink
   )
 
 target_link_libraries(DataSink PRIVATE Utils)
-target_link_libraries(DataSink PUBLIC ${JSON_LIBS})
+target_link_libraries(DataSink PUBLIC ${JSON_LIBS} nlohmann_json_schema_validator)
 target_include_directories(DataSink PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
 
 # Add optional influxdb datasink
diff --git a/src/libEquipConf/CMakeLists.txt b/src/libEquipConf/CMakeLists.txt
index 6865e4c274a126a1a5722f338cd72ac9de8d9513..c22008d9e8a1d11af415014b129c8593ea4f5ec2 100644
--- a/src/libEquipConf/CMakeLists.txt
+++ b/src/libEquipConf/CMakeLists.txt
@@ -9,7 +9,7 @@ set(SrcFiles
 # create target library
 add_library(EquipConf SHARED ${SrcFiles})
 
-target_link_libraries(EquipConf PUBLIC Utils PS Com DataSink ${JSON_LIBS})
+target_link_libraries(EquipConf PUBLIC Utils PS Com DataSink ${JSON_LIBS} nlohmann_json_schema_validator)
 target_include_directories(EquipConf PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
 
 # Tell rest of labRemote that the library exists
diff --git a/src/libEquipConf/DataSinkConf.cpp b/src/libEquipConf/DataSinkConf.cpp
index 233512ede0e47ce2cb0eb429d40aaa6fcf904366..af70f61bd6d8bc646d6569ccb53222cf56f900a2 100644
--- a/src/libEquipConf/DataSinkConf.cpp
+++ b/src/libEquipConf/DataSinkConf.cpp
@@ -6,13 +6,14 @@
 #include <string>
 
 #include "Logger.h"
-
 #include "DataSinkRegistry.h"
 #include "CombinedSink.h"
+#include "FileUtils.h" // labremote_schema_file
 
-#include "Logger.h"
-
-using json = nlohmann::json;
+// json
+#include <nlohmann/json-schema.hpp>
+using nlohmann::json;
+using nlohmann::json_schema::json_validator;
 
 using json = nlohmann::json;
 
@@ -44,31 +45,63 @@ void DataSinkConf::setHardwareConfig(const std::string& hardwareConfigFile)
     throw std::runtime_error("Provided datasink configuration file \"" + hardwareConfigFile + "\" could not be found or opened");
   }
   i >> m_hardwareConfig;
+
+  // validate the loaded configuration against the labRemote schema
+  json_validator validator;
+  std::string schema_path = utils::labremote_schema_file();
+  if(schema_path != "") {
+    std::ifstream ifs_schema(schema_path, std::ios::in);
+    validator.set_root_schema(json::parse(ifs_schema));
+    try {
+        validator.validate(m_hardwareConfig);
+    } catch (std::exception& e) {
+        logger(logERROR) << "The provided JSON configuration failed the schema check (input file: " << hardwareConfigFile << ")";
+        logger(logERROR) << "Are using an old-style labRemote JSON configuration? If so, try running the following command:";
+        logger(logERROR) << "    python /path/to/labRemote/scripts/update_config.py " << hardwareConfigFile;
+        logger(logERROR) << "The error messsage from the JSON validation was: " << e.what();
+    }
+  } else {
+    logger(logWARNING) << "Could not locate schema definition, cannot validate datasink configuration!";
+  }
 }
 
 void DataSinkConf::setHardwareConfig(const json& hardwareConfig)
 {
   //store JSON config file
   m_hardwareConfig = hardwareConfig;
+
+  // validate the loaded configuration against the labRemote schema
+  json_validator validator;
+  std::string schema_path = utils::labremote_schema_file();
+  if(schema_path != "") {
+    std::ifstream ifs_schema(schema_path, std::ios::in);
+    validator.set_root_schema(json::parse(ifs_schema));
+    try {
+        validator.validate(m_hardwareConfig);
+    } catch (std::exception& e) {
+        logger(logERROR) << "The provided JSON configuration failed the schema check";
+        logger(logERROR) << "Are using an old-style labRemote JSON configuration? If so, try running the following command:";
+        logger(logERROR) << "    python /path/to/labRemote/scripts/update_config.py";
+        logger(logERROR) << "The error messsage from the JSON validation was: " << e.what();
+    }
+  } else {
+    logger(logWARNING) << "Could not locate schema definition, cannot validate datasink configuration!";
+  }
 }
 
 json DataSinkConf::getDataSinkConf(const std::string& label)
 {
-  for (const auto& hw : m_hardwareConfig["datasinks"].items())
-    {
-      //check label 
-      if (hw.key() == label) return hw.value();
-    }
+  for (const auto& datasink : m_hardwareConfig["datasinks"]) {
+    if (datasink["name"] == label) return datasink;
+  }
   return json();
 }
 
 json DataSinkConf::getDataStreamConf(const std::string& label)
 {
-  for (const auto& hw : m_hardwareConfig["datastreams"].items())
-    {
-      //check label 
-      if (hw.key() == label) return hw.value();
-    }
+  for (const auto& datastream : m_hardwareConfig["datastreams"]) {
+    if (datastream["name"] == label) return datastream;
+  }
   return json();
 }
 
diff --git a/src/libEquipConf/EquipConf.cpp b/src/libEquipConf/EquipConf.cpp
index 3245617872604af24fd67b1fb2ddf074949adcb8..72f12de1c048aaf2568750b2afd772e7de9e8f85 100644
--- a/src/libEquipConf/EquipConf.cpp
+++ b/src/libEquipConf/EquipConf.cpp
@@ -8,9 +8,12 @@
 #include "PowerSupplyRegistry.h"
 #include "ComRegistry.h"
 #include "Logger.h"
+#include "FileUtils.h" // labremote_schema_file
 
-#include "Logger.h"
-using json = nlohmann::json;
+// json
+#include <nlohmann/json-schema.hpp>
+using nlohmann::json;
+using nlohmann::json_schema::json_validator;
 
 using json = nlohmann::json;
 
@@ -42,28 +45,62 @@ void EquipConf::setHardwareConfig(const std::string& hardwareConfigFile)
     throw std::runtime_error("Provided equipment configuration file \"" + hardwareConfigFile + "\" could not be found or opened");
   }
   i >> m_hardwareConfig;
+
+  // validate the loaded configuration against the labRemote schema
+  json_validator validator;
+  std::string schema_path = utils::labremote_schema_file();
+  if(schema_path != "") {
+    std::ifstream ifs_schema(schema_path, std::ios::in);
+    validator.set_root_schema(json::parse(ifs_schema));
+    try {
+        validator.validate(m_hardwareConfig);
+    } catch (std::exception& e) {
+        logger(logERROR) << "The provided JSON configuration failed the schema check (input file: " << hardwareConfigFile << ")";
+        logger(logERROR) << "Are using an old-style labRemote JSON configuration? If so, try running the following command:";
+        logger(logERROR) << "    python /path/to/labRemote/scripts/update_config.py " << hardwareConfigFile;
+        logger(logERROR) << "The error messsage from the JSON validation was: " << e.what();
+    }
+  } else {
+    logger(logWARNING) << "Could not locate schema definition, cannot validate equipment configuration!";
+  }
 }
 
 void EquipConf::setHardwareConfig(const json& hardwareConfig)
 {
   //store JSON config file
   m_hardwareConfig = hardwareConfig;
+
+  // validate the loaded configuration against the labRemote schema
+  json_validator validator;
+  std::string schema_path = utils::labremote_schema_file();
+  if(schema_path != "") {
+    std::ifstream ifs_schema(schema_path, std::ios::in);
+    validator.set_root_schema(json::parse(ifs_schema));
+    try {
+        validator.validate(m_hardwareConfig);
+    } catch (std::exception& e) {
+        logger(logERROR) << "The provided JSON configuration failed the schema check";
+        logger(logERROR) << "Are using an old-style labRemote JSON configuration? If so, try running the following command:";
+        logger(logERROR) << "    python /path/to/labRemote/scripts/update_config.py";
+        logger(logERROR) << "The error messsage from the JSON validation was: " << e.what();
+    }
+  } else {
+    logger(logWARNING) << "Could not locate schema definition, cannot validate equipment configuration!";
+  }
 }
 
 json EquipConf::getDeviceConf(const std::string& label)
 {
-  for (const auto& hw : m_hardwareConfig["devices"].items()) {
-    //check label 
-    if (hw.key() == label) return hw.value();
+  for (const auto& device : m_hardwareConfig["devices"]) {
+    if(device["name"] == label) return device;
   }
   return json();
 }
 
 json EquipConf::getChannelConf(const std::string& label)
 {
-  for (const auto& ch : m_hardwareConfig["channels"].items()) {
-    //check label 
-    if (ch.key() == label) return ch.value();
+  for (const auto& channel : m_hardwareConfig["channels"]) {
+     if(channel["name"] == label) return channel;
   }
   return json();
 }
diff --git a/src/libUtils/CMakeLists.txt b/src/libUtils/CMakeLists.txt
index 70c3504a8c40e3891a50e246d334c145a49cc249..8d820c430016ddcccc3d717227e4a1541771f271 100644
--- a/src/libUtils/CMakeLists.txt
+++ b/src/libUtils/CMakeLists.txt
@@ -3,9 +3,12 @@ target_sources(Utils
   PRIVATE
   Logger.cpp
   ScopeLock.cpp
+  FileUtils.cpp
   )
 target_include_directories(Utils PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
 set(libUtils_FOUND TRUE PARENT_SCOPE)
 
 set_target_properties(Utils PROPERTIES VERSION ${labRemote_VERSION_MAJOR}.${labRemote_VERSION_MINOR})
 install(TARGETS Utils)
+
+configure_file( FileDefs.h.in ${CMAKE_CURRENT_SOURCE_DIR}/FileDefs.h @ONLY)
diff --git a/src/libUtils/FileDefs.h.in b/src/libUtils/FileDefs.h.in
new file mode 100644
index 0000000000000000000000000000000000000000..274174a1f041440ef393e3ebcf4ca6165685ec16
--- /dev/null
+++ b/src/libUtils/FileDefs.h.in
@@ -0,0 +1,11 @@
+#include <string>
+
+namespace utils {
+namespace defs {
+
+    const std::string LABREMOTE_SCHEMA_FILENAME = "@LABREMOTE_SCHEMA_FILENAME@";
+    const std::string LABREMOTE_BUILD_DIR = "@CMAKE_BINARY_DIR@";
+    const std::string LABREMOTE_INSTALL_PREFIX = "@CMAKE_INSTALL_PREFIX@";
+
+} // namespace defs
+} // namespace utils
diff --git a/src/libUtils/FileUtils.cpp b/src/libUtils/FileUtils.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..ad765d5e379177d38e996c6b2973f2da90d3508a
--- /dev/null
+++ b/src/libUtils/FileUtils.cpp
@@ -0,0 +1,62 @@
+#include "FileUtils.h"
+#include "FileDefs.h" // LABREMOTE_SCHEMA_FILE
+#include "Logger.h"
+
+// std/stl
+#include <sstream>
+#include <iostream>
+#include <cstdlib> // std::getenv
+#if __linux__
+#include <sys/stat.h>
+#include <unistd.h> // getcwd, getlogin_r
+#include <stdio.h>
+#elif (__APPLE__) or (__unix__)
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+#include <sys/stat.h> // stat() is also needed on macOS/other unix
+#include <sys/types.h>
+#include <unistd.h>
+#endif
+
+namespace utils {
+
+std::string labremote_schema_file() {
+    std::string schema_path = "";
+
+    // check environment
+    char* env = std::getenv("LABREMOTE_RESOURCE_DIR");
+    if(env) {
+        std::string env_resource_dir(env);
+        if (!path_exists(env_resource_dir)) {
+            logger(logWARNING) << "Defined environment variable (LABREMOTE_RESOURCE_DIR) points to a non-existent path, cannot find labRemote schema file!";
+            return "";
+        }
+        schema_path = env_resource_dir + "/schema/" + utils::defs::LABREMOTE_SCHEMA_FILENAME;
+        if (!path_exists(schema_path)) {
+            logger(logWARNING) << "Could not find expected labRemote schema file: " << schema_path;
+            return "";
+        }
+        return schema_path;
+    }
+
+    // check labRemote build directory and install directory, if needed
+    schema_path = utils::defs::LABREMOTE_BUILD_DIR + "/share/schema/" + utils::defs::LABREMOTE_SCHEMA_FILENAME;
+    if (path_exists(schema_path)) {
+        return schema_path;
+    } else {
+        schema_path = utils::defs::LABREMOTE_INSTALL_PREFIX + "/share/labRemote/schema/" + utils::defs::LABREMOTE_SCHEMA_FILENAME;
+        if (path_exists(schema_path)) {
+            return schema_path;
+        }
+    }
+    return "";
+}
+
+bool path_exists(const std::string& path_name) {
+    struct stat st;
+    return (stat(path_name.c_str(), &st) == 0);
+}
+
+}; // namespace utils
+
diff --git a/src/libUtils/FileUtils.h b/src/libUtils/FileUtils.h
new file mode 100644
index 0000000000000000000000000000000000000000..fac4ecc2ef3e394832e0b6bbe70a8de034d30792
--- /dev/null
+++ b/src/libUtils/FileUtils.h
@@ -0,0 +1,44 @@
+#ifndef LABREMOTE_FILEUTILS_H
+#define LABREMOTE_FILEUTILS_H
+
+// std/stl
+#include <string>
+
+//! \brief Functions for handling common filesystem and file handling operations
+
+namespace utils {
+//! \brief Check if a given path exists in the current filesystem
+/**
+ * \param path_name The path whose existence is to be tested.
+ * \return `true` if the path exists, `false` otherwise.
+ */
+bool path_exists(const std::string& path_name);
+
+//! \brief Get the path to the labRemote json schema file
+/**
+ * This function attempts to find and return the path to the labRemote JSON
+ * schema file. It checks several possible locations for the schema file,
+ * based on the priority below (in the order that they are checked):
+ *
+ *  -# User-defined environment variable `LABREMOTE_RESOURCE_DIR`: If the
+ *  environment variable `LABREMOTE_RESOURCE_DIR` is defined, the schema file will be
+ *  searched for under the path given by `<LABREMOTE_RESOURCE_DIR>/schema/`.
+ *  -# User's labRemote build directory (i.e. `CMAKE_BINARY_DIR`): In this case, the
+ *  schema will be searched for under `<CMAKE_BINARY_DIR>/share/schema/`.
+ *  -# User's labRemote installation prefix path (i.e. `CMAKE_INSTALL_PREFIX`): In
+ *  this case, the schema will be searched for under `<CMAKE_INSTALL_PREFIX>/share/labRemote/schema/`.
+ *
+ * \return An `std::string` pointing to the path of the found labRemote schema
+ * file. An empty string `""` is returned if the schema file could not be located.
+ */
+std::string labremote_schema_file();
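+
+// Illustrative example: the schema lookup can be steered by exporting, e.g.
+//
+//   export LABREMOTE_RESOURCE_DIR=/path/to/resources
+//
+// before running a labRemote tool, where /path/to/resources/schema/ is
+// expected to contain the labRemote schema file.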
+}; // namespace utils
+
+#endif // LABREMOTE_FILEUTILS_H
diff --git a/src/schema/input-hw.json b/src/schema/input-hw.json
new file mode 100644
index 0000000000000000000000000000000000000000..e677fe976d0d2b7f0b542b12eb70a9f9c86b412f
--- /dev/null
+++ b/src/schema/input-hw.json
@@ -0,0 +1,92 @@
+{
+    "version": "1.0",
+    "options" : {
+    },
+    "datasinks": [
+        {
+            "name": "Console",
+            "sinktype": "ConsoleSink"
+        },
+        {
+            "name": "File",
+            "sinktype": "CSVSink",
+            "directory": "myOutputData"
+        },
+        {
+            "name": "db",
+            "sinktype": "InfluxDBSink",
+            "host": "127.0.0.1",
+            "port": 8086,
+            "database": "dcsDB",
+            "username": "userName",
+            "password": "password",
+            "precision": 5
+        }
+    ],
+    "datastreams": [
+        {
+            "name": "PowerSupplies",
+            "sinks": ["Console", "db"]
+        },
+        {
+            "name": "Climate",
+            "sinks": ["Console"]
+        }
+    ],
+    "devices": [
+        {
+            "name": "PS",
+            "hw-type": "PS",
+            "hw-model": "RigolDP832",
+            "communication": {
+                "protocol": "CharDeviceCom",
+                "port": "/dev/usbtmc0"
+            }
+        },
+        {
+            "name": "PS2",
+            "hw-type": "PS",
+            "hw-model": "AgilentPs",
+            "communication": {
+                "protocol": "GPIBSerialCom",
+                "port": "/dev/ttyUSB1",
+                "gpib_addr": 3
+            }
+        }
+    ],
+    "channels": [
+        {
+            "name": "low-voltage",
+            "hw-type" : "PS",
+            "device": "PS",
+            "channel" : 1,
+            "program": {
+                "maxvoltage": 5
+            }
+        },
+        {
+            "name": "peltier",
+            "hw-type" : "PS",
+            "device": "PS",
+            "channel" : 2
+        },
+        {
+            "name": "low-voltage_modA",
+            "hw-type" : "PS",
+            "device": "PS2",
+            "channel" : 1
+        },
+        {
+            "name": "low-voltage_modB",
+            "hw-type" : "PS",
+            "device": "PS2",
+            "channel" : 2
+        },
+        {
+            "name": "low-voltage_modC",
+            "hw-type" : "PS",
+            "device": "PS2",
+            "channel" : 3
+        }
+    ]
+    ]
+}
diff --git a/src/schema/labremote_config_schema.json b/src/schema/labremote_config_schema.json
new file mode 100644
index 0000000000000000000000000000000000000000..97a1fd3af9835747fc967e94d2a1f480d8344308
--- /dev/null
+++ b/src/schema/labremote_config_schema.json
@@ -0,0 +1,165 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "definitions": {
+        "nodedef_options_config": {
+            "type": "object",
+            "properties": {
+                "autoconfigure": { "type": "boolean" }
+            },
+            "additionalProperties": false
+        },
+        "nodedef_channels_config": {
+            "type": "array",
+            "items": { "$ref": "#/definitions/def_channel" }
+        },
+        "def_channel": {
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "hw-type": { "type": "string", "enum": ["PS"] },
+                "device": { "type": "string" },
+                "channel": { "type": "integer" },
+                "program": { "type": "object" },
+                "datasink": { "type": "string" }
+            },
+            "required": ["name", "hw-type", "device", "channel" ],
+            "additionalProperties": false
+        },
+        "nodedef_devices_config": {
+            "type": "array",
+            "items": { "$ref": "#/definitions/def_device" }
+        },
+        "def_device": {
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "hw-type": { "type": "string", "enum": ["PS"]},
+                "hw-model": { "type": "string" },
+                "communication" : { "$ref": "#/definitions/def_device_communication" },
+                "autoVoltageRange": { "type": "boolean" },
+                "autoCurrentRange": { "type" : "boolean" }
+            },
+            "required": ["name", "hw-type", "hw-model", "communication"],
+            "additionalProperties": false
+        },
+        "def_device_communication": {
+            "type": "object",
+            "oneOf": [
+                {
+                    "properties": {
+                        "protocol": { "const": "GPIBSerialCom" },
+                        "port": { "type": "string" },
+                        "gpib_addr": { "type": "integer" },
+                        "read_tmo_ms": { "type": "integer" },
+                        "baudrate": { "type": "string" },
+                        "parityBit": { "type": "boolean" },
+                        "twoStopBits": { "type": "boolean" },
+                        "flowControl": { "type": "boolean" },
+                        "charsize": { "type": "integer" },
+                        "timeout": { "type": "number" }
+                    },
+                    "required": ["protocol", "port", "gpib_addr"],
+                    "additionalProperties": false
+                },
+                {
+                    "properties": {
+                        "protocol": { "const": "CharDeviceCom" },
+                        "port": { "type": "string" }
+                    },
+                    "required": ["protocol", "port"],
+                    "additionalProperties": false
+                },
+                {
+                    "properties": {
+                        "protocol": { "const": "TextSerialCom" },
+                        "port": { "type": "string" },
+                        "termination": { "type": "string" },
+                        "baudrate": { "type": "string" },
+                        "parityBit": { "type": "boolean" },
+                        "twoStopBits": { "type": "boolean" },
+                        "flowControl": { "type": "boolean" },
+                        "charsize": { "type": "integer" },
+                        "timeout": { "type": "number" }
+                    },
+                    "required": ["protocol", "port", "termination"],
+                    "additionalProperties": false
+                }
+            ]
+        },
+        "nodedef_datastreams_config": {
+            "type": "array",
+            "items": { "$ref": "#/definitions/def_datastreams" }
+        },
+        "def_datastreams": {
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "sinks": { "type": "array", "items": {"type": "string"} }
+            },
+            "required": ["name", "sinks"],
+            "additionalProperties": false
+        },
+        "nodedef_datasinks_config": {
+            "type": "array",
+            "items": {
+                "anyOf": [
+                    {
+                        "$ref": "#/definitions/def_datasink_console"
+                    },
+                    {
+                        "$ref": "#/definitions/def_datasink_csv"
+                    },
+                    {
+                        "$ref": "#/definitions/def_datasink_influxdb"
+                    }
+                ]
+            }
+        },
+        "def_datasink_console": {
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "sinktype": { "type": "string" },
+                "column_width": { "type": "integer" }
+            },
+            "required": ["name", "sinktype"],
+            "additionalProperties": false
+        },
+        "def_datasink_csv": {
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "sinktype": { "type": "string" },
+                "directory": { "type": "string" }
+            },
+            "required": ["name", "sinktype", "directory"],
+            "additionalProperties": false
+        },
+        "def_datasink_influxdb": {
+            "type": "object",
+            "properties": {
+                "name": { "type": "string" },
+                "sinktype": { "type": "string" },
+                "host": { "type": "string" },
+                "port": { "type": "integer" },
+                "database": { "type": "string" },
+                "username": { "type": "string" },
+                "password": { "type": "string" },
+                "precision": { "type": "integer" }
+            },
+            "required": ["name", "sinktype", "host", "port", "database", "username", "password"],
+            "additionalProperties": false
+        }
+    },
+    "type": "object",
+    "properties": {
+        "version": { "type": "string" },
+        "options": { "$ref": "#/definitions/nodedef_options_config" },
+        "devices": { "$ref": "#/definitions/nodedef_devices_config" },
+        "channels": { "$ref": "#/definitions/nodedef_channels_config" },
+        "datastreams": { "$ref": "#/definitions/nodedef_datastreams_config" },
+        "datasinks": { "$ref": "#/definitions/nodedef_datasinks_config" }
+    },
+    "required": [],
+    "additionalProperties": true
+}
diff --git a/src/tools/CMakeLists.txt b/src/tools/CMakeLists.txt
index 56af09bd57398b9f29bd6f842e27f071c269a020..8f06242e6f912fd79b3237cf05e85fc875ec74c3 100644
--- a/src/tools/CMakeLists.txt
+++ b/src/tools/CMakeLists.txt
@@ -1,3 +1,9 @@
+#
+# check json schema
+add_executable(check_json_schema)
+target_sources(check_json_schema PRIVATE check_json_schema.cpp)
+target_link_libraries(check_json_schema PRIVATE Utils nlohmann_json_schema_validator)
+
 #
 # ftdi_setidentity
 if ( ${ENABLE_FTDI} )
diff --git a/src/tools/check_json_schema.cpp b/src/tools/check_json_schema.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..b54007e0e62259592037c8070cca90abbd6a23af
--- /dev/null
+++ b/src/tools/check_json_schema.cpp
@@ -0,0 +1,129 @@
+// std/stl
+#include <iostream>
+#include <iomanip>
+#include <fstream>
+#include <getopt.h>
+
+// json
+#include <nlohmann/json-schema.hpp>
+using nlohmann::json;
+using nlohmann::json_schema::json_validator;
+
+// labremote
+#include "FileUtils.h"
+
+void usage(char* argv[]) {
+    std::cerr << "Usage: " << argv[0] << " [OPTIONS] <input> [<schema>] \n";
+    std::cerr << "\n";
+    std::cerr << "Options:\n";
+    std::cerr << " -v, --verbose    Increase verbosity of output\n";
+    std::cerr << "Required positional arguments:\n";
+    std::cerr << " input            JSON file to validate against the provided schema\n";
+    std::cerr << "Optional positional arguments:\n";
+    std::cerr << " schema           File containing JSON schema specification (if not provided, will use default labRemote schema)\n";
+    std::cerr << "\n";
+}
+
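+// Example invocations (illustrative; the executable is placed under the
+// build directory's bin/ folder by the top-level CMake settings):
+//
+//   ./bin/check_json_schema -v my_config.json                # use the default labRemote schema
+//   ./bin/check_json_schema my_config.json my_schema.json    # validate against a user-provided schema
+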
+int main(int argc, char* argv[])
+{
+    bool _verbose = false;
+    static struct option long_options[] = {
+        {"verbose", no_argument, NULL, 'v'},
+        {"help", no_argument, NULL, 'h'},
+        {0,0,0,0}
+    };
+    int c;
+    while((c = getopt_long(argc, argv, "vh", long_options, NULL)) != -1) {
+        switch(c) {
+            case 'v':
+                _verbose = true;
+                break;
+            case 'h':
+                usage(argv);
+                return 0;
+            case '?':
+                std::cerr << "Invalid option \"" << (char)(c) << "\" provided\n";
+                return 1;
+        } // switch
+    } // while
+
+    if((argc-optind)>2) {
+        std::cerr << "Too many positional arguments provided (expect exactly 2)\n";
+        return 1;
+    }
+    else if((argc-optind) < 1) {
+        std::cerr << "Too few positional arguments provided (require exactly 2)\n";
+        return 1;
+    }
+
+    std::string input_filename = argv[optind++];
+    std::string schema_filename = "";
+    if((argc-optind) == 1) {
+        schema_filename = argv[optind++];
+    } else {
+        schema_filename = utils::labremote_schema_file();
+        if(schema_filename.empty()) {
+            std::cerr << "ERROR Failed to find schema file (did it get installed properly during the cmake configuration?)\n";
+            return 1;
+        }
+    }
+    if(_verbose) {
+        std::cout << "Using schema definition: " << schema_filename << "\n";
+    }
+
+    // check that the input files exist
+    std::ifstream ifs_schema(schema_filename, std::ios::in);
+    if(!ifs_schema.good()) {
+        std::cerr << "ERROR Could not open schema file (=" << schema_filename << ")\n";
+        return 1;
+    }
+
+    std::ifstream ifs_input(input_filename, std::ios::in);
+    if(!ifs_input.good()) {
+        std::cerr << "ERROR Could not open input file (=" << input_filename << ")\n";
+        return 1;
+    }
+
+    // parse the json objects from the schema and provided input file
+    json j_schema;
+    json j_input;
+    try {
+        j_schema = json::parse(ifs_schema);
+    } catch(std::exception& e) {
+        std::cerr << "ERROR Unable to parse JSON schema: " << e.what() << "\n";
+        return 1;
+    }
+    
+    try {
+        j_input = json::parse(ifs_input);
+    } catch(std::exception& e) {
+        std::cerr << "ERROR Unable to parse input JSON file: " << e.what() << "\n";
+        return 1;
+    }
+
+    // validate
+    json_validator validator;
+    try {
+        validator.set_root_schema(j_schema);
+    } catch(std::exception& e) {
+        std::cerr << "ERROR Provided schema is invalid: " << e.what() << "\n";
+        return 1;
+    }
+
+    try {
+        validator.validate(j_input);
+        if(_verbose) {
+            std::cout << "Validation successful\n";
+        }
+        return 0;
+    } catch(std::exception& e) {
+        std::cerr << "ERROR Input fails schema check: " << e.what() << "\n";
+        return 1;
+    }
+}