diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py
index 49e4c2023f51aba7a0d6cf23ecc86d7ea37275e4..ade3d256df3a2f4263c117831ef8d1f71d1f579d 100755
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM.py
@@ -2,6 +2,21 @@
 
 print "hello from MaDQM.py"
 
+def getAndSetConfig(dbconnection, MCK, runtype, slice_name, reco_step, *vargs, **kwargs):
+  # check which entry the DB holds for this slice
+  config_thisFunction = readSliceConfigFromDB(dbconnection, MCK, runtype, slice_name, reco_step)
+  # if an entry exists we take the monitoring config from the DB, otherwise it is taken from the release in the standard way
+  if config_thisFunction is not None:
+    for (toolname, toolconfig) in config_thisFunction['Config']:
+      # if the tool is not yet loaded into ToolSvc, load it now
+      if not hasattr(ToolSvc, toolname):
+        exec "from %s import %s" % (config_thisFunction["PkgName"],config_thisFunction["CreatorName"])
+        exec "%s()" % (config_thisFunction["CreatorName"])
+      # modify defaults according to the config dictionary
+      tool = getattr(ToolSvc, toolname)
+      for confattr,confvalue in toolconfig.items():
+        setattr(tool, confattr, confvalue)
+
 # read monitoring configuration from the database
 # calling readSliceConfigFromDB(dbconnection, MCK, runtype, slicename, recostep) returns monitoring configuration
 # dbconnection is 'oracle'
@@ -15,136 +30,58 @@ print "hello from MaDQM.py"
 
 include("TrigHLTMonitoring/readConfigFromDB.py")
 
-if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw':
-  # here we check which entry there is in the DB for this slice
-  config_caloraw = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'CALORAW' , 'RAW') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_caloraw != None:
-    for (toolname, toolconfig) in config_caloraw['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+# if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw':
+#   # here we check which entry there is in the DB for this slice
+#   config_caloraw = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'CALORAW' , 'RAW') 
+#   # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
+#   if config_caloraw != None:
+#     for (toolname, toolconfig) in config_caloraw['Config']:
+#       #print "BENtest1:"
+#       #print "ToolSvc._Configurable__children",ToolSvc._Configurable__children
+#       #print "config_caloraw[\"PkgName\"]",config_caloraw["PkgName"]
+#       #print "config_caloraw[\"CreatorName\"]",config_caloraw["CreatorName"]
+#       exec "from %s import %s" % (config_caloraw["PkgName"],config_caloraw["CreatorName"])
+#       exec "%s()" % (config_caloraw["CreatorName"])
+#       #print "BENtest2:"
+#       #print "ToolSvc._Configurable__children",ToolSvc._Configurable__children
+#       #print "hasattr(ToolSvc,\"HLTCalo\")",hasattr(ToolSvc,"HLTCalo")
+#       # modify defaults according to config dictionary
+#       tool = getattr(ToolSvc,toolname)
+#       for confattr,confvalue in toolconfig.items():
+#         tool.__setattr__(confattr, confvalue)
 
+if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw':
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'CALORAW' , 'RAW')
 
 if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0ESD':
-  # here we check which entry there is in the DB for this slice
-  config_caloesd = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'CALOESD' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_caloesd != None:
-    for (toolname, toolconfig) in config_caloesd['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'CALOESD' , 'ESD')
 
 if HLTMonFlags.doTau:
-  # here we check which entry there is in the DB for this slice
-  config_tau = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'TAU' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_tau != None:
-    for (toolname, toolconfig) in config_tau['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'TAU' , 'ESD')
 
 if HLTMonFlags.doBjet:
-  # here we check which entry there is in the DB for this slice
-  config_bjet = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'BJET' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_bjet != None:
-    for (toolname, toolconfig) in config_bjet['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'BJET' , 'ESD')
 
 if HLTMonFlags.doBphys:
-  # here we check which entry there is in the DB for this slice
-  config_bphys = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'BPHYS' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_bphys != None:
-    for (toolname, toolconfig) in config_bphys['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'BPHYS' , 'ESD')
 
 if HLTMonFlags.doMET:
-  # here we check which entry there is in the DB for this slice
-  config_met = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'MET' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_met != None:
-    for (toolname, toolconfig) in config_met['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'MET' , 'ESD')
 
 if HLTMonFlags.doJet:
-  # here we check which entry there is in the DB for this slice
-  config_jet = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'JET' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_jet != None:
-    for (toolname, toolconfig) in config_jet['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'JET' , 'ESD')
 
 if HLTMonFlags.doEgamma:
-  # here we check which entry there is in the DB for this slice
-  config_egamma = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'EGAMMA' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_egamma != None:
-    for (toolname, toolconfig) in config_egamma['Config']:
-        # modify defaults according to config dictionary
-       tool = getattr(ToolSvc,toolname)
-       for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'EGAMMA' , 'ESD')
 
 if HLTMonFlags.doMuon:
-  # here we check which entry there is in the DB for this slice
-  config_muon = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'MUON' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_muon != None:
-    for (toolname, toolconfig) in config_muon['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'MUON' , 'ESD')
 
 if HLTMonFlags.doIDtrk:
-  # here we check which entry there is in the DB for this slice
-  config_idtrk = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'IDTRK' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_idtrk != None:
-    for (toolname, toolconfig) in config_idtrk['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'IDTRK' , 'ESD')
 
 if HLTMonFlags.doMinBias:
-  # here we check which entry there is in the DB for this slice
-  config_minbias = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'MINBIAS' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_minbias != None:
-    for (toolname, toolconfig) in config_minbias['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'MINBIAS' , 'ESD')
 
 if HLTMonFlags.doIDJpsiMon:
-  # here we check which entry there is in the DB for this slice
-  config_idjpsi = readSliceConfigFromDB('oracle','ACTIVE_KEY','Physics', 'IDJPSI' , 'ESD') 
-  # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
-  if config_idjpsi != None:
-    for (toolname, toolconfig) in config_idjpsi['Config']:
-        # modify defaults according to config dictionary
-        tool = getattr(ToolSvc,toolname)
-        for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
-
+  getAndSetConfig('oracle','ACTIVE_KEY','Physics', 'IDJPSI' , 'ESD')
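
For reference, a minimal standalone sketch (not part of the patch) of the dictionary shape that getAndSetConfig above expects readSliceConfigFromDB to return, and of how the property overrides are applied. The field names come from the code above and the HLTMuonMon values from the MUON dictionaries further down; the dummy ToolSvc class only stands in for the real Gaudi ToolSvc so the sketch runs on its own.

# sketch of the slice-configuration dictionary consumed by getAndSetConfig
example_slice_config = {
    "Config": [("HLTMuonMon", {"ZTPPtCone20RelCut": 0.15})],  # (tool name, property overrides)
    "PkgName": "TrigMuonMonitoring.TrigMuonMonitoringConfig",
    "CreatorName": "TrigMuonMonitoringTool",
}

class _DummyToolSvc(object):
    """Stand-in for the Gaudi ToolSvc, only so this sketch runs outside Athena."""
    class HLTMuonMon(object):
        pass

# apply the property overrides exactly the way getAndSetConfig does
for toolname, toolconfig in example_slice_config["Config"]:
    tool = getattr(_DummyToolSvc, toolname)
    for confattr, confvalue in toolconfig.items():
        setattr(tool, confattr, confvalue)
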
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py
index a4aa4edc86b1c912a1235261388566d36aae7168..69c340ee7012e61efb8fa957c0a5816e7b0218af 100755
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/MaDQM1.py
@@ -24,10 +24,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_caloraw != None:
       for (toolname, toolconfig) in config_caloraw['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
 
   if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0ESD':
@@ -36,10 +36,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_caloesd != None:
       for (toolname, toolconfig) in config_caloesd['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doTau:
     # here we check which entry there is in the DB for this slice
@@ -47,10 +47,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_tau != None:
       for (toolname, toolconfig) in config_tau['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doBjet:
     # here we check which entry there is in the DB for this slice
@@ -58,10 +58,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_bjet != None:
       for (toolname, toolconfig) in config_bjet['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doBphys:
     # here we check which entry there is in the DB for this slice
@@ -69,10 +69,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_bphys != None:
       for (toolname, toolconfig) in config_bphys['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doMET:
     # here we check which entry there is in the DB for this slice
@@ -80,10 +80,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_met != None:
       for (toolname, toolconfig) in config_met['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doJet:
     # here we check which entry there is in the DB for this slice
@@ -91,10 +91,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_jet != None:
       for (toolname, toolconfig) in config_jet['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doEgamma:
     # here we check which entry there is in the DB for this slice
@@ -102,10 +102,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_egamma != None:
       for (toolname, toolconfig) in config_egamma['Config']:
-          # modify defaults according to config dictionary
-         tool = getattr(ToolSvc,toolname)
-         for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doMuon:
     # here we check which entry there is in the DB for this slice
@@ -113,10 +113,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_muon != None:
       for (toolname, toolconfig) in config_muon['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doIDtrk:
     # here we check which entry there is in the DB for this slice
@@ -124,10 +124,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_idtrk != None:
       for (toolname, toolconfig) in config_idtrk['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-            tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doMinBias:
     # here we check which entry there is in the DB for this slice
@@ -135,10 +135,10 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_minbias != None:
       for (toolname, toolconfig) in config_minbias['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
   if HLTMonFlags.doIDJpsiMon:
     # here we check which entry there is in the DB for this slice
@@ -146,8 +146,8 @@ def configureMonitoring(MCK_number):
     # if entry is not None then we take monitoring config from DB, else it is taken from the release in a standard way 
     if config_idjpsi != None:
       for (toolname, toolconfig) in config_idjpsi['Config']:
-          # modify defaults according to config dictionary
-          tool = getattr(ToolSvc,toolname)
-          for confattr,confvalue in toolconfig.items():
-              tool.__setattr__(confattr, confvalue)
+        # modify defaults according to config dictionary
+        tool = getattr(ToolSvc,toolname)
+        for confattr,confvalue in toolconfig.items():
+          tool.__setattr__(confattr, confvalue)
 
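
The blocks re-indented above repeat the same per-slice pattern that MaDQM.py now wraps in getAndSetConfig. Purely as a sketch, and assuming the helper were shared between the two scripts and that MCK_number is what gets passed to readSliceConfigFromDB, configureMonitoring could be reduced to calls like these:

def configureMonitoring(MCK_number):
    # sketch only: reuse of the getAndSetConfig helper defined in MaDQM.py above
    if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0Raw':
        getAndSetConfig('oracle', MCK_number, 'Physics', 'CALORAW', 'RAW')
    if HLTMonFlags.doCalo and DQMonFlags.monManEnvironment == 'tier0ESD':
        getAndSetConfig('oracle', MCK_number, 'Physics', 'CALOESD', 'ESD')
    if HLTMonFlags.doTau:
        getAndSetConfig('oracle', MCK_number, 'Physics', 'TAU', 'ESD')
    # ...and so on for the remaining slices
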
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/create_schema.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/create_schema.sql
new file mode 100644
index 0000000000000000000000000000000000000000..f7e5572debdf877b4ee0d98edcb9fc547e34ddef
--- /dev/null
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/create_schema.sql
@@ -0,0 +1,70 @@
+CREATE SEQUENCE seq_mck_table_id;
+
+CREATE TABLE mck_table (
+    mck_id			  NUMBER(10),
+    mck_default			  CHAR,
+    mck_athena_version            VARCHAR2(20),
+    mck_creator          	  VARCHAR2(100),
+    mck_creation_date    	  TIMESTAMP,
+    mck_comment          	  VARCHAR2(1000),
+    CONSTRAINT        	   	  mck_pk		       PRIMARY KEY(mck_id),
+    CONSTRAINT           	  mck_default_nn	       CHECK(mck_default IN (0,1)),
+    CONSTRAINT           	  mck_athena_version_nn        CHECK(mck_athena_version IS NOT NULL),
+    CONSTRAINT           	  mck_creator_nn               CHECK(mck_creator IS NOT NULL)
+);
+
+CREATE SEQUENCE seq_smck_table_id;
+
+CREATE TABLE smck_table (
+    smck_id			  NUMBER(10),
+    smck_slice_type      	  VARCHAR2(50),
+    smck_tool_type      	  VARCHAR2(50),
+    smck_tool_patch_version   	  VARCHAR2(50),
+    smck_processing_step 	  VARCHAR2(10),
+    smck_config          	  CLOB,
+    smck_config_hash          	  VARCHAR2(128),
+    smck_default         	  CHAR,
+    smck_athena_version  	  VARCHAR2(20),
+    smck_creator         	  VARCHAR2(100),
+    smck_creation_date   	  TIMESTAMP,
+    smck_comment         	  VARCHAR2(1000),
+    CONSTRAINT           	  smck_pk		       PRIMARY KEY(smck_id),
+    CONSTRAINT           	  smck_slice_type_nn           CHECK(smck_slice_type IS NOT NULL),
+    CONSTRAINT           	  smck_tool_type_nn            CHECK(smck_tool_type IS NOT NULL),
+    CONSTRAINT           	  smck_tool_patch_version_u    UNIQUE(smck_tool_patch_version),
+    CONSTRAINT           	  smck_tool_patch_version_nn   CHECK(smck_tool_patch_version IS NOT NULL),
+    CONSTRAINT           	  smck_processing_step_nn      CHECK(smck_processing_step IS NOT NULL),
+    CONSTRAINT           	  smck_config_nn               CHECK(smck_config IS NOT NULL),
+    CONSTRAINT           	  smck_config_hash_u           UNIQUE(smck_tool_type,smck_processing_step,smck_config_hash),
+    CONSTRAINT           	  smck_config_hash_nn          CHECK(smck_config_hash IS NOT NULL),
+    CONSTRAINT           	  smck_default_nn              CHECK(smck_default IN (0,1)),
+    CONSTRAINT           	  smck_athena_version_nn       CHECK(smck_athena_version IS NOT NULL),
+    CONSTRAINT           	  smck_creator_nn              CHECK(smck_creator IS NOT NULL)
+);
+
+CREATE TABLE mck_to_smck_link (
+    link_mck                      NUMBER(10),
+    link_smck            	  NUMBER(10),
+    CONSTRAINT           	  link_u		       UNIQUE(link_mck,link_smck),
+    CONSTRAINT           	  link_mck_nn                  CHECK(link_mck IS NOT NULL),
+    CONSTRAINT           	  link_mck_fk                  FOREIGN KEY(link_mck) REFERENCES mck_table(mck_id),
+    CONSTRAINT           	  link_smck_nn                 CHECK(link_smck IS NOT NULL),
+    CONSTRAINT       	 	  link_smck_fk                 FOREIGN KEY(link_smck) REFERENCES smck_table(smck_id)
+);
+
+CREATE TABLE mck_to_smk_link (
+    smk_link_mck                  NUMBER(10),
+    smk_link_smk            	  NUMBER(10),
+    smk_link_creator          	  VARCHAR2(100),
+    smk_link_creation_date    	  TIMESTAMP,
+    smk_link_comment          	  VARCHAR2(1000),
+    CONSTRAINT           	  smk_link_mck_nn              CHECK(smk_link_mck IS NOT NULL),
+    CONSTRAINT           	  smk_link_mck_fk              FOREIGN KEY(smk_link_mck) REFERENCES mck_table(mck_id),
+    CONSTRAINT           	  smk_link_smk_u	       UNIQUE(smk_link_smk),
+    CONSTRAINT           	  smk_link_smk_nn              CHECK(smk_link_smk IS NOT NULL),
+    CONSTRAINT           	  smk_link_creator_nn          CHECK(smk_link_creator IS NOT NULL)
+);
+
+CREATE INDEX mck_default_index ON mck_table(mck_default);
+CREATE INDEX link_mck_index    ON mck_to_smck_link(link_mck);
+CREATE INDEX link_smck_index   ON mck_to_smck_link(link_smck);
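
To illustrate how the sequence and the NOT NULL / IN (0,1) checks above are meant to be used, here is a hypothetical direct cx_Oracle insert into mck_table. In practice the database access goes through the package's OracleInterface (see MenuAwareMonitoring.py below); the credentials and values here are placeholders.

import cx_Oracle  # assumption: plain cx_Oracle access, shown only for illustration

connection = cx_Oracle.connect("user", "password", "host:port/service")  # placeholder credentials
cursor = connection.cursor()
cursor.execute(
    "INSERT INTO mck_table "
    "(mck_id, mck_default, mck_athena_version, mck_creator, mck_creation_date, mck_comment) "
    "VALUES (seq_mck_table_id.NEXTVAL, :mck_default, :athena_version, :creator, CURRENT_TIMESTAMP, :mck_comment)",
    {"mck_default": "0", "athena_version": "SomeAthenaVersion", "creator": "someuser", "mck_comment": "example entry"},
)
connection.commit()
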
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql
index eac11cc0d7beacf21d686c3a78faf0d5cdfa6c9a..11cdad81b3ff308c1347619e41836a133a55ec21 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/del.sql
@@ -1,28 +1,22 @@
-DROP INDEX master_id_ind;
-DROP INDEX type_id_ind;
-DROP INDEX type_id_ind_1;
-DROP INDEX type2sig_id_ind;
-DROP INDEX type_name_ind;
---DROP INDEX sig_config_id_ind1;
-DROP INDEX sig_config_name_ind;
-DROP INDEX sig_config_recostep_ind;
---DROP INDEX sig_config_data_ind;
---DROP INDEX sig_config_script_ind;
-DROP INDEX sig_config_data_hash_ind;
-DROP INDEX sig_config_script_hash_ind;
-ALTER TABLE mt2type DROP CONSTRAINT mt2type_fk_mt;
-ALTER TABLE mt2type DROP CONSTRAINT mt2type_fk_type;
-ALTER TABLE type2sig DROP CONSTRAINT type2sig_fk_type;
-ALTER TABLE type2sig DROP CONSTRAINT type2sig_fk_sig;
-ALTER TABLE master_table DROP CONSTRAINT mt_pk;
-ALTER TABLE type_table DROP CONSTRAINT type_pk1;
-ALTER TABLE sig_config DROP CONSTRAINT sc_pk;  
-ALTER TABLE mt2type DROP CONSTRAINT mt2type_uc;
-ALTER TABLE mt2type DROP CONSTRAINT master_id_NN;
-ALTER TABLE mt2type DROP CONSTRAINT type_id_NN1;
-ALTER TABLE type2sig DROP CONSTRAINT type2sig_uc;
-DROP TABLE master_table;
-DROP TABLE type_table;
-DROP TABLE sig_config;
-DROP TABLE mt2type;
-DROP TABLE type2sig; 
+DROP INDEX mck_id_index;
+DROP INDEX mck_default_index;
+DROP INDEX smck_id_index;
+DROP INDEX smck_config_hash_index;
+DROP INDEX link_mck_index;
+DROP INDEX link_smck_index;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_smck_fk;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_smck_nn;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_mck_fk;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_mck_nn;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_u;
+ALTER TABLE smck_table DROP CONSTRAINT smck_config_hash_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_config_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_slice_version_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_slice_version_u;
+ALTER TABLE smck_table DROP CONSTRAINT smck_slice_type_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_pk;
+ALTER TABLE mck_table DROP CONSTRAINT mck_default_nn;
+ALTER TABLE mck_table DROP CONSTRAINT mck_pk;
+DROP TABLE mck_to_smck_link;
+DROP TABLE smck_table;
+DROP TABLE mck_table;
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/delete_schema.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/delete_schema.sql
new file mode 100644
index 0000000000000000000000000000000000000000..bd2c410bc6c61746a4c7e442607440d7441b5e01
--- /dev/null
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/delete_schema.sql
@@ -0,0 +1,38 @@
+DROP INDEX mck_default_index;
+DROP INDEX link_mck_index;
+DROP INDEX link_smck_index;
+
+ALTER TABLE mck_to_smk_link DROP CONSTRAINT smk_link_creator_nn;
+ALTER TABLE mck_to_smk_link DROP CONSTRAINT smk_link_smk_nn;
+ALTER TABLE mck_to_smk_link DROP CONSTRAINT smk_link_smk_u;
+ALTER TABLE mck_to_smk_link DROP CONSTRAINT smk_link_mck_fk;
+ALTER TABLE mck_to_smk_link DROP CONSTRAINT smk_link_mck_nn;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_smck_fk;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_smck_nn;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_mck_fk;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_mck_nn;
+ALTER TABLE mck_to_smck_link DROP CONSTRAINT link_u;
+ALTER TABLE smck_table DROP CONSTRAINT smck_creator_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_athena_version_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_default_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_config_hash_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_config_hash_u;
+ALTER TABLE smck_table DROP CONSTRAINT smck_config_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_processing_step_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_tool_patch_version_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_tool_patch_version_u;
+ALTER TABLE smck_table DROP CONSTRAINT smck_tool_type_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_slice_type_nn;
+ALTER TABLE smck_table DROP CONSTRAINT smck_pk;
+ALTER TABLE mck_table DROP CONSTRAINT mck_creator_nn;
+ALTER TABLE mck_table DROP CONSTRAINT mck_athena_version_nn;
+ALTER TABLE mck_table DROP CONSTRAINT mck_default_nn;
+ALTER TABLE mck_table DROP CONSTRAINT mck_pk;
+
+DROP SEQUENCE seq_mck_table_id;
+DROP SEQUENCE seq_smck_table_id;
+
+DROP TABLE mck_to_smk_link;
+DROP TABLE mck_to_smck_link;
+DROP TABLE smck_table;
+DROP TABLE mck_table;
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py
index b403387f0a0e9c105c044f815118f936fd0abb2b..1ff4fdb6ccc2c4d53ef3e5f1947735f83d6242ae 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Cosmics_dictionary.py
@@ -5,8 +5,7 @@ RunType='Cosmics'
 Recostep='ESD'
 MonitoringTriggers= [ " ",]                        
 Config=[]
-Config+= [("HLTIDpvtx", {"OnlineEfficiancyRangeCutY" : 0.02,}), ("HLTIDtrkDump", {"MinSiHits" : 7,}),
-("tidatool", {}), ("MyAtlasExtrapolator", {}), ("MyCaloExtrapolatorTool", {}), ("HLTIDZeeTag", {})
+Config+= [("MyAtlasExtrapolator", {}),
 ]
 PkgName = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig"
 CreatorName = "TrigIDtrkMonitoringTool"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py
index c8caa4cb464d48a91cbe0f6d4889716dc9cfd0cc..cf2683999840413daae2d8aa68648ea95b80d316 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Physics_dictionary.py
@@ -3,10 +3,9 @@
 Slice='IDTRK'
 RunType='Physics'
 Recostep='ESD'
-MonitoringTriggers= [ " ",]                        
+MonitoringTriggers= [ "BENS_MADE-UP_TRIGGER_NAME",]
 Config=[]
-Config+= [("HLTIDpvtx", {"OnlineEfficiancyRangeCutY" : 0.02,}), ("HLTIDtrkDump", {"MinSiHits" : 7,}),
-("tidatool", {}), ("MyAtlasExtrapolator", {}), ("MyCaloExtrapolatorTool", {}), ("HLTIDZeeTag", {})
+Config+= [("MyAtlasExtrapolator", {}),
 ]
 PkgName = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig"
 CreatorName = "TrigIDtrkMonitoringTool"
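
To make the PkgName/CreatorName fields concrete: for this dictionary, the two exec statements in getAndSetConfig (MaDQM.py above) evaluate to the lines below, which only run inside an Athena release that provides the package.

# expansion of getAndSetConfig's two exec strings for the PkgName/CreatorName above
from TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig import TrigIDtrkMonitoringTool
TrigIDtrkMonitoringTool()  # instantiates the monitoring tool(s) so getAndSetConfig can find them via getattr(ToolSvc, toolname)
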
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py
index e95153ebe1cec7101aa10f1487e0200be5626567..94d14e1849d9b8a055f00716dae8aad5bfae55ec 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/IDTRK_Standby_dictionary.py
@@ -5,8 +5,7 @@ RunType='Standby'
 Recostep='ESD'
 MonitoringTriggers= [ " ",]                        
 Config=[]
-Config+= [("HLTIDpvtx", {"OnlineEfficiancyRangeCutY" : 0.02,}), ("HLTIDtrkDump", {"MinSiHits" : 7,}),
-("tidatool", {}), ("MyAtlasExtrapolator", {}), ("MyCaloExtrapolatorTool", {}), ("HLTIDZeeTag", {})
+Config+= [("MyAtlasExtrapolator", {}),
 ]
 PkgName = "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig"
 CreatorName = "TrigIDtrkMonitoringTool"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py
index a5717b472c9c1cf31ab942818d2c5da505ac7c29..42bb105ac97b2cc344d89d288d6ad9468dfbe139 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Cosmics_dictionary.py
@@ -5,7 +5,7 @@ RunType='Cosmics'
 Recostep='ESD'
 MonitoringTriggers= [" ",]                        
 Config=[]
-Config+= [("HLTMETMon", {}), ("HLTMETMon_FEB", {}), ("HLTMETMon_topocl", {}),]
+Config+= [("HLTMETMon", {}),]
 PkgName = "TrigMETMonitoring.TrigMETMonitoringConfig"
 CreatorName = "HLTMETMonitoringTool"
 Script = "print 'Test script'"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py
index fc2f1d96d60f1cc5389313bc4502213291e39048..fdc080df2dc9154ca7ca12931091c67b090fa651 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Physics_dictionary.py
@@ -5,7 +5,7 @@ RunType='Physics'
 Recostep='ESD'
 MonitoringTriggers= [ " ",]                        
 Config=[]
-Config+= [("HLTMETMon", {}), ("HLTMETMon_FEB", {}), ("HLTMETMon_topocl", {}),]
+Config+= [("HLTMETMon", {}),]
 PkgName = "TrigMETMonitoring.TrigMETMonitoringConfig"
 CreatorName = "HLTMETMonitoringTool"
 Script = "print 'Test script'"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py
index c4ce4e72235c5589eee310bceb18d5d55d449569..3c1ffd117b4f3fa8c0011d956f63ff38b253f7f1 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MET_Standby_dictionary.py
@@ -5,7 +5,7 @@ RunType='Standby'
 Recostep='ESD'
 MonitoringTriggers= [ " ",]                        
 Config=[]
-Config+= [("HLTMETMon", {}), ("HLTMETMon_FEB", {}), ("HLTMETMon_topocl", {}),]
+Config+= [("HLTMETMon", {}),]
 PkgName = "TrigMETMonitoring.TrigMETMonitoringConfig"
 CreatorName = "HLTMETMonitoringTool"
 Script = "print 'Test script'"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py
index 74583d364ec2ca794156596c7254d9b234a940c6..6a4098129e29bc9d96f0dcd57ccdfcf82fd8e1d3 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Cosmics_dictionary.py
@@ -1,2 +1,11 @@
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 
+Slice='MUON'
+RunType='Cosmics'
+Recostep='ESD'
+MonitoringTriggers = [ " ",]                        
+Config = []
+Config += [("HLTMuonMon", {"ZTPPtCone20RelCut" : 0.15,}),]
+PkgName = "TrigMuonMonitoring.TrigMuonMonitoringConfig"
+CreatorName = "TrigMuonMonitoringTool"
+Script = "print 'Test script'"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py
index fb371954d109441dbb60525991778e35647f9173..e1416af5805b72f40f03e768b7fe8c45d1212915 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/dictionaries/MUON_Standby_dictionary.py
@@ -1,3 +1,11 @@
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 
-
+Slice='MUON'
+RunType='Standby'
+Recostep='ESD'
+MonitoringTriggers = [ " ",]                        
+Config = []
+Config += [("HLTMuonMon", {"ZTPPtCone20RelCut" : 0.15,}),]
+PkgName = "TrigMuonMonitoring.TrigMuonMonitoringConfig"
+CreatorName = "TrigMuonMonitoringTool"
+Script = "print 'Test script'"
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py
index 317353237324c6ffa6c310458eed13e22bfb6f40..9e4dfe172363abafa2641605b6283fc0a805777d 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/readDictionary.py
@@ -20,6 +20,8 @@ def readDictionary(*vargs, **kwargs):
     ConfigScriptHashMissing=[]
     slice_match_pattern_list=[]
 
+
+
     for sl_index, sl in enumerate(slice_list):
         for rtl in run_type_list:
            dictpath = "dictionaries/"
@@ -33,7 +35,7 @@ def readDictionary(*vargs, **kwargs):
            #print "here", my_dictionary_file
            #print "non zero file ", is_non_zero_file(my_dictionary_file)
            my_dictionary=sl+"_"+rtl+"_"+"dictionary"
-           tmp = "dictionaries."+my_dictionary 
+           #tmp = "dictionaries."+my_dictionary 
            my_dict=sl+"_"+rtl+"_"+"dict"
            #print "la la ", my_dictionary                                                                                                           
            #print my_dict                                                                                                                           
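
The (now commented-out) module path above follows the naming convention of the dictionary files in this directory. Below is a sketch of importing one of them dynamically by that name; readDictionary's actual import mechanism is not shown in this hunk, so the importlib call is only illustrative and needs the dictionaries package on the Python path.

import importlib

sl, rtl = "MET", "Physics"                            # example slice and run type from the dictionaries above
my_dictionary = sl + "_" + rtl + "_" + "dictionary"   # e.g. MET_Physics_dictionary
slice_module = importlib.import_module("dictionaries." + my_dictionary)
print slice_module.Slice, slice_module.RunType, slice_module.PkgName, slice_module.CreatorName
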
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql
index 3fd3f4fd6957132a1c1200950ecc07b2d9439419..12552f7a6be8a58612ee1bff8834981434ac8a25 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/schema.sql
@@ -1,55 +1,40 @@
-CREATE TABLE master_table (
-    mt_id   NUMBER(10),
-    mt_name  VARCHAR2(200),
-    mt_comment VARCHAR2(2000),
-    CONSTRAINT     mt_pk   PRIMARY KEY(mt_id)
+CREATE TABLE mck_table (
+    mck_id   	       NUMBER(10),
+    mck_default        CHAR,
+    mck_creator        VARCHAR2(100),
+    mck_creation_date  VARCHAR2(100),
+    mck_comment        VARCHAR2(1000),
+    CONSTRAINT         mck_pk                PRIMARY KEY(mck_id),
+    CONSTRAINT         mck_default_nn        CHECK(mck_default IS NOT NULL) 
 );
-CREATE TABLE type_table (
-    tt_id             NUMBER(10),
-    tt_name  VARCHAR2(50),
-    CONSTRAINT     type_pk1   PRIMARY KEY(tt_id)
+CREATE TABLE smck_table (
+    smck_id            NUMBER(10),
+    smck_slice_type    VARCHAR2(10),
+    smck_slice_version VARCHAR2(10),
+    smck_config        CLOB,
+    smck_config_hash   VARCHAR2(200),
+    smck_creator       VARCHAR2(100),
+    smck_creation_date VARCHAR2(100),
+    smck_comment       VARCHAR2(1000),
+    CONSTRAINT         smck_pk               PRIMARY KEY(smck_id),
+    CONSTRAINT         smck_slice_type_nn    CHECK(smck_slice_type IS NOT NULL),
+    CONSTRAINT         smck_slice_version_u  UNIQUE(smck_slice_version),
+    CONSTRAINT         smck_slice_version_nn CHECK(smck_slice_version IS NOT NULL),
+    CONSTRAINT         smck_config_nn        CHECK(smck_config IS NOT NULL),
+    CONSTRAINT         smck_config_hash_nn   CHECK(smck_config_hash IS NOT NULL)
 );
-CREATE TABLE sig_config (
-    sc_id             NUMBER(10),
-    sc_name  VARCHAR2(50),
-    recostep  VARCHAR2(50),
---    sc_data  VARCHAR2(500),
---    sc_script  VARCHAR2(500),
-    sc_data        CLOB,
-    sc_script      CLOB,
-    data_hash VARCHAR2(200),
-    script_hash VARCHAR2(200),
-    CONSTRAINT  sc_pk   PRIMARY KEY(sc_id)
+CREATE TABLE mck_to_smck_link (
+    link_mck           NUMBER(10),
+    link_smck          NUMBER(10),
+    CONSTRAINT         link_u                UNIQUE(link_mck,link_smck),
+    CONSTRAINT         link_mck_nn           CHECK(link_mck IS NOT NULL),
+    CONSTRAINT         link_mck_fk           FOREIGN KEY(link_mck) REFERENCES mck_table(mck_id),
+    CONSTRAINT         link_smck_nn          CHECK(link_smck IS NOT NULL),
+    CONSTRAINT         link_smck_fk          FOREIGN KEY(link_smck) REFERENCES smck_table(smck_id)
 );
-CREATE TABLE mt2type (
-    master_id      NUMBER(10),
-    type_id1     NUMBER(10),
-    CONSTRAINT mt2type_uc UNIQUE (master_id,type_id1),
-    CONSTRAINT  mt2type_fk_mt     FOREIGN KEY (master_id) REFERENCES master_table(mt_id),
-    CONSTRAINT  mt2type_fk_type    FOREIGN KEY (type_id1) REFERENCES type_table(tt_id),
-    CONSTRAINT  master_id_NN  CHECK ( master_id IS NOT NULL),
-    CONSTRAINT  type_id_NN1 CHECK ( type_id1 IS NOT NULL)
-);
-CREATE TABLE type2sig (
-    type_id2        NUMBER(10),
-    sig_config_id      NUMBER(10),
-    CONSTRAINT type2sig_uc UNIQUE (type_id2,sig_config_id),
-    CONSTRAINT  type2sig_fk_type     FOREIGN KEY (type_id2) REFERENCES type_table(tt_id),
-    CONSTRAINT  type2sig_fk_sig    FOREIGN KEY (sig_config_id) REFERENCES sig_config(sc_id),
-    CONSTRAINT  type_id_NN  CHECK ( type_id2 IS NOT NULL),
-    CONSTRAINT  sig_config_id_NN CHECK ( sig_config_id IS NOT NULL)
-);
-CREATE INDEX master_id_ind  ON mt2type(master_id);
-CREATE INDEX type_id_ind ON mt2type(type_id1);
-CREATE INDEX type_id_ind_1  ON type2sig(type_id2);
-CREATE INDEX type2sig_id_ind ON type2sig(sig_config_id);
-CREATE INDEX type_name_ind ON type_table(tt_name);
---CREATE INDEX sig_config_id_ind1 ON sig_config(sc_id);
-CREATE INDEX sig_config_name_ind ON sig_config(sc_name);
-CREATE INDEX sig_config_recostep_ind ON sig_config(recostep);
---CREATE INDEX sig_config_data_ind ON sig_config(sc_data);
---CREATE INDEX sig_config_script_ind ON sig_config(sc_script);
-CREATE INDEX sig_config_data_hash_ind ON sig_config(data_hash);
-CREATE INDEX sig_config_script_hash_ind ON sig_config(script_hash);
-
-
+CREATE INDEX mck_id_index ON mck_table(mck_id);
+CREATE INDEX mck_default_index ON mck_table(mck_default);
+CREATE INDEX smck_id_index ON smck_table(smck_id);
+CREATE INDEX smck_config_hash_index ON smck_table(smck_config_hash);
+CREATE INDEX link_mck_index ON mck_to_smck_link(link_mck);
+CREATE INDEX link_smck_index ON mck_to_smck_link(link_smck);
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py
index a59d684588d6e0991c5fae2691464bf14705ff76..ced9d1d2588b029b6971d4fa1431bd1b352ae941 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/database/upload.py
@@ -34,7 +34,7 @@ def upload(MakeActive='False'):
   # get active key MCK
 
   activeKeyMCK = getActiveKeyMCK('oracle')
-  #print "active key MCK = ", activeKeyMCK
+  print "active key MCK = ", activeKeyMCK
   
   # if OVERWRITE = 1 then we should create a new config entry in the DB or take the existing one but not the active one
   OVERWRITE=0
@@ -43,15 +43,20 @@ def upload(MakeActive='False'):
   index_list=[]
   dbConfigId_check=[]
   
+  #print "ConfigNameDict", ConfigNameDict
+  #print "enumerate(ConfigNameDict)", enumerate(ConfigNameDict)
+
   for index,conf_name in enumerate(ConfigNameDict):
       #print "#####################################"
+      #print "index", index
       #print " dict slice, RunType ", conf_name, ConfigRunTypeDict[index]
       #print "config code from dict ", ConfigCodeDict[index]
       #print "dict hash ", ConfigCodeHashDict[index]
+      #print "ConfigRecostepDict[index] ", ConfigRecostepDict[index]
 
       # get active key conf and compare hash of config code:
-
-      dbConfigId, dbConfigCode, dbConfigScript,dbConfigCodeHash,dbConfigScriptHash=getConfigCodeForMCK('oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, ConfigRecostepDict[index])
+      #print "'oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, ConfigRecostepDict[index]",'oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, ConfigRecostepDict[index]
+      dbConfigId, dbConfigCode, dbConfigScript, dbConfigCodeHash, dbConfigScriptHash=getConfigCodeForMCK('oracle', activeKeyMCK, ConfigRunTypeDict[index], conf_name, ConfigRecostepDict[index])
   
       dbConfigId_check.append(dbConfigId)
 
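
The hash comparison referred to in the comments above presumably follows the same recipe as __get_config_hash__ in MenuAwareMonitoring.py below (a stable JSON dump of the configuration, then sha512). A standalone sketch with a made-up configuration dictionary:

import hashlib
import json

# made-up configuration dictionary, shaped like the 'Config' entries in the slice dictionaries
example_config = {"ZTPPtCone20RelCut": 0.15}

# stable JSON string first (sorted keys), then a sha512 hex digest
config_json = json.dumps(example_config, ensure_ascii=True, sort_keys=True)
config_hash = hashlib.sha512(config_json.encode()).hexdigest()
print config_hash
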
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonFlags.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonFlags.py
index 3afdaf8c13cfac1773c36ffe646e2e5bd31589c7..d941ab08e84a90281f82a0d95f97f9a47f3cd6e1 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonFlags.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/HLTMonFlags.py
@@ -22,6 +22,20 @@ class doGeneral(JobProperty):
 	StoredValue=True
 list+=[doGeneral]
 
+class doMaM(JobProperty):
+	""" Switch for menu-aware monitoring """
+	statusOn=True
+	allowedTypes=['bool']
+	StoredValue=False
+list+=[doMaM]
+
+class MCK(JobProperty):
+	""" Monitoring Configuration Key (MCK) for menu-aware monitoring """
+	statusOn=True
+	allowedTypes=['int']
+	StoredValue=-1
+list+=[MCK]
+
 class doEgamma(JobProperty):
 	""" Egamma switch for monitoring """
 	statusOn=True
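
A sketch of how the two new flags might be set from job options, assuming the usual HLTMonFlags container import; exactly how menu-aware monitoring consumes them is defined elsewhere, so treat this as illustrative only.

from TrigHLTMonitoring.HLTMonFlags import HLTMonFlags

HLTMonFlags.doMaM.set_Value_and_Lock(True)   # switch menu-aware monitoring on
HLTMonFlags.MCK.set_Value_and_Lock(100)      # hypothetical MCK number; the default of -1 means no MCK chosen
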
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py
new file mode 100644
index 0000000000000000000000000000000000000000..2119ba36119af85c99aea1f1aebe28bea8ccc25e
--- /dev/null
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/MenuAwareMonitoring.py
@@ -0,0 +1,2093 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+#
+# Author: Ben Smart (bsmart@cern.ch)
+#
+
+import sys
+# import oracle interaction class
+from TrigHLTMonitoring.OracleInterface import OracleInterface
+# import tool interrogator
+from TrigHLTMonitoring.ToolInterrogator import ToolInterrogator
+# needed to access the tools in ToolSvc
+from AthenaCommon.AppMgr import ToolSvc
+# use rec to find out if we are making ESDs and/or AODs
+from RecExConfig.RecFlags import rec
+# import subprocess. Required to get Athena version (there must be a better way!)
+import subprocess
+# import hash library for generating smck config hashes
+import hashlib
+# import json for converting configuration dictionaries into strings for hashing
+import json
+
+# all Menu-Aware Monitoring stuff in one class
+# it uses OracleInterface to talk to the Oracle database
+# and ToolInterrogator to talk to the local tools
+class MenuAwareMonitoring:
+    """Menu-aware Monitoring class.
+    Provides functionality for the upload/download of trigger-monitoring tool configurations to an Oracle database,
+    as well as reading in of configurations from locally running tools, and application of configurations to these tools."""
+
+
+    def __init__(self):
+        """Setup Menu-aware Monitoring,
+        find locally running trigger-monitoring tools,
+        connect to the Oracle database,
+        and get the current default from the database (if it exists)."""
+
+        # MaM code version
+        self.version = 1.1
+
+        # flag for setting whether to print out anything to screen or not
+        self.print_output = True
+
+        # flag so that diff instructions are only printed once
+        self.firstdiff = True
+
+        # create oracle interaction object
+        self.oi = OracleInterface()
+
+        # create tool interrogator object
+        self.ti = ToolInterrogator()
+
+        # holder for current local Athena version
+        self.current_athena_version = ""
+        self.__get_athena_version__()
+
+        # holder for current user
+        self.current_user = ""
+        self.__get_current_user__()
+
+        # holder for default global_info
+        self.default_global_info = {}
+
+        # holder for current local global_info
+        self.local_global_info = {}
+        self.local_global_info['MONITORING_TOOL_DICT'] = {}
+
+        # pointer to local tool info
+        self.local = self.local_global_info['MONITORING_TOOL_DICT']
+
+        # automatically fill current local tool info
+        self.get_current_local_info()
+
+        # flag to record if we have connected to Oracle
+        self.connected_to_oracle = False
+
+        # now connect to oracle
+        self.__connect_to_oracle__()
+
+        # fill default global info (if available)
+        self.get_default_from_db()
+
+        # print guide for user if this is an interactive session
+        if self.__is_session_interactive__():
+
+            # print guide for user
+            # TODO
+            print ""
+            print "Author of this code: Ben Smart (bsmart@cern.ch)"
+            print "This is Menu-aware monitoring version",self.version
+            print ""
+
+
+    def __connect_to_oracle__(self):
+        "Connect to the Oracle server."
+
+        # if we are already connected
+        if self.connected_to_oracle:
+
+            # info for user
+            print "We are already connected to the Oracle database"
+
+        # else if we are not
+        else:
+        
+            # info for user
+            print "We are now connecting to the Oracle database"
+
+            # try catch
+            try:
+
+                # connect to oracle
+                self.oi.connect_to_oracle()
+
+                # flag to record that we have connected
+                self.connected_to_oracle = True
+
+            except:
+
+                # info for user
+                print "Error while connecting to Oracle database. Exiting."
+            
+                # exit, otherwise the program will crash later when Oracle database features are required
+                sys.exit(1)
+
+
+    def __disconnect_from_oracle__(self):
+        "Disconnect from the Oracle server."
+
+        # if we are connected to Oracle
+        if self.connected_to_oracle:
+
+            # info for user
+            print "We are now disconnecting from the Oracle database"
+
+            # disconnect from oracle
+            self.oi.disconnect_from_oracle()
+
+
+    def __quiet_output__(self):
+        """Print no output.
+        This can be overwritten for a single function by passing print_output_here=True to that function."""
+
+        # print nothing to screen
+        self.print_output = False
+
+
+    def __verbose_output__(self):
+        """Print all output (default).
+        This can be overwritten for a single function by passing print_output_here=False to that function."""
+
+        # print output to screen by default
+        self.print_output = True
+
+
+    def __get_current_user__(self):
+        "Get the current user."
+
+        # get the current user
+        self.current_user = subprocess.check_output("echo $USER", shell=True).replace("\n","")
+
+
+    def __get_athena_version__(self):
+        "Get the current Athena version."
+
+        # get the current local Athena version (there must be a better way!)
+        self.current_athena_version = subprocess.check_output("echo $AtlasVersion", shell=True).replace("\n","")
+
+
+    def __unicode_to_str__(self,input1=""):
+        "Input a unicode string, list, or dict, and convert all unicode to str."
+
+        # test for the type of input1
+        # if need be, recursively call this function
+        # where unicode is found, convert it to str
+        if type(input1) is str:
+            return input1
+        elif type(input1) is unicode:
+            return str(input1)
+        elif type(input1) is list:
+            for n, item in enumerate(input1):
+                input1[n] = self.__unicode_to_str__(item)
+            return input1
+        elif type(input1) is dict:
+            return_dict = {}
+            for key, value in input1.iteritems():
+                return_dict[str(key)] = self.__unicode_to_str__(value)
+            return return_dict
+        else:
+            return input1
+
+
+    def __get_config_hash__(self,smck_config=""):
+        "Return a sha512 hash of the input SMCK configuration."
+
+        # if the input is not a string, make it one
+        if type(smck_config) is not str:
+
+            # use json to turn the config into a string
+            smck_config_json = json.dumps(smck_config, ensure_ascii=True, sort_keys=True)
+
+        # else if it is a string
+        else:
+
+            # use input string as string to hash
+            print "__get_config_hash__ has been passed a string, rather than an smck_config dict. This is unexpected, so you are being notified in case something has gone wrong. If it has, please contact the author: Ben Smart bsmart@cern.ch"
+            print "input =",smck_config
+            smck_config_json = smck_config
+
+        # calculate hash
+        return_hash = hashlib.sha512(smck_config_json.encode()).hexdigest()
+
+        # return hash
+        return return_hash
+
+
+    def list_local_tools_read_in(self):
+        """List local running trigger-monitoring tools that have been discovered by the Tool Interrogator.
+        These tool configurations are available in the <ThisVariable>.local dictionary."""
+
+        # info for user
+        print "The following local tools have had their configurations extracted by the ToolInterrogator:"
+
+        # list the tools in self.local
+        for tool in self.local.keys():
+            print tool
+
+
+    def __is_input_a_valid_current_processing_step_to_use__(self,input1="",print_output_here=""):
+        "Is input1 ('ALL', 'ESD', or 'AOD') valid for the current Athena running stage."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # returns True or False depending on whether the input is valid to be used in the current stage of Athena running
+
+        # if no input, then return false
+        if input1 == "":
+            return False
+
+        # if input is ALL, then return true
+        if input1 == "ALL":
+            return True
+
+        # if input is ESD, and doESD is true, then return true
+        if input1 == "ESD":
+            return rec.doESD
+
+        # if input is AOD, and doAOD is true, then return true
+        if input1 == "AOD":
+            return rec.doAOD
+
+        # if we have reached this far, then the input has not been recognised
+        if print_output_here:
+            print "The processing step",input1,"has not been recognised. Valid options are ALL, ESD, and AOD."
+
+
+    def get_default_mck_id_from_db(self,input_athena_version=""):
+        """Get the MCK number (MCK_ID) of the default for this Athena version.
+        If input_athena_version=='', the current Athena version is used."""
+
+        # if no input Athena version is provided, then use the current version
+        if input_athena_version == "":
+            input_athena_version = self.current_athena_version
+
+        # search for default mck
+        return self.oi.read_default_mck_id_from_db(input_athena_version)
+
+
+    def get_default_from_db(self,print_output_here=""):
+        """Prints default MCK number (MCK_ID) for this Athena version.
+        All default information is made available in the <ThisVariable>.default_global_info dictionary."""
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # info for user
+        if print_output_here:
+            print "Attempting to get default tool configuration from database"
+
+        # search for default mck
+        default_mck = self.get_default_mck_id_from_db()
+
+        # if a valid default mck exists
+        if default_mck >= 0:
+
+            # info for user
+            if print_output_here:
+                print "Default mck for this Athena version ("+self.current_athena_version+") is",default_mck
+
+            # fill self.default_global_info
+            self.default_global_info = self.get_global_info_from_db(default_mck)
+
+        # if there is no default for this Athena version
+        else:
+
+            # info for user
+            if print_output_here:
+                print "No default for this Athena version ("+self.current_athena_version+") has been uploaded"
+                print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.upload_default()\""
+
+
+    def get_global_info_from_db(self,mck_id):
+        "For an input MCK number (MCK_ID), get all related MCK and SMCK info, and return it as a dictionary."
+
+        # get mck_info for this mck
+        global_info = {}
+        global_info['MONITORING_TOOL_DICT'] = {}
+        global_info['MCK'] = self.oi.read_mck_info_from_db(mck_id)
+
+        # get mck_to_smck links (list of smck_id that this mck links to)
+        smck_ids = self.oi.read_mck_links_from_db(mck_id)
+
+        # loop over smck_ids and get smck_info
+        for smck_id in smck_ids:
+
+            # get smck_info
+            smck_info = self.oi.read_smck_info_from_db(smck_id)
+
+            # generate monitoring_tool_dict key, a combination of the tool name and the processing step (if not ALL)
+            smck_key = smck_info['SMCK_TOOL_TYPE']
+            if smck_info['SMCK_PROCESSING_STEP'] != "ALL":
+                smck_key += "_"+smck_info['SMCK_PROCESSING_STEP']
+
+            # add this smck_info to monitoring_tool_dict
+            global_info['MONITORING_TOOL_DICT'][smck_key] = smck_info
+
+        # return global_info
+        return global_info
+
+
+    def __get_smck_id_from_smck_identifier__(self,smck_identifier,print_output_here=""):
+        "Input can either be an SMCK_ID or an SMCK_TOOL_PATCH_VERSION. Output will be the the correct SMCK_ID."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # test if smck_identifier can be an int (smck_id)
+        try:
+
+            # input could be 1.0 or 1, etc.
+            smck_id = int(float(smck_identifier))
+
+            # if we've got this far then we at least have an int
+            # but is it a valid smck_id?
+            smck_info = self.oi.read_smck_info_from_db(smck_id)
+
+            # if smck_id is not valid, then smck_info will equal -1
+            if smck_info == -1:
+
+                # print warning and return -1, as the smck_identifier is not valid
+                if print_output_here:
+                    print "No SMCK found with",smck_identifier
+                return -1
+
+            # else if it has been found, then return smck_id as it is valid smck_id
+            else:
+
+                # return smck_id
+                return smck_id
+
+        # if smck_identifier is not a number
+        except ValueError:
+
+            # get smck_id
+            smck_id = self.oi.get_smck_id_from_smck_tool_patch_version(smck_identifier)
+
+            # if no smck_id is found
+            if smck_id == -1:
+
+                # print warning and return -1
+                if print_output_here:
+                    print "No SMCK found with",smck_identifier
+                return -1
+
+            # else if it has been found, then return smck_id
+            else:
+
+                # return smck_id
+                return smck_id
+
+
+    def get_smck_info_from_db(self,smck_identifier,print_output_here=""):
+        "Get an SMCK configuration from the Oracle database."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # translate smck_identifier into an smck_id
+        smck_id = self.__get_smck_id_from_smck_identifier__(smck_identifier)
+
+        # if an smck_id has not been found
+        if smck_id == -1:
+
+            # return -1, as no smck_info can be obtained
+            return -1
+
+        else:
+
+            # get smck_info
+            smck_info = self.oi.read_smck_info_from_db(smck_id)
+
+            # return this smck_info
+            return smck_info
+
+
+    def get_current_local_info(self,print_output_here=""):
+        """Use the Tool Interrogator to find locally running trigger-monitoring tools.
+        These tool configurations are made available in the <ThisVariable>.local dictionary."""
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # info for user
+        if print_output_here:
+            print "Attempting to get configurations of all locally running trigger-monitoring tools"
+
+        # get list of locally running monitoring tools
+        mon_tools = self.ti.get_available_trigger_monitoring_tools()
+
+        # print info about available local tools
+        if len(mon_tools) > 0:
+            if print_output_here:
+                print "The following trigger-monitoring tools have been found locally:"
+
+        # get smck_config for each of these tools
+        for tool in mon_tools:
+
+            # get smck_config
+            smck_config = self.ti.get_smck_config_from_ToolSvc_tool_name(tool)
+
+            # check that smck_config has been found
+            # if not (Athena oddities can cause this) then skip to the next tool
+            # only correct, valid trig mon tools should survive this process
+            if smck_config == -1:
+                continue
+
+            # construct smck_info for this tool
+            smck_info = {}
+            smck_info['SMCK_CONFIG'] = smck_config 
+            smck_info['SMCK_CONFIG_HASH'] = self.__get_config_hash__(smck_config)
+            smck_info['SMCK_SLICE_TYPE'] = smck_config['SliceType']
+            smck_info['SMCK_TOOL_TYPE'] = tool
+            smck_info['SMCK_ATHENA_VERSION'] = self.current_athena_version
+
+            # add this info to the local_global_info
+            self.local_global_info['MONITORING_TOOL_DICT'][tool] = smck_info
+
+            # print info about this tool
+            if print_output_here:
+                print ""
+                print "ToolSvc."+tool
+                print "The extracted data of this tool is stored in <ThisVariable>.local['"+tool+"']"
+                print "This can be passed to MaM methods with the string '"+tool+"'"
+
+        # add nice spacing if we have been printing tool info
+        if len(mon_tools) > 0:
+            if print_output_here:
+                print ""
+                print "The extracted data of all local trigger-monitoring tools is stored in <ThisVariable>.local"
+                print "All local trigger-monitoring tools can be passed together as an 'MCK' to MaM diff and search methods with the string 'LOCAL'"
+                print ""
+
+
+    def setup_all_local_tools(self):
+        "Setup all local trigger-monitoring tools and runs get_current_local_info() to read them in using the Tool Interrogator."
+
+        # setup all local packages listed in PackagesToInterrogate via ToolInterrogator
+        self.ti.load_all_tools()
+
+        # we probably want to read in these tools with the Tool Interrogator
+        self.get_current_local_info()
+
+
+    def upload_default(self,comment="",print_output_here=""):
+        "Upload all current trigger-monitoring tool configurations as default for this Athena version."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+        
+        # search for default mck
+        default_mck = self.get_default_mck_id_from_db()
+
+        # if it already exists
+        if default_mck >= 0:
+
+            if print_output_here:
+                print "There already exists a default mck for this Athena version:"
+                print "Athena version: "+self.current_athena_version
+                print "Default MCK:",default_mck
+
+        else:
+
+            # ensure that all tools are loaded
+            self.setup_all_local_tools()
+
+            # get current local info
+            # this is now done inside self.setup_all_local_tools() so no need to repeat it
+            #self.get_current_local_info()
+
+            # if no comment is provided, then ask for one
+            if comment=="":
+                comment = self.__ask_for_comment__()
+
+            # fill mck_info
+            self.local_global_info['MCK'] = {}
+            self.local_global_info['MCK']['MCK_DEFAULT'] = 1
+            self.local_global_info['MCK']['MCK_ATHENA_VERSION'] = self.current_athena_version
+            self.local_global_info['MCK']['MCK_CREATOR'] = self.current_user
+            self.local_global_info['MCK']['MCK_COMMENT'] = comment
+
+            # for each local tool
+            for tool, smck_info in self.local.iteritems():
+
+                # fill smck_info
+                smck_info['SMCK_PROCESSING_STEP'] = "ALL"
+                smck_info['SMCK_DEFAULT'] = 1
+                smck_info['SMCK_CREATOR'] = self.current_user
+                smck_info['SMCK_COMMENT'] = comment
+
+            # upload this self.local_global_info to the database, and get the new mck_id and smck_ids
+            mck_id, smck_ids = self.oi.upload_mck_and_smck(self.local_global_info)
+
+            # info for user
+            if print_output_here:
+                print "The default for this Athena version ("+self.current_athena_version+") has been uploaded"
+                print "It has been given the MCK",mck_id
+                print ""
+                print "The following tools have had their current configurations uploaded as defaults:"
+                print ""
+                for smck_id in smck_ids:
+                    smck_info = self.oi.read_smck_info_from_db(smck_id)
+                    print "Tool:",smck_info['SMCK_TOOL_TYPE']
+                    print "SMCK:",smck_id
+                    print "SMCK tool patch version:",smck_info['SMCK_TOOL_PATCH_VERSION']
+                    print ""
+
+
+    def upload_smck(self,input1="",processing_step="",comment="",print_output_here=""):
+        """Upload local configuration for tool 'input1' as an SMCK.
+        If input1=='local', then all local configuration changes wrt the default will be uploaded.
+        Optional processing step and comment can be provided."""
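+        # Illustrative usage (a sketch; the tool name and comment are placeholders):
+        #   <ThisVariable>.upload_smck('HLTMuonMon',processing_step='ESD',comment="tighter muon eta binning")
+        # uploads only the local changes of ToolSvc.HLTMuonMon with respect to the default, as a new SMCK valid for the ESD step.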
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # search for default mck
+        default_mck = self.get_default_mck_id_from_db()
+
+        # if the default does not exist
+        if default_mck < 0:
+
+            # info for user
+            if print_output_here:
+                print "No default for this Athena version ("+self.current_athena_version+") has been uploaded"
+                print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.upload_default()\""
+            return
+
+        # if input is local, then run upload_all_local_changes_as_smck()
+        if input1 == 'local':
+
+            if print_output_here:
+                print "You have provided the input 'local'. All local changes wrt the default will be uploaded."
+            self.upload_all_local_changes_as_smck(processing_step,comment)
+
+        # get all local tool info 
+        #self.get_current_local_info()
+
+        # check if input1 has been read in as a locally running tool (by get_current_local_info())
+        if not self.local_global_info['MONITORING_TOOL_DICT'].__contains__(input1):
+
+            # this tool has not been loaded
+            # exit and suggest to the user how to start the tool, if they so wish
+            if print_output_here:
+                print "Tool",input1,"is not currently set up locally, so can not have its local configuration uploaded as an SMCK."
+                print "To list all local tools currently set up and read in, please run \"<ThisVariable>.list_local_tools_read_in()\""
+                print "To set up and read in all trigger monitoring tools locally, please run \"<ThisVariable>.setup_all_local_tools()\""
+            return
+
+        # get local smck_info for tool input1
+        local_smck_info = self.local_global_info['MONITORING_TOOL_DICT'][input1]
+
+        # get default from database
+        # (should already have been done during __init__, 
+        # but in case the default has only been uploaded in this session then we check again)
+        self.get_default_from_db()
+
+        # get default smck_info
+        default_smck_info = self.default_global_info['MONITORING_TOOL_DICT'][input1]
+        
+        # create diff of smck_info
+        # we want diffed_smck_info2, 
+        # which is the 'patch' to apply to the default to get the current local configuration
+        diffed_smck_info1, diffed_smck_info2 = self.__calculate_diff__(default_smck_info,local_smck_info,False)
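+        # for example, if only a single property of this tool differs locally from the default,
+        # then diffed_smck_info2 contains only the entries needed to describe that difference
+        # (plus the bookkeeping fields filled in below)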
+
+        # if there are no local differences wrt the default, then we upload nothing and exit
+        if diffed_smck_info2 == {}:
+            
+            # info for user
+            if print_output_here:
+                print "No local differences have been found with respect to the default SMCK (SMCK_ID="+str(default_smck_info['SMCK_ID'])+") for this tool ("+str(input1)+"), for this Athena version ("+self.current_athena_version+")."
+                print "Nothing shall be uploaded to the Oracle database as a result."
+            return
+
+        # check if this SMCK already exists
+
+        # if no processing_step is provided, then ask for one
+        if processing_step=="":
+            processing_step = self.__ask_for_processing_step__()
+
+        # if no comment is provided, then ask for one
+        if comment=="":
+            comment = self.__ask_for_comment__()
+
+        # fill extra smck_info
+        diffed_smck_info2['SMCK_PROCESSING_STEP'] = processing_step
+        diffed_smck_info2['SMCK_TOOL_TYPE'] = input1
+        diffed_smck_info2['SMCK_SLICE_TYPE'] = local_smck_info['SMCK_SLICE_TYPE']
+        diffed_smck_info2['SMCK_DEFAULT'] = 0
+        diffed_smck_info2['SMCK_ATHENA_VERSION'] = self.current_athena_version
+        diffed_smck_info2['SMCK_CREATOR'] = self.current_user
+        diffed_smck_info2['SMCK_COMMENT'] = comment
+
+        # upload smck_info (diffed_smck_info2)
+        new_smck_id = self.oi.upload_smck(diffed_smck_info2)
+
+        # info for user
+        if print_output_here:
+
+            # print new smck_id and smck_tool_patch_version
+            print "This is SMCK (SMCK_ID)",new_smck_id
+            print "with SMCK_TOOL_PATCH_VERSION",diffed_smck_info2['SMCK_TOOL_PATCH_VERSION']
+
+        # if we are running silently, still return the smck_id and smck_tool_patch_version 
+        # (ie. in case this function has been called by another function, which might like to know the smck_id and smck_tool_patch_version)
+        else:
+            return new_smck_id, diffed_smck_info2['SMCK_TOOL_PATCH_VERSION']
+
+
+    def upload_all_local_changes_as_smck(self,processing_step="",comment="",print_output_here=""):
+        """Upload all local configuration changes wrt the default.
+        Optional processing step and comment can be provided."""
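+        # Illustrative usage (a sketch; the arguments are placeholders):
+        #   <ThisVariable>.upload_all_local_changes_as_smck('AOD',"new thresholds for all slices")
+        # uploads one SMCK per locally modified tool, and an MCK linking those SMCK together.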
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # search for default mck
+        default_mck = self.get_default_mck_id_from_db()
+
+        # if the default does not exist
+        if default_mck < 0:
+
+            # info for user
+            if print_output_here:
+                print "No default for this Athena version ("+self.current_athena_version+") has been uploaded"
+                print "If you are not running with any local changes to the default, then consider running the command \"<ThisVariable>.upload_default()\""
+            return
+
+        # get all local tool info 
+        #self.get_current_local_info()
+
+        # if no tools are running locally
+        if len(self.local_global_info['MONITORING_TOOL_DICT']) == 0:
+
+            # exit and suggest to the user how to start the tools, if they so wish
+            if print_output_here:
+                print "No trigger monitoring tools are currently set up locally and read in, so they can not have their local configurations uploaded as SMCK."
+                print "To set up and read in all trigger monitoring tools locally, please run \"<ThisVariable>.setup_all_local_tools()\""
+                print "To read in all local tools currently set up, please run \"<ThisVariable>.get_current_local_info()\""
+                print "To list all local tools currently set up and read in, please run \"<ThisVariable>.list_local_tools_read_in()\""
+
+            return
+
+        # get default from database
+        # (should already have been done during __init__, 
+        # but in case the default has only been uploaded in this session then we check again)
+        self.get_default_from_db()
+
+        # create diff of global_info
+        # we want diffed_global_info2, 
+        # which is the 'patch' to apply to the default to get the current local configuration
+        diffed_global_info1, diffed_global_info2 = self.__calculate_diff__(self.default_global_info,self.local_global_info,False)
+
+        # if there are no local differences wrt the default, then we upload nothing and exit
+        if diffed_global_info2 == {}:
+            
+            # info for user
+            if print_output_here:
+                print "No local differences have been found with respect to the default MCK ("+str(default_mck)+") for this Athena version ("+self.current_athena_version+")."
+                print "Nothing shall be uploaded to the Oracle database as a result."
+            return
+
+        # if no processing_step is provided, then ask for one
+        if processing_step=="":
+            processing_step = self.__ask_for_processing_step__()
+
+        # if no comment is provided, then ask for one
+        if comment=="":
+            comment = self.__ask_for_comment__()
+
+        # fill extra mck_info
+        diffed_global_info2['MCK'] = {}
+        diffed_global_info2['MCK']['MCK_DEFAULT'] = 0
+        diffed_global_info2['MCK']['MCK_ATHENA_VERSION'] = self.current_athena_version
+        diffed_global_info2['MCK']['MCK_CREATOR'] = self.current_user
+        diffed_global_info2['MCK']['MCK_COMMENT'] = comment
+
+        # fill extra smck_info for all tools
+        for tool_key, tool_value in diffed_global_info2['MONITORING_TOOL_DICT'].iteritems():
+
+            # fill extra smck_info
+            tool_value['SMCK_PROCESSING_STEP'] = processing_step
+            tool_value['SMCK_TOOL_TYPE'] = tool_key
+            tool_value['SMCK_SLICE_TYPE'] = self.local_global_info['MONITORING_TOOL_DICT'][tool_key]['SMCK_SLICE_TYPE']
+            tool_value['SMCK_DEFAULT'] = 0
+            tool_value['SMCK_ATHENA_VERSION'] = self.current_athena_version
+            tool_value['SMCK_CREATOR'] = self.current_user
+            tool_value['SMCK_COMMENT'] = comment
+
+        # upload global_info (diffed_global_info2)
+        new_mck_id, new_smck_ids = self.oi.upload_mck_and_smck(diffed_global_info2)
+
+        # info for user
+        if print_output_here:
+            
+            # print smck info
+            print "The following SMCK have been created:"
+            print ""
+
+            # note: diffed_global_info2['MONITORING_TOOL_DICT'] does not appear to contain the new smck_tool_patch_version values,
+            # so we loop over the returned new_smck_ids and read each smck_info back from the database instead
+            #for tool_key, tool_value in diffed_global_info2['MONITORING_TOOL_DICT'].iteritems():
+            for smck_id in new_smck_ids:
+
+                # get smck_info
+                tool_value = self.get_smck_info_from_db(smck_id)
+
+                # print info for this smck
+                print "SMCK_ID =",tool_value['SMCK_ID']
+                print "SMCK_TOOL_PATCH_VERSION =",tool_value['SMCK_TOOL_PATCH_VERSION']
+
+            # also print mck info
+            print ""
+            print "For convenience, an MCK has also been created automatically, linking to the above SMCK."
+            print "MCK_ID =",new_mck_id
+            print ""
+
+        # if we are running silently, still return the new_mck_id and new_smck_ids
+        # (ie. in case this function has been called by another function, which might like to know the new_mck_id and new_smck_ids)
+        else:
+
+            return new_mck_id, new_smck_ids
+
+
+    def upload_mck(self,input_smck_list=[],comment="",print_output_here=""):
+        """input_smck_list should be a list of SMCK, identified be either their SMCK_ID or SMCK_TOOL_PATCH_VERSION.
+        An MCK will be uploaded, linking to these SMCK.
+        Optional comment can be provided."""
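+        # Illustrative usage (a sketch; the SMCK identifiers and comment are placeholders):
+        #   <ThisVariable>.upload_mck([17,'HLTMuonMon_1'],"combined muon monitoring patches")
+        # creates a new MCK linking SMCK 17 and the SMCK with SMCK_TOOL_PATCH_VERSION 'HLTMuonMon_1'.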
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # if the input is empty, then we exit
+        if input_smck_list == []:
+
+            # info for user
+            if print_output_here:
+                print "No list of SMCK has been provided."
+                print "No MCK upload is possible without a list of SMCK."
+            return
+
+        # holder for smck_ids
+        smck_ids = []
+
+        # loop over input
+        for smck_identifier in input_smck_list:
+
+            # get smck_id
+            smck_id = self.__get_smck_id_from_smck_identifier__(smck_identifier)
+            
+            # if an smck_id has not been found
+            if smck_id == -1:
+
+                # info for user
+                if print_output_here:
+                    print "Problem with requested SMCK",smck_identifier
+            
+            # add this smck_id to the smck_ids list
+            smck_ids.append(smck_id)
+
+
+        # check if all smck_id have been found
+        # TODO - CHECK THAT THE -1 CRITERIA IS CORRECT (ie do we need '-1' instead? I don't think so)
+        if smck_ids.__contains__(-1):
+
+            # one or more smck are missing. Abort mck upload
+            if print_output_here:
+                print "One or more of the SMCK requested are missing. Aborting MCK upload."
+            return
+
+        # check that the mck does not already exist
+        mck_id = self.oi.check_if_exact_mck_exists(smck_ids)
+
+        # if mck does already exist, then say so and exit
+        if mck_id != -1:
+
+            # info for user
+            if print_output_here:
+                print "This MCK already exists. It is MCK",mck_id
+            return
+        
+        # if no comment is provided, then ask for one
+        if comment=="":
+            comment = self.__ask_for_comment__()
+
+        # fill mck_info
+        mck_info = {}
+        mck_info['MCK_DEFAULT'] = 0
+        mck_info['MCK_ATHENA_VERSION'] = self.current_athena_version
+        mck_info['MCK_CREATOR'] = self.current_user
+        mck_info['MCK_COMMENT'] = comment
+
+        # upload mck_info
+        mck_id = self.oi.upload_mck(mck_info)
+
+        # link this mck to the smck_ids
+        for smck_id in smck_ids:
+
+            # upload this link
+            self.oi.upload_mck_smck_link(mck_id,smck_id)
+
+        # info for user
+        if print_output_here:
+            print "This MCK has been uploaded. It is MCK",mck_id
+
+
+    def __ask_for_comment__(self):
+        "If running interactively, ask user for an upload comment."
+
+        # is this session interactive? If not, return ""
+        if self.__is_session_interactive__():
+
+            # info for user
+            print "Please enter a comment to be attached to this upload to the database."
+            print "The comment is limited in length to 1000 characters"
+
+            # now get user input
+            user_input = raw_input("comment: ")
+
+            # check that comment length is valid
+            while len(user_input) > 1000:
+
+                # warn user that their input is too long, and give them the chance to re-enter a comment
+                print "Your comment was too long (it was",len(user_input),"characters long). Please enter a shorter comment."
+                
+                # get user input
+                user_input = raw_input("comment: ")
+
+            # return valid comment
+            return user_input
+
+        else:
+
+            # this is not an interactive session, so we can not ask for input
+            return ""
+
+
+    def __ask_for_processing_step__(self):
+        """If running interactively, ask user for the processing step(s) an upload will be valid for.
+        'ALL' is the default."""
+        
+        # is this session interactive? If not, return "ALL"
+        if self.__is_session_interactive__():
+
+            # info for user
+            print "Please specify which processing step(s) you wish this input to be for."
+            print "The default is 'ALL' for all processing steps."
+            print "Valid options are 'ESD', 'AOD', and 'ALL'."
+            print "Hit <enter> without any input to select the default option (ALL)"
+
+            # now get user input
+            user_input = raw_input("processing step: ").upper()
+
+            # if input is blank, interpret this as 'ALL'
+            if user_input == "":
+                user_input = "ALL"
+
+                # confirmation to user
+                print "You have selected",user_input
+
+            # valid input
+            # if need be, then this list can be extended at a later date
+            valid_input = ['ALL','ESD','AOD']
+
+            # check for valid input
+            # if input is not valid, ask for it again
+            while not valid_input.__contains__(user_input):
+
+                # warning to user that input was not understood
+                print "The input",user_input,"was not recognised. Please specify a valid option."
+
+                # get user input
+                user_input = raw_input("processing step: ")
+
+                # if input is blank, interpret this as 'ALL'
+                if user_input == "":
+                    user_input = "ALL"
+
+                    # confirmation to user
+                    print "You have selected",user_input
+
+            # return selected processing step
+            return user_input
+
+        else:
+
+            # this is not an interactive session, so we can not ask for input
+            return "ALL"
+
+    def __is_session_interactive__(self):
+        "Is this an interactive Athena session."
+
+        # check if this is an interactive session
+        # (if -i was passed as a command-line argument)
+        return sys.argv.__contains__('-i')
+
+
+    def __print_one_dict__(self,dict1={},tab_space="",name1=""):
+        "Print a dictionary in a neatly formatted manner."
+
+        # loop over the keys (and values) in dict1
+        for key, value1 in dict1.iteritems():
+
+            # if the value is a dict
+            if type(value1) is dict:
+
+                # first print this dict key
+                print tab_space+name1+"['"+key+"']:"
+
+                # recursively call this function
+                # add some space to tab_space, to indent the sub-dictionary
+                self.__print_one_dict__(value1,"   "+tab_space,name1)
+
+            # if the value is a list
+            elif type(value1) is list:
+
+                # print the items nicely (no unicode u' features)
+                print tab_space+name1+"['"+key+"'] = [",
+                for n, item in enumerate(value1):
+                    if type(item) is unicode or type(item) is str:
+                        print "'"+item+"'",
+                    else:
+                        print item,
+                    if n != len(value1)-1:
+                        print ",",
+                print "]"
+
+            # else if this value is not a dict or a list, then we should print
+            else:
+                print tab_space+name1+"['"+key+"'] =",value1
+
+
+    def __print_two_dicts__(self,dict1={},dict2={},tab_space="",input1="",input2=""):
+        """Print two dictionaries in a neatly formatted manner.
+        Designed to be used with diff."""
+
+        # make sure inputs are strings
+        input1 = str(input1)
+        input2 = str(input2)
+
+        # make input names of equal length, for nice printout spacing
+        # if the inputs are already of the same length
+        if len(input1) == len(input2):
+
+            # set the names to print
+            name1 = input1
+            name2 = input2
+
+        # else if input 1 is longer than input 2
+        elif len(input1) > len(input2):
+
+            # set the names to print
+            # pad out the shorter one with spaces before it
+            name1 = input1
+            name2 = ((len(input1)-len(input2))*" ") + input2
+
+        # else if input 2 is longer than input 1
+        elif len(input1) < len(input2):
+
+            # set the names to print
+            # pad out the shorter one with spaces before it
+            name1 = ((len(input2)-len(input1))*" ") + input1
+            name2 = input2
+
+        # loop over the keys (and values) in dict1
+        for key, value1 in dict1.iteritems():
+
+            # if this key is in the second dictionary
+            if dict2.__contains__(key):
+
+                # value from dict2
+                value2 = dict2[key]
+
+                # if the value is a dict
+                if type(value1) is dict:
+
+                    # first print this dict key
+                    print tab_space+name1+"['"+key+"']:"
+                    print tab_space+name2+"['"+key+"']:"
+
+                    # recursively call this function
+                    # add some space to tab_space, to indent the sub-dictionary
+                    self.__print_two_dicts__(value1,value2,"   "+tab_space,name1,name2)
+
+                # if the value is a list
+                elif type(value1) is list:
+
+                    # print the items nicely (no unicode u' features)
+                    print tab_space+name1+"['"+key+"'] = [",
+                    for n, item in enumerate(value1):
+                        if type(item) is unicode or type(item) is str:
+                            print "'"+item+"'",
+                        else:
+                            print item,
+                        if n != len(value1)-1:
+                            print ",",
+                    print "]"
+                    print tab_space+name2+"['"+key+"'] = [",
+                    for n, item in enumerate(value2):
+                        if type(item) is unicode or type(item) is str:
+                            print "'"+item+"'",
+                        else:
+                            print item,
+                        if n != len(value2)-1:
+                            print ",",
+                    print "]"
+
+                # else if this value is not a dict or a list, then we should print
+                else:
+                    print tab_space+name1+"['"+key+"'] =",value1
+                    print tab_space+name2+"['"+key+"'] =",value2
+
+            # else if this key is only in dict1, then we print it anyway
+            else:
+
+                # if this is a dict
+                if type(value1) is dict:
+
+                    # then use __print_one_dict__ to print it
+                    self.__print_one_dict__(value1,tab_space,name1)
+
+                # else just print it
+                else:
+                    print tab_space+name1+"['"+key+"'] =",value1
+
+        # loop over the keys (and values) in dict2
+        for key, value2 in dict2.iteritems():
+
+            # if this key is not in the first dictionary
+            if not dict1.__contains__(key):
+
+                # if this is a dict
+                if type(value2) is dict:
+
+                    # then use __print_one_dict__ to print it
+                    self.__print_one_dict__(value2,tab_space,name2)
+
+                # else just print it
+                else:
+                    print tab_space+name2+"['"+key+"'] =",value2
+
+
+    def __calculate_diff__(self,dict1={},dict2={},diff_all=False):
+        """Calculate and return a diff between two inputs.
+        If diff_all==True, all items that are not identical in both inputs will be returned.
+        If diff_all==False (default), only items that are in both inputs, and different, will be returned."""
+
+        # by default (diff_all==False) only keys that are present in both dictionaries, with differing values, are returned
+        # for these keys, two dictionaries are returned: one with the values from dict1, one with the values from dict2
+        # if diff_all==True, keys that are unique to either input are also included in the corresponding return dictionary
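+        # A sketch of the intended behaviour (values purely illustrative):
+        #   dict1 = {'a':1,'b':2,'c':3}  and  dict2 = {'a':1,'b':4,'d':5}
+        #   diff_all=False -> ({'b':2}, {'b':4})              (shared, differing keys only)
+        #   diff_all=True  -> ({'b':2,'c':3}, {'b':4,'d':5})  (plus keys unique to each input)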
+
+        # dictionaries for filling and returning
+        return_dict1 = {}
+        return_dict2 = {}
+
+        # loop over the first dictionary
+        for key, value1 in dict1.iteritems():
+    
+            # if this key is in the second dictionary
+            if dict2.__contains__(key):
+
+                # get the value in the second dictionary
+                value2 = dict2[key]
+
+                # first check if the values are equal 
+                # (if they are equal, then we don't want to put anything for this key into the return dictionaries)
+                if str(value1) != str(value2):
+
+                    # are value1 and value2 dictionaries? if so, recursively call __calculate_diff__
+                    if type(value1) is dict and type(value2) is dict:
+                    
+                        # recursively call __calculate_diff__ and fill return dictionaries
+                        return_dict1_temp, return_dict2_temp = self.__calculate_diff__(value1,value2,diff_all)
+
+                        # check that the output is not identical
+                        # this catches the case when diff_all==False
+                        # where str(value1) != str(value2) because at least one of the dicts contains a key that the other doesn't
+                        # but all the matching keys have identical values
+                        # which causes the return_dicts to be equal (blank)
+                        if return_dict1_temp != return_dict2_temp:
+                        
+                            # fill return dictionaries
+                            return_dict1[key] = return_dict1_temp
+                            return_dict2[key] = return_dict2_temp
+
+                    else:
+
+                        # fill return dictionaries with the different values
+                        return_dict1[key] = value1
+                        return_dict2[key] = value2
+
+            # else if this key is not in the second dictionary
+            else:
+
+                #if we are adding all differences to the return dicts
+                if diff_all:
+
+                    # then add this key and value to the first return dict
+                    return_dict1[key] = value1
+
+        #if we are adding all differences to the return dicts
+        if diff_all:
+
+            # loop over the second dictionary
+            for key, value2 in dict2.iteritems():
+
+                # if this key is not in the first dictionary
+                if not dict1.__contains__(key):
+
+                    # then add this key and value to the second return dict
+                    return_dict2[key] = value2
+
+        # return the dictionaries
+        return return_dict1, return_dict2
+
+
+    def __is_input_a_local_tool__(self,input1=""):
+        "Is input1 a locally running trigger-monitoring tool that has been read by the Tool Interrogator."
+
+        # loop over loaded local tools
+        for tool_key in self.local.keys():
+
+            # if this tool matches the input
+            if tool_key == input1:
+
+                # then yes, this input is a local tool, so return True
+                return True
+
+        # if we've reached this point, then we've not found the input locally, so return False
+        return False
+
+
+    def __is_input_an_mck__(self,input1=""):
+        "Is input1 a valid MCK_ID."
+
+        # if the input is blank, then no, it's not an mck
+        if input1 == "":
+            return False
+
+        # search for this mck
+        mck_info = self.oi.read_mck_info_from_db(input1)
+
+        # if mck_info == -1 then we've not found an mck, so return False
+        if mck_info == -1:
+            return False
+        
+        # else this is a valid mck
+        else:
+            return True
+
+
+    def __is_input_an_smck__(self,input1="",print_output_here=""):
+        "Is input1 a valid SMCK_ID or SMCK_TOOL_PATCH_VERSION."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # if the input is blank, then no, it's not an smck
+        if input1 == "":
+            return False
+
+        # search for this smck
+        smck_id = self.__get_smck_id_from_smck_identifier__(input1,print_output_here)
+
+        # if smck_id == -1 then we've not found an smck, so return False
+        if smck_id == -1:
+            return False
+        
+        # else this is a valid smck
+        else:
+            return True
+
+
+    def diff(self,input1="",flag1="",input2="",flag2="",diff_all=False,print_output_here=""):
+        """Perform a diff between two inputs, and print the results.
+        If diff_all==True, all items that are not identical in both inputs will be returned.
+        If diff_all==False (default), only items that are in both inputs, and different, will be returned.
+        The inputs specify what things are to be diffed.
+        The flags specify what kind of inputs the inputs are."""
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        #info for user
+        if print_output_here:
+            if self.firstdiff:
+                print ""
+                print "diff takes four arguments: input1, flag1, input2, flag2."
+                print "An optional fifth argument can be provided, either True or False (False by default)."
+                print "If True, all items that are not identical in both inputs will be returned."
+                print "If False (default), only items that are in both inputs, and different, will be returned."
+                print ""
+                self.firstdiff = False
+
+        # input flags must be specified
+        # acceptable tags are SMCK and MCK
+
+        # valid flags
+        valid_flags = ['MCK','SMCK']
+
+        # if any of the inputs or flags are missing
+        if input1 == "" or flag1 == "" or input2 == "" or flag2 == "":
+
+            #info for user
+            if print_output_here:
+                print "diff takes four arguments: input1, flag1, input2, flag2."
+                print "The inputs specify what things are to be diffed."
+                print "The flags specify what the inputs are. Recognised flags are MCK and SMCK"
+                print "Locally running tools can be specified with their tool name, and the flag SMCK."
+                print "All locally running tools can be specified together with the input LOCAL, and the flag MCK."
+                print "Valid arguments therefore include:"
+                print "1,'MCK','LOCAL','MCK'"
+                print "1,'MCK',2,'MCK'"
+                print "1,'MCK','HLTMuonMon','SMCK'"
+                print "17,'SMCK','HLTMuonMon','SMCK'"
+
+            # if input1 has not been provided
+            if input1 == "":
+
+                #info for user
+                if print_output_here:
+                    print "The first input has not been provided. Please provide the first input."
+
+            # if flag1 has not been provided
+            if flag1 == "":
+
+                #info for user
+                if print_output_here:
+                    print "The first flag has not been provided. Please provide the first flag. Recognised flags are MCK and SMCK"
+
+            # if input2 has not been provided
+            if input2 == "":
+
+                #info for user
+                if print_output_here:
+                    print "The second input has not been provided. Please provide the second input."
+
+            # if flag2 has not been provided
+            if flag2 == "":
+
+                #info for user
+                if print_output_here:
+                    print "The second flag has not been provided. Please provide the second flag. Recognised flags are MCK and SMCK"
+
+            # since one or more inputs are missing, we exit
+            return
+
+        # if flag 1 is not a string
+        if type(flag1) is not str:
+
+            #info for user
+            if print_output_here:
+                print "The flag '"+flag1+"' is not a string. Recognised flags are MCK and SMCK"
+            return
+
+        # if input 1 does not have a valid flag
+        if not flag1.upper() in valid_flags:
+
+            #info for user
+            if print_output_here:
+                print "The flag '"+flag1+"' is not a recognised flag. Recognised flags are MCK and SMCK"
+            return
+
+        # normalise the flag, as the checks below are case-sensitive
+        flag1 = flag1.upper()
+
+        # check if input 1 is a valid mck
+        if flag1 == "MCK":
+
+            # if it is not a valid mck or local
+            is_1_local = False
+            if type(input1) is str:
+                if input1.upper() == "LOCAL":
+                    is_1_local = True
+            if is_1_local == False and not self.__is_input_an_mck__(input1):
+
+                #info for user
+                if print_output_here:
+                    print input1,"is not a valid MCK, or 'LOCAL' has not been selected."
+                return
+
+        # check if input 1 is a valid smck
+        elif flag1 == "SMCK":
+
+            # if it is not a valid smck or local tool
+            if not self.__is_input_an_smck__(input1,False) and not self.__is_input_a_local_tool__(input1):
+
+                #info for user
+                if print_output_here:
+                    print input1,"is not a valid SMCK or local tool."
+                return
+
+        # if flag 2 is not a string
+        if type(flag2) is not str:
+
+            #info for user
+            if print_output_here:
+                print "The flag '"+flag2+"' is not a string. Recognised flags are MCK and SMCK"
+            return
+
+        # if input 2 does not have a valid flag
+        if not flag2.upper() in valid_flags:
+
+            #info for user
+            if print_output_here:
+                print "The flag '"+flag2+"' is not a recognised flag. Recognised flags are MCK and SMCK"
+            return
+
+        # normalise the flag, as the checks below are case-sensitive
+        flag2 = flag2.upper()
+
+        # check if input 2 is a valid mck
+        if flag2 == "MCK":
+
+            # if it is not a valid mck or local
+            is_2_local = False
+            if type(input2) is str:
+                if input2.upper() == "LOCAL":
+                    is_2_local = True
+            if is_2_local == False and not self.__is_input_an_mck__(input2):
+
+                #info for user
+                if print_output_here:
+                    print input2,"is not a valid MCK, or 'LOCAL' has not been selected."
+                return
+
+        # check if input 2 is a valid smck
+        elif flag2 == "SMCK":
+
+            # if it is not a valid smck or local tool
+            if not self.__is_input_an_smck__(input2,False) and not self.__is_input_a_local_tool__(input2):
+
+                #info for user
+                if print_output_here:
+                    print input2,"is not a valid SMCK or local tool."
+                return
+
+        # if we've reached this far then the inputs have been parsed
+        # now we deal with the combinatorics of the options
+
+        # these are the dicts we will diff
+        diff_input1_dict = {}
+        diff_input2_dict = {}
+
+        # if input 1 is an mck
+        if flag1 == "MCK":
+
+            # if input 1 is local
+            if is_1_local:
+
+                # then use currently stored local global info
+                diff_input1_dict = self.local_global_info
+
+            # else input 1 is an mck
+            # (we know this because we checked if input 1 was a valid mck or 'local' above)
+            else:
+
+                # get the global info from the database
+                diff_input1_dict = self.get_global_info_from_db(input1)
+
+        # else input 1 is an smck
+        elif flag1 == "SMCK":
+
+            # we must either get the smck from the database, or locally if it is a local tool
+            # (it will be one or the other, as we have confirmed above)
+            # we then need to construct a temporary global info for the diff
+
+            # construct the temporary global info for the diff
+            diff_input1_dict['MONITORING_TOOL_DICT'] = {}
+
+            # is input 1 a local tool
+            if self.__is_input_a_local_tool__(input1):
+
+                # set the tool name
+                tool_name1 = input1
+
+                # get the tool from the locally stored config
+                diff_input1_dict['MONITORING_TOOL_DICT'][tool_name1] = self.local[tool_name1]
+
+            # else the input is an smck in the database
+            else:
+
+                # get the tool from the database
+                smck_info1 = self.get_smck_info_from_db(input1)
+
+                # set the tool name
+                tool_name1 = smck_info1['SMCK_TOOL_TYPE']
+
+                # get the tool from the database
+                diff_input1_dict['MONITORING_TOOL_DICT'][tool_name1] = smck_info1
+
+        # if input 2 is an mck
+        if flag2 == "MCK":
+
+            # if input 2 is local
+            if is_2_local:
+
+                # then use currently stored local global info
+                diff_input2_dict = self.local_global_info
+
+            # else input 2 is an mck
+            # (we know this because we checked if input 2 was a valid mck or 'local' above)
+            else:
+
+                # get the global info from the database
+                diff_input2_dict = self.get_global_info_from_db(input2)
+
+        # else input 2 is an smck
+        elif flag2 == "SMCK":
+
+            # we must either get the smck from the database, or locally if it is a local tool
+            # (it will be one or the other, as we have confirmed above)
+            # we then need to construct a temporary global info for the diff
+
+            # construct the temporary global info for the diff
+            diff_input2_dict['MONITORING_TOOL_DICT'] = {}
+
+            # is input 2 a local tool
+            if self.__is_input_a_local_tool__(input2):
+
+                # set the tool name
+                tool_name2 = input2
+
+                # get the tool from the locally stored config
+                diff_input2_dict['MONITORING_TOOL_DICT'][tool_name2] = self.local[tool_name2]
+
+            # else the input is an smck in the database
+            else:
+
+                # get the tool from the database
+                smck_info2 = self.get_smck_info_from_db(input2)
+
+                # set the tool name
+                tool_name2 = smck_info2['SMCK_TOOL_TYPE']
+
+                # get the tool from the database
+                diff_input2_dict['MONITORING_TOOL_DICT'][tool_name2] = smck_info2
+
+        # we now have our dictionaries for the diff
+        # lets do the diff
+        if flag1 == "MCK" and flag2 == "MCK":
+            diff_output1_dict, diff_output2_dict = self.__calculate_diff__(diff_input1_dict,diff_input2_dict,diff_all)
+        elif flag1 == "SMCK" and flag2 == "SMCK":
+            diff_output1_dict, diff_output2_dict = self.__calculate_diff__(diff_input1_dict['MONITORING_TOOL_DICT'][tool_name1],diff_input2_dict['MONITORING_TOOL_DICT'][tool_name2],diff_all)
+        elif flag1 == "MCK" and flag2 == "SMCK":
+            diff_output1_dict, diff_output2_dict = self.__calculate_diff__(diff_input1_dict,diff_input2_dict,diff_all)
+        elif flag1 == "SMCK" and flag2 == "MCK":
+            diff_output1_dict, diff_output2_dict = self.__calculate_diff__(diff_input1_dict,diff_input2_dict,diff_all)
+
+        # now we need to print this in a neat manner
+        if print_output_here:
+
+            # recursively print the diffs
+            self.__print_two_dicts__(diff_output1_dict,diff_output2_dict,"",input1,input2)
+
+        # else we assume the user wants the diffs returned
+        # (ie if this function has been called from within another function)
+        else:
+
+            # return the diffed dictionaries
+            return diff_output1_dict, diff_output2_dict
+
+
+    def print_database_schema(self):
+        "Print the tables and entries in the current Oracle database."
+
+        # get SQL table and column names
+        database_column_list = self.oi.get_db_tables_and_columns()
+
+        # list of tables
+        table_list = []
+
+        # construct list of tables
+        for line in database_column_list:
+
+            # if this table is not in the list
+            if not line['TABLE_NAME'].upper() in table_list:
+
+                # add it to the list
+                table_list.append(line['TABLE_NAME'].upper())
+
+        # info for user
+        print ""
+        print "The Menu-aware Monitoring Oracle database currently has the following format:"
+        print ""
+
+        # for each table
+        for table in table_list:
+
+            # print the table name
+            print "Table",table
+
+            # loop over the database_column_list
+            for column in database_column_list:
+
+                # if this column is in this table
+                if column['TABLE_NAME'].upper() == table:
+
+                    # print this column info
+                    print "     ",column['COLUMN_NAME']
+
+            # nice spacing for user
+            print ""
+
+
+    def search(self,input1="",flag1="",print_output_here=""):
+        """Search the Oracle database for something.
+        input1 is what is to be searched for.
+        flag1 specifies what kind of input input1 is."""
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # search for an input
+        # there should be an input and a flag
+        # the flag can be any one of the columns found in the SQL schema
+        # the columns in the SQL schema are found using self.oi.get_db_tables_and_columns()
+        # if the flag matches a column, then that table is searched for rows where the value in that column equals input1
+        # such rows are then converted into dictionaries, one dict per row
+        # these dicts are then printed using self.__print_one_dict__(row_dict,"",row_name)
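+        # Illustrative usage (a sketch; the values searched for are placeholders):
+        #   <ThisVariable>.search('HLTMuonMon','SMCK_TOOL_TYPE')
+        # prints (or returns, when printing is off) all rows of whichever table holds the SMCK_TOOL_TYPE column where that column equals 'HLTMuonMon'.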
+
+        # get SQL table and column names
+        database_column_list = self.oi.get_db_tables_and_columns()
+
+        # if the input or flag are missing
+        if input1 == "" or flag1 == "":
+
+            #info for user
+            if print_output_here:
+                print "search takes two arguments: input, flag."
+                print "The input is what is to be searched for."
+                print "The flag specifies what the input is. Recognised flags are listed below"
+                print "Valid arguments therefore include:"
+                print "1,'MCK_ID'"
+                print "17,'SMCK_ID'"
+                print "'HLTMuonMon_1','SMCK_TOOL_PATCH_VERSION'"
+                print "'HLTMuonMon','SMCK_TOOL_TYPE'"
+                print "'bsmart','MCK_CREATOR'"
+                print "'Muon','SMCK_SLICE_TYPE'"
+                print ""
+                print "Recognised flags are:"
+
+                # print available columns
+                for row in database_column_list:
+                    
+                    # print this column name
+                    print row['COLUMN_NAME']
+
+                # nice spacing for the user
+                print ""
+
+            # if input1 has not been provided
+            if input1 == "":
+
+                #info for user
+                if print_output_here:
+                    print "The input has not been provided. Please provide the first input."
+
+            # if flag1 has not been provided
+            if flag1 == "":
+
+                #info for user
+                if print_output_here:
+                    print "The flag has not been provided. Please provide the flag. Recognised flags are listed above"
+
+            # if we do not have the input and the flag, then we can not search, and so must exit
+            return
+
+        # check if the flag is a valid column
+        # flag to indicate that we've matched the flag to a column
+        column_match = False
+
+        # loop over columns
+        for row in database_column_list:
+
+            # check if this column matches
+            if flag1.upper() == row['COLUMN_NAME']:
+
+                # then extract the table name and column name
+                table_name = row['TABLE_NAME']
+                column_name = row['COLUMN_NAME']
+
+                # and set column_match and break out of this loop
+                column_match = True
+                break
+
+        # check that we have found the column
+        if not column_match:
+
+            #info for user
+            if print_output_here:
+                print "The flag '"+flag1+"' has not been recognised as a valid flag. Recognised flags are:"
+
+                # print available columns
+                for row in database_column_list:
+                    
+                    # print this column name, with a preceding -
+                    print "-"+row['COLUMN_NAME']
+                    
+            # no valid flag (column) means we can not search, and so must exit
+            return
+
+        # if we've got to here then we should have a value input1, and a table_name and column_name
+        # if we want to check for naughty input (SQL injection) in input1, then here is the place to do it
+
+        # we can't search for CLOB objects, ie. smck_config
+        # so if the user is attempting to search for smck_config, then warn them
+        # the input they have provided could be hashed, and the hash could be searched for
+        if flag1 == "smck_config":
+            
+            # info for the user
+            if print_output_here:
+
+                # explain problem
+                print ""
+                print "You have attempted to search for an SMCK_CONFIG."
+                print "Unfortunately, due to limitations in Oracle, this is not possible."
+                print "This is because the SMCK_CONFIG is stored as a CLOB, and in Oracle it is not possible to perform SQL query comparisons to CLOB objects, (WHERE SMCK_CONFIG = user_input)."
+
+                # explain the alternative
+                print ""
+                print "To allow for SMCK_CONFIG comparisons, each config is hashed (using sha512) and hashes are compared."
+                print "Your input will now be hashed, and that hash will be searched for."
+                print "As this may not be exactly what you intended, do take care when interpreting these search results."
+                print ""
+
+            # convert the flag and the column to be searched
+            # (this assumes the hashes are stored in the SMCK_CONFIG_HASH column, as filled elsewhere in this class)
+            flag1 = 'smck_config_hash'
+            column_name = 'SMCK_CONFIG_HASH'
+
+            # convert the input to a hash
+            input1 = self.__get_config_hash__(input1)
+
+        # now lets search
+        search_results_list = self.oi.column_search(input1,table_name,column_name)
+
+        # and print our results in a neat manner
+        if print_output_here:
+
+            # remind the user what was searched for
+            print ""
+            print input1,"has been searched for in the column",column_name,"of table",table_name,"in the menu-aware monitoring Oracle database."
+            print ""
+
+            # if no results have been found
+            if len(search_results_list) == 0:
+
+                # let the user know
+                print "No results have been found."
+                print ""
+                return
+
+            # else if results have been found
+            print "The following",len(search_results_list),"results have been found:"
+
+            # loop over the search results
+            for n, row_dict in enumerate(search_results_list):
+
+                # create a name for the row
+                # this is just "Result_n" where n is the row number
+                row_name = "Result_"+str(n)
+
+                # recursively print the dictionary
+                print ""
+                print row_name+":"
+                self.__print_one_dict__(row_dict,"   ",row_name)
+
+                # added bonus for the user
+                # if this is an mck table, then we shall find and print out the list of smck that this mck links to
+                if table_name == "MCK_TABLE":
+
+                    # get the smck_ids
+                    smck_ids = self.oi.read_mck_links_from_db(row_dict['MCK_ID'])
+
+                    # info for user
+                    print ""
+                    print row_name,"is an MCK, and links to the following SMCK:"
+
+                    # first print the list of SMCK_ID
+                    print "SMCK_ID list =",smck_ids
+
+                    # find the longest length of each of the three strings we want to print below
+                    id_ljust = 0
+                    version_ljust = 0
+                    slice_ljust = 0
+
+                    # temporary dict to hold smck_info so that we don't have to get them a second time
+                    smck_info_dict = {}
+
+                    # for each smck_id
+                    for smck_id in smck_ids:
+
+                        # get the smck_info
+                        smck_info = self.oi.read_smck_info_from_db(smck_id)
+
+                        # add it to smck_info_dict
+                        smck_info_dict[smck_id] = smck_info
+
+                        # find the longest strings
+                        if len(str(smck_id)) > id_ljust:
+                            id_ljust = len(str(smck_id))
+                        if len(str(smck_info['SMCK_TOOL_PATCH_VERSION'])) > version_ljust:
+                            version_ljust = len(str(smck_info['SMCK_TOOL_PATCH_VERSION']))
+                        if len(str(smck_info['SMCK_SLICE_TYPE'])) > slice_ljust:
+                            slice_ljust = len(str(smck_info['SMCK_SLICE_TYPE']))
+
+                    # now we print stuff
+                    # for each smck_id
+                    for smck_id in smck_ids:
+
+                        # get the smck_info from the above dictionary
+                        smck_info = smck_info_dict[smck_id]
+
+                        # print some formatted info on this smck
+                        print "[ SMCK_ID = "+str(smck_id).ljust(id_ljust)+" , SMCK_TOOL_PATCH_VERSION = "+str(smck_info['SMCK_TOOL_PATCH_VERSION']).ljust(version_ljust)+" , SMCK_SLICE_TYPE = "+str(smck_info['SMCK_SLICE_TYPE']).ljust(slice_ljust)+" ]"
+
+            # some nice spacing
+            print ""
+
+        # else we assume the user wants the search results returned
+        # (ie if this function has been called from within another function)
+        else:
+
+            # return the search results
+            return search_results_list
+
+
+    def apply_mck(self,input1="",print_output_here=""):
+        "Apply an MCK to locally running tools."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # is the input a valid mck?
+        if not self.__is_input_an_mck__(input1):
+
+            # info for user
+            if print_output_here:
+                print "MCK",input1,"has not been recognised as a valid MCK."
+            return
+
+        # get list of smck_id that this mck links to
+        smck_ids = self.oi.read_mck_links_from_db(input1)
+
+        # for each smck_id, apply the smck config
+        for smck_id in smck_ids:
+
+            # apply the smck config
+            self.apply_smck(smck_id)
+
+
+    def apply_smck(self,input1="",print_output_here=""):
+        "Apply an SMCK to a locally running tool."
+
+        # check for empty print_output_here
+        # if it is found, use self.print_output
+        if print_output_here == "":
+            print_output_here = self.print_output
+
+        # make sure we have the smck_id
+        smck_id = self.__get_smck_id_from_smck_identifier__(input1)
+        
+        # if we don't
+        if smck_id == -1:
+
+            # info for user
+            if print_output_here:
+                print "SMCK",input1,"has not been recognised as a valid SMCK."
+            return
+
+        # get the smck_info
+        smck_info = self.oi.read_smck_info_from_db(smck_id)
+
+        # get the processing step this smck should be used for
+        processing_step = smck_info['SMCK_PROCESSING_STEP']
+
+        # are we running in an appropriate processing step?
+        if not self.__is_input_a_valid_current_processing_step_to_use__(processing_step):
+
+            # info for user
+            if print_output_here:
+                print "SMCK",input1,"is for the Athena processing stage '"+processing_step+"', which we are not currently in. This SMCK will therefore not be applied as a config patch at this time."
+            return
+
+        # get the ToolSvc_tool_name
+        ToolSvc_tool_name = smck_info['SMCK_CONFIG']['ToolSvcName']
+
+        # is the input a locally running tool
+        tool_is_running = False
+        tool_is_running = hasattr(ToolSvc,ToolSvc_tool_name)
+        #exec "tool_is_running = hasattr(ToolSvc,%s)" % (ToolSvc_tool_name)
+        if not tool_is_running:
+
+            # info for user
+            if print_output_here:
+                print "SMCK",input1," corresponds to the tool",ToolSvc_tool_name,"which is not running locally, so can not be configured with this SMCK."
+            return
+
+        # get the patch config
+        # make sure there is no unicode in it # TODO Fix code so that this is not needed
+        patch_config = self.__unicode_to_str__(smck_info['SMCK_CONFIG']['ToolInfo'] )
+
+        # now we apply the config patch of the smck
+        # for each variable in the patch
+        for tool_key, tool_value in patch_config.iteritems():
+
+            # test if the tool has this variable
+            tool_contains_variable = False
+            exec "tool_contains_variable = hasattr(ToolSvc.%s,tool_key)" % (ToolSvc_tool_name)
+            
+            # if the tool has this variable
+            if tool_contains_variable:
+
+                # get the type of the value
+                type_to_set_to = type
+                exec "type_to_set_to = type(ToolSvc.%s.%s)" % (ToolSvc_tool_name,tool_key)
+
+                # test that the value to be applied is of the same type as the existing value
+                if type_to_set_to is type(tool_value):
+
+                    # if this is a list, make it blank first
+                    # this is to avoid overzealous Athena type limiting
+                    # TODO Write this in a more generic way, ie. not just for lists
+                    if type_to_set_to is list:
+                        exec "ToolSvc.%s.%s = []" % (ToolSvc_tool_name,tool_key)
+
+                    # apply the config for this variable
+                    exec "ToolSvc.%s.%s = tool_value" % (ToolSvc_tool_name,tool_key)
+
+                # if they are not the same type
+                else:
+
+                    # info for user
+                    if print_output_here:
+                        print "ToolSvc."+str(ToolSvc_tool_name)+"."+str(tool_key)+" is of type "+str(type_to_set_to)+", however in SMCK",input1,str(ToolSvc_tool_name)+"."+str(tool_key),"is of type",type(tool_value)
+                        #print "Athena will only allow ToolSvc."+str(ToolSvc_tool_name)+"."+str(tool_key)+" to be set to values of type",type_to_set_to
+                        print "Attempting conversion...",
+
+                    # get the type_to_set_to as a str
+                    type_to_set_to_str = str(type_to_set_to).split("'")[1]
+
+                    # try to convert
+                    try:
+
+                        # try the conversion
+                        exec "new_value = %s(tool_value)" % (type_to_set_to_str)
+
+                        # if we've got this far then the conversion was a success
+                        # info for user
+                        if print_output_here:
+                            print "conversion successful.",
+
+                        # check that the values are equal
+                        if new_value == tool_value:
+                            
+                            # apply the config for this variable
+                            exec "ToolSvc.%s.%s = new_value" % (ToolSvc_tool_name,tool_key)
+
+                            # nice formatting for user
+                            if print_output_here:
+                                print ""
+
+                        # if the values are not equal
+                        else:
+
+                            # info for user
+                            if print_output_here:
+                                print "However the values do not match. This tool variable will not be set."
+                                print "(",str(new_value),"!=",str(tool_value),")"
+
+                    # if the conversion has failed
+                    except:
+
+                        # info for user
+                        if print_output_here:
+                            print "conversion failed. This tool variable will not be set."
+
+        # info for user
+        if print_output_here:
+            print "SMCK",input1,"has been applied as a config patch to tool",ToolSvc_tool_name
+
+
+    def get_mck_id_from_smk(self,input_smk):
+        """Input an SMK, and get an MCK_ID back.
+        If no MCK is found, -1 is returned.
+        If an MCK of 0 is returned, this is intended to signify 
+        that the default tool configurations should be used."""
+        
+        # get list of all mck_to_smk links
+        mck_to_smk_links = []
+        mck_to_smk_links = self.oi.get_all_mck_to_smk_links()
+
+        # loop over the list
+        for link in mck_to_smk_links:
+
+            # the list should be ordered in smk, from largest to smallest
+            # check if this link's smk is equal to or less than the input_smk
+            if link['SMK'] <= input_smk:
+            
+                # then this is the link we want, so return the mck
+                return link['MCK']
+
+        # if we've made it this far, then an mck has not been found, so return -1
+        return -1
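+
+    # Worked example of the lookup above (a sketch only; the numbers follow the
+    # upload_mck_to_smk_link docstring below and are not real database entries):
+    # if the stored links are (SMK 456 -> MCK 72) and (SMK 123 -> MCK 71),
+    # get_all_mck_to_smk_links() returns them ordered from largest to smallest SMK,
+    # so get_mck_id_from_smk(300) skips the SMK 456 link (456 > 300)
+    # and returns MCK 71 from the SMK 123 link (123 <= 300).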
+
+
+    def print_all_mck_to_smk_links(self):
+        "Print all MCK to SMK links."
+
+        # get list of all mck_to_smk links
+        mck_to_smk_links = []
+        mck_to_smk_links = self.oi.get_all_mck_to_smk_links()
+
+        # reverse the list in place
+        mck_to_smk_links.reverse()
+
+        # loop over the reversed list (lowest smk to largest smk)
+        for n, link in enumerate(mck_to_smk_links):
+
+            # get some values
+            start_smk = link['SMK']
+            end_smk = -1
+            mck_id = link['MCK']
+            
+            # if this is not the last element in the list, then get one less than the next smk
+            if n+1 < len(mck_to_smk_links):
+                end_smk = mck_to_smk_links[n+1]['SMK'] - 1
+
+            # print the results
+            if end_smk != -1:
+                print "SMK",start_smk,"to SMK",end_smk,"= MCK",mck_id
+            else:
+                print "SMK",start_smk,"and greater = MCK",mck_id
+
+
+    def upload_mck_to_smk_link(self,input_mck_id="",input_smk="",comment=""):
+        """Upload a link between an SMK and an MCK.
+        All SMK from the input SMK onwards will be linked to the input MCK,
+        unless another link to a larger SMK is uploaded.
+        ie. if upload_mck_to_smk_link(71,123)
+        and    upload_mck_to_smk_link(72,456)
+        are both called, then:
+        SMK 123 to SMK 455  will link to MCK 71
+        SMK 456 and greater will link to MCK 72
+        As uploaded MCK have positive (non-zero) values,
+        linking an SMK to MCK=0 is intended to signify 
+        that the default tool configurations should be used. 
+        ie. no MCK should be applied for this SMK.
+        An optional comment can be attached to the upload."""
+
+        # is this session interactive?
+        if self.__is_session_interactive__():
+
+            # if type(input_smk) is not int, then ask the user to input an SMK
+            while type(input_smk) is not int:
+
+                # info for user
+                print "Please enter an SMK integer to be linked."
+
+                # now get user input
+                input_str = raw_input("SMK: ")
+
+                # try to convert the input str to int
+                try:
+                    input_smk = int(input_str)
+                except:
+                    print input_str,"could not be converted to an int."
+
+            # check if this SMK has already been linked to
+            existing_mck = self.oi.check_if_smk_link_exists(input_smk)
+            if existing_mck != -1:
+
+                # info for user, and return
+                print "SMK",input_smk,"has already been linked to MCK",existing_mck
+                print "In Menu-aware monitoring version",self.version,"once an SMK has been linked to an MCK, the user is prevented from changing which MCK this SMK links to."
+                return
+
+            # if type(input_mck_id) is not int, then ask the user to input an MCK
+            while type(input_mck_id) is not int:
+
+                # info for user
+                print "Please enter an MCK_ID integer to be linked."
+
+                # now get user input
+                input_str = raw_input("MCK_ID: ")            
+
+                # try to convert the input str to int
+                try:
+                    input_mck_id = int(input_str)
+                except:
+                    print input_str,"could not be converted to an int."
+
+        # if this session is not interactive
+        else:
+
+            # if the inputs are not valid
+            if (type(input_mck_id) is not int) or (type(input_smk) is not int):
+
+                # return, as inputs are not valid
+                # print for logfile
+                print "Menu-aware monitoring: upload_mck_to_smk_link(",input_mck_id,",",input_smk,",",comment,") inputs not valid. MCK and SMK must be integers."
+                print "No MCK to SMK link created."
+                return
+
+            # check if this SMK has already been linked to
+            existing_mck = self.oi.check_if_smk_link_exists(input_smk)
+            if existing_mck != -1:
+
+                # info for user, and return
+                print "SMK",input_smk,"has already been linked to MCK",existing_mck
+                print "In Menu-aware monitoring version",self.version,"once an SMK has been linked to an MCK, the user is prevented from changing which MCK this SMK links to."
+                return
+
+        # if we've got this far, then the input MCK and SMK should be valid
+
+        # # check if this link already exists
+        # if self.oi.check_if_mck_to_smk_link_exists(input_mck_id,input_smk):
+
+        #     # info for user, and return
+        #     print "SMK",input_smk,"has alread been linked to MCK",input_mck_id
+        #     return
+
+        # get the current user for the creator
+        creator = self.current_user
+
+        # if a comment has not been provided, then ask for one
+        if comment == "":
+            comment = self.__ask_for_comment__()
+
+        # try to upload the link
+        try:
+
+            # upload the link
+            self.oi.upload_mck_to_smk_link(input_mck_id,input_smk,creator,comment)
+
+            # info for user
+            print "MCK",input_mck_id,"has been linked to SMK",input_smk
+
+        # if the upload fails
+        except:
+
+            # info for user
+            print "An exception occurred:",sys.exc_info()[0],sys.exc_info()[1]
+            print "No link was uploaded."
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e57728c882cb862674e688fc03d2d2971e7e7e9
--- /dev/null
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/OracleInterface.py
@@ -0,0 +1,722 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+#
+# Author: Ben Smart (bsmart@cern.ch)
+#
+
+import sys
+# needed for oracle database connection
+import cx_Oracle
+# for converting configuration dictionaries into strings (for storing in the database as CLOB) and vice versa
+import json
+
+# put all oracle interaction stuff in a single class
+class OracleInterface:
+    "An interface to the Oracle database for Menu-aware Monitoring."
+
+
+    def connect_to_oracle(self):
+
+        # connection information, to be replaced with something more secure!
+        USER =  'tklimk'
+        PASSWORD = 'IiUTYhtOYL956!'
+        DATASOURCE = 'devdb11'
+
+        # connect to the oracle database
+        self.conn = cx_Oracle.connect(USER, PASSWORD, DATASOURCE)
+        self.cursor = self.conn.cursor()
+
+
+    def disconnect_from_oracle(self):
+
+        # close oracle connection
+        self.conn.close()
+
+
+    def fetch(self, query, parameters_dict = {}):
+
+        # fetch results, based on a query, 
+        # optionally providing additional parameters as a dictionary,
+        # and return all results
+        return self.cursor.execute(query,parameters_dict).fetchall()
+
+
+    def insert(self, query, parameters_dict = {}):
+        
+        # insert a row,
+        # optionally providing additional parameters as a dictionary
+        self.cursor.execute(query,parameters_dict)
+        self.conn.commit()
+
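+    # A minimal usage sketch of the two helpers above (illustrative only; the creator
+    # value is made up). Named binds (:NAME) are filled from the parameters dictionary
+    # by cx_Oracle, rather than by building the query string by hand:
+    #   rows = self.fetch("SELECT mck_id FROM mck_table WHERE mck_creator = :CREATOR",
+    #                     {'CREATOR': 'some_user'})
+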
+
+    def read_default_mck_id_from_db(self,athena_version):
+        # return mck_id for default config for Athena version athena_version
+        # if default does not exist, return -1
+        # if there are multiple defaults for this Athena version (this should not be the case), return -2 
+        # EDIT: I haven't got the constraint on this to work (yet), so for now it returns the first mck_id 
+
+        # construct query to search database for default config
+        query = """SELECT mck_table.mck_id \
+        FROM mck_table \
+        WHERE mck_table.mck_default = 1 \
+        AND mck_table.mck_athena_version = :ATHENA_VERSION"""
+        
+        # create dictionary of input parameter
+        parameters_dict = {}
+        parameters_dict['ATHENA_VERSION'] = athena_version
+    
+        # perform search and extract results
+        search_results = self.fetch(query, parameters_dict)
+
+        # check that there is a default
+        if len(search_results) == 0:
+            print "There is no default mck in the Oracle database for Athena", athena_version
+            return -1
+
+        # check that there is only one default
+        if len(search_results) > 1:
+            print "Something has gone horribly wrong with the Oracle database"
+            print "There are",len(search_results),"default mck numbers for Athena ", athena_version
+            for row in search_results:
+                print "default mck_id =",row[0]
+            print "Returning the first result",search_results[0][0]
+            #return -2
+
+        # extract default mck
+        default_mck = search_results[0][0]
+
+        # return the default mck_id for this Athena version
+        return default_mck
+
+
+    def upload_mck_and_smck(self, global_info):
+        # upload smck_info if they are not already present in the database
+        # upload and link mck if it is not already present in the database
+        # return mck_id and smck_ids
+
+        # get the tool/smck info
+        tool_list = global_info['MONITORING_TOOL_DICT']
+
+        # list of new smck_id
+        smck_ids = []
+
+        # make new smck
+        for tool_key, tool_info in tool_list.iteritems():
+
+            # upload each smck, and add the smck_id to the list
+            # if the smck config already exists, then the existing smck_id will be returned
+            smck_ids.append(self.upload_smck(tool_info))
+
+        # check if mck already exists
+        mck_id = self.check_if_exact_mck_exists(smck_ids)
+
+        # if the mck does not already exist
+        if mck_id == -1:
+
+            # get the mck info
+            mck_info = global_info['MCK']
+
+            # make a new mck
+            mck_id = self.upload_mck(mck_info)
+
+            # now link the mck to the smck
+            for smck_id in smck_ids:
+
+                # create link
+                self.upload_mck_smck_link(mck_id,smck_id)
+
+        # return mck_id and smck_ids
+        return mck_id, smck_ids
+
+
+    def upload_mck(self,mck_info):
+        # upload new mck
+        # return new mck_id
+        # new values will be inserted into mck_info for future access if so desired
+
+        # prepare insert data
+        insert = {}
+        for key in mck_info.keys():
+            insert[key] = ":"+key
+        insert['MCK_ID'] = "seq_mck_table_id.NEXTVAL" # from sequence
+        insert['MCK_CREATION_DATE'] = "CURRENT_TIMESTAMP"
+
+        # construct input strings for the query
+        key_str = ", ".join(insert.keys())
+        val_str = ", ".join(insert.values())
+
+        # construct the query
+        # new values will be inserted into mck_info for future access if so desired
+        query = "INSERT INTO mck_table (%s) VALUES (%s) RETURNING mck_id, mck_creation_date into :MCK_ID, :MCK_CREATION_DATE" % (key_str, val_str)
+
+        # prepare the mck_info
+        # we must ensure that mck_info variables are of the correct object-type to accept the new mck data
+        mck_info['MCK_ID'] = self.cursor.var(cx_Oracle.NUMBER)
+        mck_info['MCK_CREATION_DATE'] = self.cursor.var(cx_Oracle.TIMESTAMP)
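+        # note: these cx_Oracle bind variables are filled by the RETURNING ... INTO clause
+        # when the INSERT below executes, and are read back with getvalue() afterwards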
+
+        # insert the mck data, and get the new mck_id
+        self.insert(query,mck_info)
+
+        # now we need to add the new data back into mck_info in useful formats
+        mck_info['MCK_ID'] = mck_info['MCK_ID'].getvalue()
+        mck_info['MCK_CREATION_DATE'] = mck_info['MCK_CREATION_DATE'].getvalue()
+
+        # return the new mck_id
+        return mck_info['MCK_ID']
+
+
+    def upload_smck(self,smck_info):
+        # check if smck_info is already in database
+        # if so, return existing smck_id
+        # else upload smck_info and return new smck_id
+
+        # check if smck_info is already in database
+        smck_id = self.check_if_smck_exists(smck_info)
+
+        # if it is not in the database
+        if smck_id == -1:
+
+            # get new smck_tool_patch_version
+            smck_info['SMCK_TOOL_PATCH_VERSION'] = self.get_next_smck_tool_patch_version(smck_info['SMCK_TOOL_TYPE'])
+
+            # prepare insert data
+            insert = {}
+            for key in smck_info.keys():
+                insert[key] = ":"+key
+
+            # prepare insert data for things that are set by the server
+            insert['SMCK_ID'] = "seq_smck_table_id.NEXTVAL" # from sequence
+            insert['SMCK_CREATION_DATE'] = "CURRENT_TIMESTAMP"
+
+            # construct input strings for the query
+            key_str = ", ".join(insert.keys())
+            val_str = ", ".join(insert.values())
+
+            # construct the query
+            # new values will be inserted into smck_info for future access if so desired
+            query = "INSERT INTO smck_table (%s) VALUES (%s) RETURNING smck_id, smck_creation_date into :SMCK_ID, :SMCK_CREATION_DATE" % (key_str, val_str)
+
+            # prepare the smck_info
+            # we must ensure that smck_info variables are of the correct object-type to accept the new smck data
+            smck_info['SMCK_ID'] = self.cursor.var(cx_Oracle.NUMBER)
+            smck_info['SMCK_CREATION_DATE'] = self.cursor.var(cx_Oracle.TIMESTAMP)
+
+            # we must build an smck_info_to_submit, 
+            # as smck_info contains a dict for smck_config, 
+            # but we need to insert a json string
+            smck_info_to_submit = smck_info.copy()
+            smck_info_to_submit['SMCK_CONFIG'] = json.dumps(smck_info['SMCK_CONFIG'], ensure_ascii=True, sort_keys=True)
+
+            # insert the smck data, and get the new smck_id
+            self.insert(query,smck_info_to_submit)
+
+            # now we need to add the new data back into smck_info in useful formats
+            smck_info['SMCK_ID'] = int(smck_info_to_submit['SMCK_ID'].getvalue())
+            smck_info['SMCK_CREATION_DATE'] = smck_info_to_submit['SMCK_CREATION_DATE'].getvalue()
+
+            # return the new smck_id
+            return smck_info['SMCK_ID']
+            
+        # if the smck_info is already in database
+        else:
+
+            # get existing smck_info
+            existing_smck_info = self.read_smck_info_from_db(smck_id)
+
+            # fill with the existing values
+            #smck_info = existing_smck_info # why doesn't this work?
+            smck_info['SMCK_ID'] = smck_id
+            smck_info['SMCK_TOOL_PATCH_VERSION'] = existing_smck_info['SMCK_TOOL_PATCH_VERSION']
+
+            # return existing smck_id
+            return smck_id
+
+
+    def upload_mck_smck_link(self,mck_id,smck_id):
+        # create link between mck_id and smck_id
+
+        # make query to insert link
+        query = """ INSERT INTO mck_to_smck_link \
+        (link_mck, link_smck) VALUES (:MCK_ID, :SMCK_ID)"""
+
+        # create dictionary of input parameter
+        parameters_dict = {}
+        parameters_dict['MCK_ID'] = mck_id
+        parameters_dict['SMCK_ID'] = smck_id
+
+        # insert this into the database
+        self.insert(query,parameters_dict)
+
+
+    def check_if_exact_mck_exists(self,smck_ids):
+        # check if exactly this mck exists (if an mck exists with exactly the same smck links)
+        # if not, return -1
+
+        # check if mck already exists
+        search_results = self.check_if_mck_exists(smck_ids)
+
+        # check if there is an exact match
+        # if not, leave mck_id = -1
+        mck_id = -1
+
+        # first check if links were found
+        if search_results != -1:
+
+            # loop over possible matches
+            for row_mck_id in search_results:
+
+                # check if this is an exact match
+                if len(self.read_mck_links_from_db(row_mck_id)) == len(smck_ids):
+
+                    # if so, this is the existing mck
+                    mck_id = row_mck_id
+                    break
+
+        # return mck_id
+        return mck_id
+
+
+    def check_if_mck_exists(self,smck_ids):
+        # check if an mck exists that links to all the smck in the input smck_ids list
+        # if it does, return the search results (list of mck_id), otherwise return -1
+        # note: multiple mck_id may be found
+
+        # construct a query with a dummy 1=1 so that all subsequent lines start with AND
+        query = "SELECT mck_table.mck_id FROM mck_table WHERE 1=1 "
+        
+        # for each smck_id, find mck that link to that smck
+        # returned mck_id must link to all smck
+        for smck_id in smck_ids:
+            
+            # add a sub-query for each smck
+            query += "AND mck_table.mck_id IN (SELECT mck_to_smck_link.link_mck FROM mck_to_smck_link WHERE mck_to_smck_link.link_smck = "+str(smck_id)+" ) " 
+
+        # search with this query
+        search_results = self.fetch(query)
+
+        # if there are results, return them (mck_id)
+        # otherwise return -1
+        if len(search_results) > 0:
+            # return list of mck_id
+            # need to use read_mck_links_from_db(mck_id) on each mck_id to check if that mck is an exact match (links to the same, ie. same number of, smck as in the input smck_ids)
+            return_list = []
+            for row in search_results:
+                return_list.append(row[0])
+            return return_list
+        else:
+            return -1
+
+
+    def check_if_smck_exists(self,smck_info):
+        # check if an existing smck contains the same config as the input smck_info
+        # if so, return the existing smck_id
+        # else return -1
+
+        # construct a query
+        query = """SELECT smck_table.smck_id FROM smck_table \
+        WHERE smck_table.smck_config_hash = :SMCK_CONFIG_HASH \
+        AND   smck_table.smck_tool_type = :SMCK_TOOL_TYPE \
+        AND   smck_table.smck_processing_step = :SMCK_PROCESSING_STEP """
+
+        # construct a dict for the input parameter (smck_config)
+        parameters_dict = {}
+        #parameters_dict['SMCK_CONFIG'] = json.dumps(smck_info['SMCK_CONFIG'], ensure_ascii=True, sort_keys=True)
+        parameters_dict['SMCK_CONFIG_HASH'] = smck_info['SMCK_CONFIG_HASH']
+        parameters_dict['SMCK_TOOL_TYPE'] = smck_info['SMCK_TOOL_TYPE']
+        parameters_dict['SMCK_PROCESSING_STEP'] = smck_info['SMCK_PROCESSING_STEP']
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+        
+        # if there are results, return them (smck_id)
+        # otherwise return -1
+        if len(search_results) > 0:
+            # in the case that there are multiple smck_id (there shouldn't be), then we just return the first
+            if len(search_results) > 1:
+                print "More than 1 smck_id found that matches input smck_info:"
+                print "SMCK_TOOL_TYPE: ", smck_info['SMCK_TOOL_TYPE']
+                print "SMCK_PROCESSING_STEP: ", smck_info['SMCK_PROCESSING_STEP']
+                print "SMCK_CONFIG: ", smck_info['SMCK_CONFIG']
+                print "SMCK_CONFIG_HASH: ", smck_info['SMCK_CONFIG_HASH']
+                for row in search_results:
+                    print "SMCK_ID: ", row[0]
+            return search_results[0][0]
+        else:
+            return -1
+
+
+    def read_mck_links_from_db(self,mck_id):
+        # return list of smck linked to this mck
+
+        # construct the query
+        query = """SELECT mck_to_smck_link.link_smck FROM mck_to_smck_link \
+        WHERE mck_to_smck_link.link_mck = :MCK_ID"""
+
+        # construct the dict of the input mck_id
+        parameters_dict = {}
+        parameters_dict['MCK_ID'] = mck_id
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return them (smck_id)
+        # otherwise return -1
+        if len(search_results) > 0:
+            # return list of smck_id
+            return_list = []
+            for row in search_results:
+                return_list.append(row[0])
+            return return_list
+        else:
+            return -1
+
+
+    def read_mck_info_from_db(self,mck_id):
+        # return mck_info of mck with mck_id
+
+        # construct the query
+        query = """SELECT * FROM mck_table \
+        WHERE mck_table.mck_id = :MCK_ID"""
+
+        # construct the dict of the input mck_id
+        parameters_dict = {}
+        parameters_dict['MCK_ID'] = mck_id
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return them 
+        # otherwise return -1
+        if len(search_results) > 0:
+
+            # create mck_info with returned data
+            mck_info = {}
+            mck_info['MCK_ID'] =             search_results[0][0]
+            mck_info['MCK_DEFAULT'] =        search_results[0][1]            
+            mck_info['MCK_ATHENA_VERSION'] = search_results[0][2]
+            mck_info['MCK_CREATOR'] =        search_results[0][3]
+            mck_info['MCK_CREATION_DATE'] =  search_results[0][4]
+            mck_info['MCK_COMMENT'] =        search_results[0][5]
+
+            # return filled mck_info
+            return mck_info
+        
+        else:
+            return -1
+
+
+    def read_smck_info_from_db(self,smck_id):
+        # return smck_info of smck with smck_id
+
+        # construct the query
+        query = """SELECT * FROM smck_table \
+        WHERE smck_table.smck_id = :SMCK_ID"""
+
+        # construct the dict of the input smck_id
+        parameters_dict = {}
+        parameters_dict['SMCK_ID'] = smck_id
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return them 
+        # otherwise return -1
+        if len(search_results) > 0:
+
+            # create smck_info with returned data
+            smck_info = {}
+            smck_info['SMCK_ID'] =                 search_results[0][0]
+            smck_info['SMCK_SLICE_TYPE'] =         search_results[0][1]
+            smck_info['SMCK_TOOL_TYPE'] =          search_results[0][2]
+            smck_info['SMCK_TOOL_PATCH_VERSION'] = search_results[0][3]
+            smck_info['SMCK_PROCESSING_STEP'] =    search_results[0][4]
+            smck_info['SMCK_CONFIG'] =             json.loads(search_results[0][5].read()) # read() required to extract CLOB data
+            smck_info['SMCK_CONFIG_HASH'] =        search_results[0][6]
+            smck_info['SMCK_DEFAULT'] =            search_results[0][7]
+            smck_info['SMCK_ATHENA_VERSION'] =     search_results[0][8]
+            smck_info['SMCK_CREATOR'] =            search_results[0][9]
+            smck_info['SMCK_CREATION_DATE'] =      search_results[0][10]
+            smck_info['SMCK_COMMENT'] =            search_results[0][11]
+
+            # return filled smck_info
+            return smck_info
+        
+        else:
+            return -1
+
+
+    def get_smck_id_from_smck_tool_patch_version(self,smck_tool_patch_version):
+        # given an smck_tool_patch_version, return the smck_id
+
+        # construct the query
+        query = """SELECT smck_table.smck_id FROM smck_table \
+        WHERE smck_table.smck_tool_patch_version = :SMCK_TOOL_PATCH_VERSION"""
+
+        # construct the dict of the input smck_tool_patch_version
+        parameters_dict = {}
+        parameters_dict['SMCK_TOOL_PATCH_VERSION'] = smck_tool_patch_version
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return them (smck_id)
+        # otherwise return -1
+        if len(search_results) > 0:
+            # return the smck_id
+            return search_results[0][0]
+        else:
+            return -1
+
+
+    def get_smck_tool_patch_version_from_smck_id(self,smck_id):
+        # given an smck_id, return the smck_tool_patch_version
+
+        # construct the query
+        query = """SELECT smck_table.smck_tool_patch_version FROM smck_table \
+        WHERE smck_table.smck_id = :SMCK_ID"""
+
+        # construct the dict of the input smck_id
+        parameters_dict = {}
+        parameters_dict['SMCK_ID'] = smck_id
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return them (smck_tool_patch_version)
+        # otherwise return -1
+        if len(search_results) > 0:
+            # return the smck_tool_patch_version
+            return search_results[0][0]
+        else:
+            return -1
+
+
+    def get_next_smck_tool_patch_version(self,input_tool_type):
+
+        # construct a query to find the latest smck_tool_patch_version of this input_tool_type
+        # note: in Oracle, ROWNUM is assigned before ORDER BY is applied,
+        # so the ordering is done in a sub-query and the ROWNUM filter is applied outside it
+        query = """SELECT smck_tool_patch_version FROM \
+        (SELECT smck_table.smck_tool_patch_version \
+        FROM smck_table \
+        WHERE smck_table.smck_tool_type=:INPUT_TOOL_TYPE \
+        ORDER BY smck_table.smck_id DESC) \
+        WHERE ROWNUM <= 1 """
+
+        # construct the dict of the input tool type
+        parameters_dict = {}
+        parameters_dict['INPUT_TOOL_TYPE'] = input_tool_type
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # get largest smck_tool_patch_version
+        largest_smck_tool_patch_version = -1
+        for row in search_results:
+            largest_smck_tool_patch_version = row[0]
+
+        # make new smck_tool_patch_version
+        if largest_smck_tool_patch_version < 0:
+            return input_tool_type+"_1"
+        else:
+            version_number = int(largest_smck_tool_patch_version.split("_")[-1])
+            return input_tool_type+"_"+str(version_number+1)
+
+
+    def get_db_tables_and_columns(self):
+
+        # get data on all columns, and associated table names and column data
+        # construct a query
+        query = "SELECT table_name, column_name, data_type, data_length FROM USER_TAB_COLUMNS"
+
+        # perform the search
+        search_results = self.fetch(query)
+
+        # return a list of dictionaries
+        return_list = []
+
+        # loop over rows in search results
+        for row in search_results:
+
+            # construct a dictionary of the results
+            row_dict = {}
+            row_dict['TABLE_NAME'] = row[0].upper()
+            row_dict['COLUMN_NAME'] = row[1].upper()
+            row_dict['DATA_TYPE'] = row[2].upper()
+            row_dict['DATA_LENGTH'] = row[3]
+
+            # add this dictionary to the return list
+            return_list.append(row_dict)
+
+        # return the return list
+        return return_list
+
+
+    def column_search(self,input1,table_name,column_name):
+
+        # search in table table_name for entries/rows where column_name=input1
+        # return a list of dictionaries (one dict per row)
+
+        # list to return
+        return_list = []
+
+        # first we get the schema
+        database_schema = self.get_db_tables_and_columns()
+
+        # test that the table and column names are valid
+        table_and_column_match = False
+        for row in database_schema:
+
+            # check if the table and column match
+            if row['TABLE_NAME'] == table_name.upper() and row['COLUMN_NAME'] == column_name.upper():
+
+                # we have a match
+                table_and_column_match = True
+                break
+
+        # if there was no match
+        if not table_and_column_match:
+
+            # no match, so return the empty list
+            return return_list
+            
+        # now we construct a query
+        query = "SELECT * FROM "+table_name+" WHERE "+column_name+" = :INPUT1"
+
+        # construct the dict of the various input
+        parameters_dict = {}
+        parameters_dict['INPUT1'] = input1
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # get list of columns in this table
+        column_list = []
+        for schema_row in database_schema:
+
+            # if this row belongs to the desired table
+            if schema_row['TABLE_NAME'] == table_name.upper():
+
+                # then we add this column name to our list
+                # we need to add it to the start of the list (insert before element 0), 
+                # as the database table/column search function returns the results in reversed order
+                column_list.insert(0,schema_row['COLUMN_NAME'])
+
+        # loop over results
+        for result_row in search_results:
+
+            # the length of column_list should be the same as the length of result_row
+            if len(column_list) != len(result_row):
+
+                # something has gone very wrong
+                print "ERROR in OracleInterface.column_search(",input1,",",table_name,",",column_name,")"
+                print "column_list =",column_list
+                print "result_row =",result_row
+                return return_list
+
+            # we can zip up column_list and result_row to make a dictionary
+            row_dict = dict( zip( column_list, result_row) )
+
+            # need to check if this is an smck,
+            # in which case we need to turn the json CLOB into a dict
+            if table_name.upper() == 'SMCK_TABLE':
+                
+                # first we read out the CLOB
+                # then we turn the json string into a dict
+                row_dict['SMCK_CONFIG'] = json.loads(row_dict['SMCK_CONFIG'].read())
+
+            # add this dict to the return list
+            return_list.append(row_dict)
+                
+        # now our list of dictionaries should be complete, so we return it
+        return return_list
+
+
+    def check_if_smk_link_exists(self,smk):
+        # if this smk has been linked to an mck, then return that mck, otherwise return -1
+
+        # construct the query
+        query = """SELECT smk_link_mck FROM mck_to_smk_link \
+        WHERE mck_to_smk_link.smk_link_smk = :SMK"""
+
+        # construct the dict of the input smk
+        parameters_dict = {}
+        parameters_dict['SMK'] = smk
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return them
+        # otherwise return -1
+        if len(search_results) > 0:
+            # we have found a match, so return the mck
+            return search_results[0][0]
+        else:
+            # no match has been found, so return -1
+            return -1
+
+
+
+    def check_if_mck_to_smk_link_exists(self,mck_id,smk):
+        # return True or False, depending on whether this exact mck-smk link exists
+
+        # construct the query
+        query = """SELECT * FROM mck_to_smk_link \
+        WHERE mck_to_smk_link.smk_link_mck = :MCK_ID \
+        AND mck_to_smk_link.smk_link_smk = :SMK"""
+
+        # construct the dict of the input mck_id and smk
+        parameters_dict = {}
+        parameters_dict['MCK_ID'] = mck_id
+        parameters_dict['SMK'] = smk
+
+        # perform the search
+        search_results = self.fetch(query,parameters_dict)
+
+        # if there are results, return True
+        # otherwise return False
+        if len(search_results) > 0:
+            # we have found a match, so return True
+            return True
+        else:
+            # no match has been found, so return False
+            return False
+
+
+    def upload_mck_to_smk_link(self,mck_id,smk,creator,comment=""):
+        # create link between mck_id and smk
+
+        # make query to insert link
+        query = """ INSERT INTO mck_to_smk_link \
+        (smk_link_mck, smk_link_smk, smk_link_creator, smk_link_creation_date, smk_link_comment) \
+        VALUES (:MCK_ID, :SMK, :CREATOR, CURRENT_TIMESTAMP, :USER_COMMENT)"""
+
+        # create dictionary of input parameter
+        parameters_dict = {}
+        parameters_dict['MCK_ID'] = mck_id
+        parameters_dict['SMK'] = smk
+        parameters_dict['CREATOR'] = creator
+        parameters_dict['USER_COMMENT'] = comment
+
+        # insert this into the database
+        self.insert(query,parameters_dict)
+
+
+    def get_all_mck_to_smk_links(self):
+
+        # construct a query to find all mck_to_smk links, ordered by smk
+        query = """SELECT * \
+        FROM mck_to_smk_link \
+        ORDER BY mck_to_smk_link.smk_link_smk DESC """
+
+        # perform the search
+        search_results = self.fetch(query)
+
+        # for each mck_to_smk link, add a dict to the return list
+        return_list = []
+        for row in search_results:
+            this_dict = {}
+            this_dict['MCK'] = row[0]
+            this_dict['SMK'] = row[1]
+            return_list.append(this_dict)
+
+        # return the list of link dicts
+        return return_list
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py
new file mode 100644
index 0000000000000000000000000000000000000000..b64ce2ca180195ce49c8cc789a7f9031d559baa7
--- /dev/null
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/PackagesToInterrogate.py
@@ -0,0 +1,28 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+#
+# Author: Ben Smart (bsmart@cern.ch)
+#
+
+# put all storage of information on tool package names etc. in a single class
+class PackagesToInterrogate:
+
+    PackagesToInterrogate_test = {
+        "Muon"        : { "PackageName" : "TrigMuonMonitoring.TrigMuonMonitoringConfig",       "ToolName" : "TrigMuonMonitoringTool",      "Dll" : "TrigMuonMonitoring"}
+    }
+
+    PackagesToInterrogate = {
+        "General"     : { "PackageName" : "TrigHLTMonitoring.TrigHLTMonitoringConfig",         "ToolName" : "HLTGeneralTool",              "Dll" : "TrigHLTMonitoring"},
+        "Bjet"        : { "PackageName" : "TrigBjetMonitoring.TrigBjetMonitoringConfig",       "ToolName" : "TrigBjetMonitoringConfig",    "Dll" : "TrigBjetMonitoring"},
+        "Bphys"       : { "PackageName" : "TrigBphysMonitoring.TrigBphysMonitoringConfig",     "ToolName" : "TrigBphysMonitoringTool",     "Dll" : "TrigBphysMonitoring"}, 
+        "Calo"        : { "PackageName" : "TrigCaloMonitoring.TrigCaloMonitoringConfig",       "ToolName" : "HLTCaloMonitoringTool",       "Dll" : "TrigCaloMonitoring"}, 
+        "Egamma"      : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig",   "ToolName" : "HLTEgammaMonitoringTool",     "Dll" : "TrigEgammaMonitoring"}, 
+        "IDJpsi"      : { "PackageName" : "TrigIDJpsiMonitoring.TrigIDJpsiMonitoringConfig",   "ToolName" : "TrigIDJpsiMonitoringTool",    "Dll" : "TrigIDJpsiMonitoring"}, 
+        "IDtrk"       : { "PackageName" : "TrigIDtrkMonitoring.TrigIDtrkMonitoringConfig",     "ToolName" : "TrigIDtrkMonitoringTool",     "Dll" : "TrigInDetAnalysisExample"}, 
+        "Jet"         : { "PackageName" : "TrigJetMonitoring.TrigJetMonitoringConfig",         "ToolName" : "TrigJetMonitoringTool",       "Dll" : "TrigJetMonitoring"}, 
+        "MET"         : { "PackageName" : "TrigMETMonitoring.TrigMETMonitoringConfig",         "ToolName" : "HLTMETMonitoringTool",        "Dll" : "TrigMETMonitoring"}, 
+        "MinBias"     : { "PackageName" : "TrigMinBiasMonitoring.TrigMinBiasMonitoringConfig", "ToolName" : "TrigMinBiasMonitoringTool",   "Dll" : "TrigMinBiasMonitoring"}, 
+        "Muon"        : { "PackageName" : "TrigMuonMonitoring.TrigMuonMonitoringConfig",       "ToolName" : "TrigMuonMonitoringTool",      "Dll" : "TrigMuonMonitoring"}, 
+        "Tau"         : { "PackageName" : "TrigTauMonitoring.TrigTauMonitoringConfig",         "ToolName" : "TrigTauMonitoringTool",       "Dll" : "TrigTauMonitoring"},
+        "Egamma_Dump" : { "PackageName" : "TrigEgammaMonitoring.TrigEgammaMonitoringConfig",   "ToolName" : "HLTEgammaMonitoringDumpTool", "Dll" : "TrigEgammaMonitoring"}
+    }
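+
+    # Each entry above is consumed by ToolInterrogator: load_all_tools() imports the
+    # monitoring config via "from <PackageName> import <ToolName>" and then calls
+    # <ToolName>() to instantiate it, while the "Dll" field is used to match locally
+    # running ToolSvc tools back to their slice.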
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f42beac22f859122de268a579dc0c55f2f1f92a
--- /dev/null
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/python/ToolInterrogator.py
@@ -0,0 +1,233 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+#
+# Author: Ben Smart (bsmart@cern.ch)
+#
+
+import sys
+# needed for getting package names etc. of tools
+from TrigHLTMonitoring.PackagesToInterrogate import PackagesToInterrogate
+# needed to access the tools in ToolSvc
+from AthenaCommon.AppMgr import ToolSvc
+#import json to dump and load each smck_config, so that any odd characters are consistently encoded in the output smck_config dictionary
+import json
+
+# put all local-package interrogation tools in a single class
+class ToolInterrogator:
+    """A class to find locally running trigger-monitoring tools, and extract their configurations from them.
+    Also provides functionality to set up all trigger-monitoring tools, such that they can be interrogated."""
+
+
+    def __init__(self):
+        """Set up ToolInterrogator.
+        Get dictionary of trigger-monitoring tools from PackagesToInterrogate"""
+
+        # Packages to interrogate
+        self.packages_to_interrogate = PackagesToInterrogate()
+
+
+    def load_all_tools(self):
+        "Set up all trigger-monitoring tools, such that they can be interrogated."
+
+        # info for user
+        print "Now setting up all trigger-monitoring tools."
+        print ""
+        print "#"*10
+        print ""
+
+        # nasty hacks to fix problems in HLTMonTriggerList.py in Athena 20.1.5.8
+        #
+        # line 60, in config
+        # self.primary_single_ele_iso = egamma.primary_single_ele_iso
+        # AttributeError: 'module' object has no attribute 'primary_single_ele_iso'
+        #
+        # line 84, in config
+        # self.monitoring_muonEFFS = muon.monitoring_muonEFFS
+        # AttributeError: 'module' object has no attribute 'monitoring_muonEFFS'
+        #
+        # so we give these objects the offending attributes...
+        # import TrigEgammaMonitoring.TrigEgammaMonitCategory as egamma
+        # egamma.primary_single_ele_iso = ['e24_lhmedium_iloose_L1EM18VH','e24_lhmedium_iloose_L1EM20VH','e24_lhtight_iloose_L1EM20VH','e24_lhtight_iloose','e26_lhtight_iloose']
+        # egamma.primary_single_ele_cutbased_iso = ['e24_medium_iloose_L1EM18VH','e24_medium_iloose_L1EM20VH','e24_tight_iloose_L1EM20VH','e24_tight_iloose','e26_tight_iloose']
+        # import TrigMuonMonitoring.TrigMuonMonitCategory as muon
+        # muon.monitoring_muonEFFS = ['HLT_mu18_mu8noL1']
+
+        # load all tools in PackagesToInterrogate
+        for key, value in self.packages_to_interrogate.PackagesToInterrogate.iteritems():
+
+            # first import the tool from the package
+            exec "from %s import %s" % (value['PackageName'],value['ToolName'])
+
+            # then initialise the tool
+            exec "%s()" % (value['ToolName'])
+
+        # info for user
+        print ""
+        print "#"*10
+        print ""
+        print "All trigger-monitoring tools have now been set up."
+
+
+    def get_available_trigger_monitoring_tools(self):
+        "Get list of currently running trigger-monitoring tools"
+
+        # get list of all tools in ToolSvc
+        tool_list = ToolSvc.getAllChildren()
+
+        # list of monitoring tools
+        mon_tools = []
+
+        # loop over tools
+        for tool in tool_list:
+
+            # skip tools with names that will garble our commands
+            # (no TrigMonTools do this)
+            # specifically skip tools with scope resolution operators in their names (why do they have these here!?)
+            if "::" in tool.getName():
+                print "Skipping",tool.getName(),"as its name contains a scope resolution operator '::'."
+                continue
+
+            # find if Trig and Monitoring are in the tool Dll name
+            if "Trig" in tool.getDlls() and "Monitoring" in tool.getDlls():
+                mon_tools.append(tool.getName())
+
+            # or if the tool Dll name is TrigInDetAnalysisExample, as is the case for the IDtrk tools
+            if 'TrigInDetAnalysisExample' == tool.getDlls():
+                mon_tools.append(tool.getName())
+
+        # return the list of monitoring tools
+        return mon_tools
+
+
+    def check_if_valid_tool(self,tool_ToolSvc_name):
+        "Is this tool a locally running tool (is it in ToolSvc)."
+
+        # check that we have been given a valid name
+        if not hasattr(ToolSvc,tool_ToolSvc_name):
+            print "ToolSvc does not contain a tool named",tool_ToolSvc_name
+            return -1
+
+
+    def get_smck_config_from_ToolSvc_tool_name(self,tool_ToolSvc_name):
+        "Get the configuration and all other pertinent information related to a locally running trigger-monitoring tool."
+
+        # check if this is a valid tool
+        if self.check_if_valid_tool(tool_ToolSvc_name) == -1:
+            return -1
+
+        # get tool Dll
+        tool_Dll = ""
+        exec "tool_Dll = ToolSvc.%s.getDlls()" % (tool_ToolSvc_name)
+
+        # the slice_name, the key in packages_to_interrogate.PackagesToInterrogate
+        slice_name = ''
+
+        # the smck_config to fill and return
+        smck_config = {}
+
+        # we first need to check whether this is the HLTEgammaMon or HLTEgammaDump tool
+        if tool_ToolSvc_name == 'HLTEgammaMon':
+            slice_name = 'Egamma'
+        elif tool_ToolSvc_name == 'HLTEgammaDump':
+            slice_name = 'Egamma_Dump'
+        else:
+            for key, value in self.packages_to_interrogate.PackagesToInterrogate.iteritems():
+                if value['Dll'] == tool_Dll:
+                    slice_name = key
+
+        # check if slice has been found or not
+        if slice_name == '':
+
+            #if not, return -1
+            print "Additional info not found for ToolSvc tool ", tool_ToolSvc_name
+            return -1
+
+        else:
+
+            # fill the smck_config
+            smck_config['ToolSvcName'] = tool_ToolSvc_name
+            smck_config['PackageName'] = self.packages_to_interrogate.PackagesToInterrogate[slice_name]['PackageName']
+            smck_config['ToolName'] =    self.packages_to_interrogate.PackagesToInterrogate[slice_name]['ToolName']
+            smck_config['ToolInfo'] =    self.interrogate(tool_ToolSvc_name)
+            smck_config['SliceType'] =   slice_name
+
+            # json dump and load, 
+            # so that any odd characters are consistently encoded in the output smck_config dictionary
+            smck_config = json.loads( json.dumps(smck_config, ensure_ascii=True, sort_keys=True) )
+
+            # return filled smck_config
+            return smck_config
+
+
+    def interrogate(self,tool_ToolSvc_name):
+        "Extract the configuration from a locally running trigger-monitoring tool."
+
+        # check if this is a valid tool
+        if self.check_if_valid_tool(tool_ToolSvc_name) == -1:
+            return -1
+
+        # now we begin interrogating the tool...
+            
+        # This is the dictionary where we will store all our good gathered information
+        tool_info = {}
+    
+        # first we get its properties
+        tool_properties = ""
+        exec "tool_properties = ToolSvc.%s.properties()" % (tool_ToolSvc_name)
+    
+        # we also get the property 'no value' string for this tool (to use later)
+        tool_novalue = ""
+        exec "tool_novalue = ToolSvc.%s.propertyNoValue" % (tool_ToolSvc_name)
+
+        # and we get the default property values
+        tool_default_properties = ""
+        exec "tool_default_properties = ToolSvc.%s.getDefaultProperties()" % (tool_ToolSvc_name)
+    
+        # then we check these properties
+        for prop,value in tool_properties.iteritems():
+        
+            # does the tool really have this property?
+            tool_property_truth = False
+            exec "tool_property_truth = hasattr(ToolSvc.%s,prop)" % (tool_ToolSvc_name)
+        
+            # if it does not...
+            # then the value has likely not been set
+            # ie. the default value is to be used
+            if tool_property_truth == False:
+            
+                # does this property hold a 'no value' value?
+                if value == tool_novalue:
+                
+                    # if so, use the default value
+                    if prop in tool_default_properties:
+                        value = tool_default_properties[prop]
+                    else:
+                        continue
+
+                # else if this tool does not contain this property
+                # and this property is also not just currently set to 'no value'
+                # then skip this property
+                else:
+                    continue
+    
+            # test if this value is JSON serializable
+            try:
+
+                # json dump and load,
+                # if this is successful, then the value can be stored in the SQL database CLOB
+                # if it fails, then the try except block will catch this, and the value will not be included in the data to be stored
+                throwaway_test_object = json.loads( json.dumps(value, ensure_ascii=True, sort_keys=True) )
+                                
+                # if we've made it this far
+                # then this information is good, and should be stored in our dictionary
+                tool_info[prop] = value
+
+            # if this value is not JSON serializable
+            except:
+
+                # do nothing, as we shall not store this value
+                continue
+
+        # return the info we have collected
+        return tool_info
+
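+
+# A minimal usage sketch (assuming an Athena job options context where ToolSvc is populated):
+#   ti = ToolInterrogator()
+#   ti.load_all_tools()                                   # optional: set up every known monitoring tool
+#   for tool_name in ti.get_available_trigger_monitoring_tools():
+#       smck_config = ti.get_smck_config_from_ToolSvc_tool_name(tool_name)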
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py
index e7e0942b12fbb24f0270c28854ed95447fa6d379..9684c11c672eeec40df6dfda20f40bef9891acd4 100755
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/HLTMonitoring_topOptions.py
@@ -27,6 +27,8 @@ if DQMonFlags.monManEnvironment == 'tier0Raw':
   HLTMonFlags.doDump     = False
   HLTMonFlags.doOfflineTauTTP = False
   HLTMonFlags.doIDJpsiMon  = False
+  #HLTMonFlags.doMaM      = True # default is False
+  #HLTMonFlags.MCK        = -1 # default is -1
 elif DQMonFlags.monManEnvironment == 'tier0ESD':
   # we are in ESD->AOD step
   # run all tools *except* the following (these are run in RAW->ESD)
@@ -38,4 +40,6 @@ else :
   HLTMonFlags.doGeneral = False
   HLTMonFlags.doMonTier0 = False
 
+# temporarily disabling IDJpsiMon to deal with ATR-12037
+HLTMonFlags.doIDJpsiMon = False
 include( "TrigHLTMonitoring/addMonTools.py" )
diff --git a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py
index dece58a94e6982c9f7cef2a0e40e7de53bc61b37..d5c5cdd854cbca6edd34b7cbbe2d824dd6a04f10 100644
--- a/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py
+++ b/Trigger/TrigMonitoring/TrigHLTMonitoring/share/addMonTools.py
@@ -158,6 +158,65 @@ if HLTMonFlags.doDump:
       print "Problems with OfflineTauTTP, tool not enabled"
 ############################################
 
+########## Menu-aware Monitoring ###########
+
+if HLTMonFlags.doMaM:
+
+    # set up menu-aware monitoring
+    from TrigHLTMonitoring.MenuAwareMonitoring import MenuAwareMonitoring
+    mam = MenuAwareMonitoring()
+
+    # if a specific Monitoring Configuration Key (MCK) has been set, then use it
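+    # (e.g. by setting HLTMonFlags.MCK = 123 in the job options; 123 is just an illustrative value)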
+    if HLTMonFlags.MCK.StoredValue > 0:
+
+        print "Using trigger Monitoring Configuration Key (MCK)",HLTMonFlags.MCK.StoredValue
+        mam.apply_mck( HLTMonFlags.MCK.StoredValue )
+
+    # if HLTMonFlags.MCK is -1 (the default) then we pick the MCK based on the SMK
+    if HLTMonFlags.MCK.StoredValue == -1:
+
+        # MaM will translate the SMK into an MCK, and apply it
+        from RecExConfig.InputFilePeeker import inputFileSummary
+        #print "inputFileSummary =",inputFileSummary
+        if inputFileSummary.__contains__('bs_metadata'):
+            # get the run number and lumi_block for the input
+            run_number = inputFileSummary['bs_metadata']['run_number']
+            lumi_block = inputFileSummary['bs_metadata']['LumiBlock']
+            runiov = (int(run_number)<<32) + int(lumi_block)
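+            # the IOV key packs the run number into the upper 32 bits and the
+            # lumi block into the lower 32 bits,
+            # e.g. run 284500, LB 1 gives (284500 << 32) + 1 (illustrative run/LB numbers)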
+
+            # get the database instance for getting the SMK
+            from IOVDbSvc.IOVDbSvcConf import IOVDbSvc
+            #print "svcMgr.IOVDbSvc.properties() =",svcMgr.IOVDbSvc.properties()
+            DBInstance = svcMgr.IOVDbSvc.properties()['DBInstance']
+
+            # try to connect to the COOL database
+            from PyCool import cool
+            from CoolConvUtilities.AtlCoolLib import indirectOpen
+            connstring = "COOLONL_TRIGGER/"+str(DBInstance)
+            trigDB=indirectOpen(connstring,oracle='True')
+            if trigDB is None:
+                print "Unable to connect to",connstring
+            else:
+                # get the SMK out of COOL
+                folder=trigDB.getFolder('/TRIGGER/HLT/HltConfigKeys')
+                retrieved_obj=folder.findObject(runiov,0)
+                retrieved_payload=retrieved_obj.payload()
+                retrieved_format=retrieved_payload['MasterConfigurationKey']
+                SuperMasterKey = int(retrieved_format)
+
+                print "SuperMasterKey =",SuperMasterKey
+
+                # We now have the required input info. Use mam to get the appropriate MCK
+                HLTMonFlags.MCK.StoredValue = mam.get_mck_id_from_smk(SuperMasterKey)
+                print "Have used Menu-aware monitoring to extract MCK",HLTMonFlags.MCK.StoredValue,"linked to SMK",SuperMasterKey
+
+                # If the MCK is > 0 then apply it, otherwise use the default tool configurations
+                if HLTMonFlags.MCK.StoredValue > 0:
+                    print "Using trigger Monitoring Configuration Key (MCK)",HLTMonFlags.MCK.StoredValue
+                    mam.apply_mck( HLTMonFlags.MCK.StoredValue )
+
+############################################
+
 HLTMonManager.FileKey = "GLOBAL"
 
 print HLTMonManager;