diff --git a/Tools/PyUtils/bin/meta-reader.py b/Tools/PyUtils/bin/meta-reader.py
index 4cc96927c85d86d2ce8e4910c47b75fb4ac76775..32a7388e5e64ffbbe334981d2cb931dacd0681ce 100755
--- a/Tools/PyUtils/bin/meta-reader.py
+++ b/Tools/PyUtils/bin/meta-reader.py
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 # This script reads metadata from a given file
 
@@ -12,112 +13,149 @@ msg = logging.getLogger('MetaReader')
 
 from PyUtils.MetaReader import read_metadata
 
-def __pretty_print(content, indent=4, fd=sys.stdout, level=0):
-#    print(type(content))
-    if isinstance(content, dict):
-        for key, value in content.items():
-            print >> fd,' ' * indent  * level + str(key)
-            __pretty_print(value, indent, fd, level + 1)
 
-    elif isinstance(content, list):
-        for value in content:
-            __pretty_print(value, indent, fd, level + 1)
+def __tree_print(content, indent = 2, pad = 0, list_max_items = -1, dict_sort=None):
+	s = ''
 
-    elif isinstance(content, tuple):
+	if isinstance(content, dict):
 
-        for value in content:
-            __pretty_print(value, indent, fd, level + 1)
-        
-    else:
-        print >> fd, ' ' * indent * (level + 1) + str(content)
+		if dict_sort == 'key':
+			items = list(sorted(content.items(), key = lambda t: t[0]))
+		elif dict_sort == 'value':
+			items = list(sorted(content.items(), key =  lambda t: t[1]))
+		else:
+			items = list(content.items())
 
+		items_count = len(items)
+		for i in range(items_count):
 
-def __format_output(metadata):
-    for key, value in metadata.items():
-        print('{key:>15}: {value}'.format(key = key, value = value))
+			key, value = items[i]
+
+			last = i == items_count - 1
+
+			if pad == 0:
+				skey = str(key)
+			elif pad < 0:
+				skey = str(key).rjust(-pad)
+			else:
+				skey = str(key).ljust(pad)
+
+			s += ('├' if not last else '└') + '─' * indent + ' ' + skey + ': '
+
+			lines = __tree_print(value, indent=indent, pad = pad, dict_sort = dict_sort, list_max_items = list_max_items).split('\n')
+
+			if len(lines) == 1:
+				s += lines[0] + '\n'
+			else:
+				for line in lines:
+					if line.strip():
+						s += '\n' + ('│' if not last else ' ') + ' ' * indent + ' ' + str(line)
+
+				s += '\n'
+	elif isinstance(content, (list, tuple)) and list_max_items >= 0 and len(content) > list_max_items:
+
+		items = list(content)
+		items_count = len(items)
+
+		for i in range(items_count):
+			value = items[i]
+
+			last = i == items_count - 1
+
+			s += ('├' if not last else '└') + '─' * indent + ' ' + str(value) + '\n'
+	else:
+		s += str(content)
+
+	return s
 
 
 def __main():
-    # Parsing the arguments provided by user
-    parser = argparse.ArgumentParser(description='This script reads metadata from a given file')
-    parser.add_argument('filenames',
-                        nargs = '+',
-                        help= 'The filenames to read. User can provide a single file or a list of files.')
-    parser.add_argument('-v',
-                        '--verbose',
-                        action='store_true',
-                        help='print detailed output on screen')
-    parser.add_argument('-o',
-                        '--output',
-                        metavar='FILE',
-                        default=None,
-                        help="Saves the output in a file. By default, the output is written on the screen (stdout) in a prettier format for better readabiilty.")
-    parser.add_argument('--json',
-                        action='store_true',
-                        help="Sets the output file format as json.")
-    parser.add_argument('--indent',
-                        metavar='N',
-                        type=int,
-                        default=2,
-                        help="Sets the indent spaces in the output either on screen (without -o flag) either on file (with -o flag). By default, uses two spaces as indent.")
-    parser.add_argument('-m',
-                        '--mode',
-                        default= 'lite',
-                        metavar='MODE',
-                        type=str,
-                        choices=['tiny', 'lite', 'full'],
-                        help="This flag provides the user capability to select the amount of metadata retrieved. There three options: "
-                             "tiny (only those values used in PyJobTransforms), "
-                             "lite (same output as dump-athfile) "
-                             "and full ( all  available data found) ")
-    parser.add_argument('-t',
-                        '--type',
-                        default= None,
-                        metavar='TYPE',
-                        type=str,
-                        choices=['POOL', 'BS'],
-                        help="The file type of the input filename. By default, it tries to determine itself the file type of the input.")
-    args = parser.parse_args()
-
-    verbose = args.verbose
-    filenames = args.filenames
-    output = args.output
-    is_json = args.json
-    indent = args.indent
-    mode = args.mode
-    file_type = args.type
-
-    msg.setLevel(logging.INFO if verbose else logging.WARNING)
-    # create a stream handler
-    handler = logging.StreamHandler()
-    handler.setLevel(logging.INFO if verbose else logging.WARNING)
-    # create a logging format
-    formatter = logging.Formatter('%(name)s                       %(levelname)s %(message)s')
-    handler.setFormatter(formatter)
-    # add the handlers to the logger
-    msg.addHandler(handler)
-
-    startTime = time.time()
-    msg.info('Imported headers in: {0} miliseconds'.format((time.time() - startTime) * 1e3)) 
-    msg.info('The output file is: {0}'.format(output))
-
-    metadata = read_metadata(filenames, file_type, mode= mode)
-    
-    if output is None:
-        if is_json:
-            print(json.dumps(metadata, indent=indent))
-        else:
-            __format_output(metadata)
-            # __pretty_print(metadata, indent=indent)
-    else:
-        if is_json:
-            with open(output, 'w') as fd:
-                print >> fd, json.dumps(metadata, indent=indent)
-        else:
-            with open(output, 'w') as fd:
-                __pretty_print(metadata, indent=indent, fd=fd)
-    
-    msg.info('Done!')
+	# Parsing the arguments provided by user
+	parser = argparse.ArgumentParser(description='This script reads metadata from a given file')
+	parser.add_argument('filenames',
+						nargs = '+',
+						help= 'The filenames to read. User can provide a single file or a list of files.')
+	parser.add_argument('-v',
+						'--verbose',
+						action='store_true',
+						help='print detailed output on screen')
+	parser.add_argument('-o',
+						'--output',
+						metavar='FILE',
+						default=None,
+						help="Saves the output in a file. By default, the output is written on the screen (stdout) in a prettier format for better readability.")
+	parser.add_argument('--json',
+						action='store_true',
+						help="Sets the output file format as json.")
+	parser.add_argument('--indent',
+						metavar='N',
+						type=int,
+						default=2,
+						help="Sets the indent spaces in the output either on screen (without -o flag) either on file (with -o flag). By default, uses two spaces as indent.")
+	parser.add_argument('-m',
+						'--mode',
+						default= 'lite',
+						metavar='MODE',
+						type=str,
+						choices=['tiny', 'lite', 'full'],
+						help="This flag provides the user capability to select the amount of metadata retrieved. There are three options: "
+							 "tiny (only those values used in PyJobTransforms), "
+							 "lite (same output as dump-athfile) "
+							 "and full ( all  available data found) ")
+	parser.add_argument('-t',
+						'--type',
+						default= None,
+						metavar='TYPE',
+						type=str,
+						choices=['POOL', 'BS'],
+						help="The file type of the input filename. By default, it tries to determine itself the file type of the input.")
+	args = parser.parse_args()
+
+	verbose = args.verbose
+	filenames = args.filenames
+	output = args.output
+	is_json = args.json
+	indent = args.indent
+	mode = args.mode
+	file_type = args.type
+
+	msg.setLevel(logging.INFO if verbose else logging.WARNING)
+	# create a stream handler
+	handler = logging.StreamHandler()
+	handler.setLevel(logging.INFO if verbose else logging.WARNING)
+	# create a logging format
+	formatter = logging.Formatter('%(name)s                       %(levelname)s %(message)s')
+	handler.setFormatter(formatter)
+	# add the handlers to the logger
+	msg.addHandler(handler)
+
+	startTime = time.time()
+	msg.info('Imported headers in: {0} milliseconds'.format((time.time() - startTime) * 1e3))
+	msg.info('The output file is: {0}'.format(output))
+
+	metadata = read_metadata(filenames, file_type, mode= mode)
+
+	if output is None:
+		if is_json:
+			print(json.dumps(metadata, indent= indent))
+		else:
+			print(__tree_print(metadata, indent= indent, pad= 18, dict_sort='key', list_max_items = 8))
+
+	else:
+		if is_json:
+			with open(output, 'w') as fd:
+				print >> fd, json.dumps(metadata, indent=indent)
+		else:
+			with open(output, 'w') as fd:
+				print >> fd, __tree_print(metadata, indent = indent, pad = 18, dict_sort = 'key', list_max_items = 8)
+
+	msg.info('Done!')
 
 if __name__ == '__main__':
-    __main()
+	__main()
+
+
+
+
+
+
diff --git a/Tools/PyUtils/python/MetaReader.py b/Tools/PyUtils/python/MetaReader.py
index ebea1ac30ee48e3016cb249b99dd792c1451582f..ff805952b0e1594d27fa4ac265247d0e04ec1054 100644
--- a/Tools/PyUtils/python/MetaReader.py
+++ b/Tools/PyUtils/python/MetaReader.py
@@ -12,9 +12,9 @@ from PyCool import coral
 def read_metadata(filenames, file_type=None, mode='lite'):
     """
     This tool is independent of Athena framework and returns the metadata from a given file.
-    :param filename: the input file from which metadata needs to be extracted.
+    :param filenames: the input file(s) from which metadata needs to be extracted.
     :param file_type: the type of file. POOL or BS (bytestream: RAW, DRAW) files.
-    :param full: if true, will return all metadata associated with the filename. By default, is false and this will
+    :param mode: if 'full', will return all metadata associated with the filename. By default, mode is 'lite' and this will
     return a "lite" version which have only the following keys: 'file_guid', 'file_size', 'file_type', 'nentries'.
     :return: a dictionary of metadata for the given input file.
     """