From 20b626af6b7d6a06b4f37ee3543218798e4aa9ce Mon Sep 17 00:00:00 2001
From: Piotr Sowinski <piotr.sowinski@cern.ch>
Date: Thu, 8 Feb 2018 17:19:23 +0100
Subject: [PATCH 1/2] NXCALS-1467 Initial cleanup after Sonar checks

---
 .../nxcals/common/config/SparkContext.java    | 17 ++++++++++++++++
 .../common/config/SparkPropertiesConfig.java  | 13 ++++++------
 .../nxcals/common/spark/SparkUtils.java       | 16 ++++++++++++---
 .../cern/accsoft/nxcals/common/Schemas.java   |  6 +++---
 .../accsoft/nxcals/common/SystemFields.java   |  6 +++---
 .../avro/BytesToGenericRecordDecoder.java     |  6 +++---
 .../avro/GenericRecordToBytesEncoder.java     | 10 +++++++---
 .../common/concurrent/AutoCloseableLock.java  |  4 ++--
 .../common/converters/TimeConverterImpl.java  |  4 ++--
 .../common/domain/impl/EntityDataImpl.java    |  1 -
 .../common/domain/impl/ResourceDataImpl.java  |  2 --
 .../common/security/KerberosRelogin.java      |  6 +++---
 .../nxcals/common/utils/AvroUtils.java        |  4 ++--
 .../nxcals/common/utils/HdfsPathDecoder.java  | 10 ++++++++--
 .../accsoft/nxcals/common/utils/Lazy.java     |  4 ++--
 .../common/utils/ObjectSizeCalculator.java    | 20 +++++++++----------
 .../nxcals/common/utils/TimeMeasure.java      |  1 -
 .../accsoft/nxcals/common/utils/Utils.java    |  8 ++++----
 18 files changed, 85 insertions(+), 53 deletions(-)

diff --git a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java
index 63e8281f06..e6d0133869 100644
--- a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java
+++ b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java
@@ -15,22 +15,39 @@ import org.springframework.context.annotation.DependsOn;
 @Configuration
 public class SparkContext {
 
+    /**
+     * Creates the Spark configuration.
+     * @param config the Spark properties config
+     * @return the configuration for a Spark application
+     */
     @Bean
     @DependsOn("kerberos")
     public SparkConf createSparkConf(SparkPropertiesConfig config) {
         return SparkUtils.createSparkConf(config);
     }
 
+    /**
+     * Creates the Spark session.
+     * @param conf the configuration for a Spark application
+     * @return the entry point to programming Spark with the Dataset and DataFrame API
+     */
     @Bean
     @DependsOn("kerberos")
     public SparkSession createSparkSession(SparkConf conf) {
         //FIXME - currently there are problems with
         return SparkSession.builder().config(conf)./*enableHiveSupport().*/
                 //FIXME - to be understood what is this directory really???
+                        // Use spark.sql.warehouse.dir Spark property to change the location of Hive's
+                        // hive.metastore.warehouse.dir property, i.e. the location of the Hive local/embedded metastore database (using Derby).
                         config("spark.sql.warehouse.dir", "/tmp/nxcals/spark/warehouse-" + System.currentTimeMillis()).
                         getOrCreate();
     }
 
+    /**
+     * Creates the Spark properties config
+     * from the application name, master type, list of jars and list of properties.
+     * @return the Spark properties config
+     */
     @ConfigurationProperties(prefix = "spark")
     @Bean
     public SparkPropertiesConfig createSparkPropertiesConfig() {
diff --git a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java
index 22d28c08df..981d184a5a 100644
--- a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java
+++ b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java
@@ -24,24 +24,23 @@ public class SparkPropertiesConfig {
         return this.appName;
     }
 
-    public void setAppName(String _appName) {
-        this.appName = _appName;
+    public void setAppName(String newAppName) {
+        this.appName = newAppName;
     }
 
     public String getMasterType() {
         return this.masterType;
     }
 
-    public void setMasterType(String _masterType) {
-        this.masterType = _masterType;
+    public void setMasterType(String newMasterType) {
+        this.masterType = newMasterType;
     }
 
     public String[] getJars() {
         return this.jars;
     }
 
-    public void setJars(String[] _jars) {
-        this.jars = _jars;
+    public void setJars(String[] newJars) {
+        this.jars = newJars;
     }
-
 }
diff --git a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java
index 1f9582edf1..e0b65c7cc8 100644
--- a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java
+++ b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java
@@ -6,10 +6,20 @@ import org.apache.spark.SparkConf;
 /**
  * Created by jwozniak on 05/04/17.
  */
-final public class SparkUtils {
-    public final static SparkConf createSparkConf(SparkPropertiesConfig config) {
+public final class SparkUtils {
+
+    private SparkUtils() {
+        throw new IllegalStateException("SparkUtils class");
+    }
+
+    /**
+     * Creates the Spark configuration.
+     * @param config the Spark properties config
+     * @return the configuration for a Spark application
+     */
+    public static SparkConf createSparkConf(SparkPropertiesConfig config) {
         SparkConf conf = new SparkConf().setAppName(config.getAppName()).setMaster(config.getMasterType());
-        config.getProperties().forEach((k, v) -> conf.set(k, v));
+        config.getProperties().forEach(conf::set);
         final String[] jars = config.getJars();
         if (jars != null && jars.length > 0) {
             conf.setJars(jars);
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java
index aa2928af0e..79410ed38a 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java
@@ -37,9 +37,9 @@ public enum Schemas {
     private final Schema schema;
     private final String fieldName;
 
-    Schemas(Schema _schema, String _fieldName) {
-        this.schema = _schema;
-        this.fieldName = _fieldName;
+    Schemas(Schema newSchema, String newFieldName) {
+        this.schema = newSchema;
+        this.fieldName = newFieldName;
     }
 
     public Schema getSchema() {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java
index f5d6c76967..68b948d0b4 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java
@@ -32,8 +32,8 @@ public enum SystemFields {
 
     private final String value;
 
-    SystemFields(String _value) {
-        this.value = _value;
+    SystemFields(String newValue) {
+        this.value = newValue;
     }
 
     public String getValue() {
@@ -41,7 +41,7 @@ public enum SystemFields {
     }
 
     public static Set<String> getAllSystemFieldNames() {
-        return Arrays.stream(SystemFields.values()).map(e -> e.getValue()).collect(Collectors.toSet());
+        return Arrays.stream(SystemFields.values()).map(SystemFields::getValue).collect(Collectors.toSet());
     }
 
 }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java
index a823cb979c..d76776163f 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java
@@ -34,7 +34,7 @@ public class BytesToGenericRecordDecoder implements Function<byte[], GenericReco
     // FIXME - we should see if this caching is ok here. baseSchemaCache holds the schemas for the client record.
     // (jwozniak)
     // Remarks (msobiesz) - it's duplication of what is cached in the schema provider
-    private final ConcurrentHashMap<Long, Schema> schemaCache = new ConcurrentHashMap<Long, Schema>();
+    private final ConcurrentHashMap<Long, Schema> schemaCache = new ConcurrentHashMap<>();
 
     public BytesToGenericRecordDecoder(Function<Long, SchemaData> schemaProvider) {
         this.schemaProvider = Objects.requireNonNull(schemaProvider);
@@ -49,7 +49,7 @@ public class BytesToGenericRecordDecoder implements Function<byte[], GenericReco
                 schemaKey -> new Schema.Parser().parse(this.schemaProvider.apply(schemaId).getSchemaJson()));
 
         if (schema == null) {
-            throw new RuntimeException("Unknown schema data for id " + schemaId);
+            throw new IllegalArgumentException("Unknown schema data for id " + schemaId);
         }
         return this.decodeData(buffer, schema);
     }
@@ -69,7 +69,7 @@ public class BytesToGenericRecordDecoder implements Function<byte[], GenericReco
     private ByteBuffer getByteBuffer(byte[] data) {
         ByteBuffer buffer = ByteBuffer.wrap(data);
         if (buffer.get() != MAGIC_BYTE) {
-            throw new RuntimeException("Unknown magic byte!");
+            throw new IllegalArgumentException("Unknown magic byte!");
         }
         return buffer;
     }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java
index fb3875119e..d42824f879 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java
@@ -19,6 +19,10 @@ import java.nio.ByteBuffer;
 public class GenericRecordToBytesEncoder {
     private static final byte MAGIC_BYTE = 0x0;
 
+    private GenericRecordToBytesEncoder() {
+        throw new IllegalStateException("GenericRecordToBytesEncoder class");
+    }
+
     public static byte[] convertToBytes(GenericRecord record) {
         try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
             out.write(MAGIC_BYTE);
@@ -33,10 +37,10 @@ public class GenericRecordToBytesEncoder {
         }
     }
 
-    private static long getSchemaIdFrom(GenericRecord _record) {
-        Object id = _record.get(Schemas.SCHEMA_ID.getFieldName());
+    private static long getSchemaIdFrom(GenericRecord newRecord) {
+        Object id = newRecord.get(Schemas.SCHEMA_ID.getFieldName());
         if (!(id instanceof Long)) {
-            throw new RuntimeException("Cannot find schema id in record");
+            throw new IllegalStateException("Cannot find schema id in record");
         }
         return (long) id;
     }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java
index b8dd58aca6..7b06467817 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java
@@ -13,8 +13,8 @@ import java.util.concurrent.locks.Lock;
 public class AutoCloseableLock implements AutoCloseable {
     private final Lock lock;
 
-    private AutoCloseableLock(Lock _lock) {
-        this.lock = Objects.requireNonNull(_lock);
+    private AutoCloseableLock(Lock newLock) {
+        this.lock = Objects.requireNonNull(newLock);
     }
 
     public static AutoCloseableLock getFor(Lock lock) {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java
index 62e48109e5..6a0fff9760 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java
@@ -22,10 +22,10 @@ public class TimeConverterImpl implements TimeConverter {
             if (LONG_TYPE.equals(field.schema().getType())) {
                 return (Long) record.get(field.pos());
             } else {
-                throw new RuntimeException("Unsupported field type for time conversion " + field);
+                throw new IllegalArgumentException("Unsupported field type for time conversion " + field);
             }
         } else {
-            throw new RuntimeException("Unsupported field list for time conversion " + fields);
+            throw new IllegalArgumentException("Unsupported field list for time conversion " + fields);
         }
     }
 }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java
index b3d1972bb9..8091fcbf52 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java
@@ -16,7 +16,6 @@ import lombok.EqualsAndHashCode;
 import lombok.NonNull;
 
 import java.util.Collections;
-import java.util.Objects;
 import java.util.SortedSet;
 
 /**
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java
index aad760fa22..9b73c8ebab 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java
@@ -15,8 +15,6 @@ import java.util.Objects;
 import java.util.Set;
 
 /**
- * FIXME - missing docs, what is Long here in the Map<Long,...>?
- *
  * @author ntsvetko
  */
 @ToString
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java
index e035c05916..2129d4e240 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java
@@ -76,8 +76,8 @@ public class KerberosRelogin {
             UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
 
             LOGGER.info("Logged in from keytab to hadoop as {}", loginUser);
-        } catch (Throwable e) {
-            LOGGER.error("Error while logging user from keytab ", e);
+        } catch (Exception e) {
+            LOGGER.error("Error while relogging user from keytab ", e);
         }
     }
 
@@ -86,7 +86,7 @@ public class KerberosRelogin {
             UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
             LOGGER.info("Re-Login attempted from keytab to hadoop as {}", loginUser);
             loginUser.checkTGTAndReloginFromKeytab();
-        } catch (Throwable e) {
+        } catch (Exception e) {
             LOGGER.error("Error while relogging user from keytab ", e);
         }
     }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/AvroUtils.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/AvroUtils.java
index be8d4d7b27..879849f96d 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/AvroUtils.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/AvroUtils.java
@@ -22,7 +22,7 @@ import static org.apache.avro.Schema.Type.RECORD;
  */
 public final class AvroUtils {
     private AvroUtils() {
-        throw new RuntimeException("No instances allowed");
+        throw new IllegalStateException("No instances allowed");
     }
 
     /**
@@ -78,7 +78,7 @@ public final class AvroUtils {
                 continue;
             }
             if (!f.equals(field)) {
-                throw new RuntimeException("Conflict between schemas detected with field " + f + " and field " + field);
+                throw new IllegalStateException("Conflict between schemas detected with field " + f + " and field " + field);
             }
         }
     }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java
index 8e3c9095a6..420600fc39 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java
@@ -13,6 +13,12 @@ import java.util.regex.Pattern;
  * @author ntsvetko
  */
 public abstract class HdfsPathDecoder {
+
+
+    private HdfsPathDecoder() {
+        throw new IllegalStateException("HdfsPathDecoder class");
+    }
+
     /**
      * Finds systemId from provided NXCALS HDFS path
      *
@@ -76,7 +82,7 @@ public abstract class HdfsPathDecoder {
         formatter.parse(path.getFileName().toString());
     }
 
-    private static void verifyIdDirectories(Path path) throws IllegalArgumentException {
+    private static void verifyIdDirectories(Path path) {
         String[] pathDirs = path.toString().split(File.separator);
         Pattern datePattern = Pattern.compile("^\\d{4}-\\d{2}-\\d{2}$");
 
@@ -85,7 +91,7 @@ public abstract class HdfsPathDecoder {
 
         // Skips system directories and filter out date directories
         long numIds = Arrays.stream(pathDirs).skip(numSystemDirs).filter(v -> !datePattern.matcher(v).matches())
-                .mapToLong(v -> Long.valueOf(v)).count();
+                .mapToLong(Long::valueOf).count();
 
         // Expect to have 3 ID directories in the path
         if (numIds != 3) {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java
index abe7c4eebb..c94fad92b9 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java
@@ -16,8 +16,8 @@ public class Lazy<T> implements Supplier<T> {
     private Supplier<T> supplier;
     private T value;
 
-    public Lazy(Supplier<T> _supplier) {
-        this.supplier = Objects.requireNonNull(_supplier);
+    public Lazy(Supplier<T> newSupplier) {
+        this.supplier = Objects.requireNonNull(newSupplier);
     }
 
     @Override
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java
index 4c6203f039..631a566cf7 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java
@@ -263,31 +263,31 @@ public class ObjectSizeCalculator {
         private final Field[] referenceFields;
 
         public ClassSizeInfo(Class<?> clazz) {
-            long fieldsSize = 0;
-            final List<Field> referenceFields = new LinkedList<Field>();
+            long newFieldsSize = 0;
+            final List<Field> newReferenceFields = new LinkedList<>();
             for (Field f : clazz.getDeclaredFields()) {
                 if (Modifier.isStatic(f.getModifiers())) {
                     continue;
                 }
                 final Class<?> type = f.getType();
                 if (type.isPrimitive()) {
-                    fieldsSize += getPrimitiveFieldSize(type);
+                    newFieldsSize += getPrimitiveFieldSize(type);
                 } else {
                     f.setAccessible(true);
-                    referenceFields.add(f);
-                    fieldsSize += ObjectSizeCalculator.this.referenceSize;
+                    newReferenceFields.add(f);
+                    newFieldsSize += ObjectSizeCalculator.this.referenceSize;
                 }
             }
             final Class<?> superClass = clazz.getSuperclass();
             if (superClass != null) {
                 final ClassSizeInfo superClassInfo = ObjectSizeCalculator.this.classSizeInfos.getUnchecked(superClass);
-                fieldsSize += roundTo(superClassInfo.fieldsSize, ObjectSizeCalculator.this.superclassFieldPadding);
-                referenceFields.addAll(Arrays.asList(superClassInfo.referenceFields));
+                newFieldsSize += roundTo(superClassInfo.fieldsSize, ObjectSizeCalculator.this.superclassFieldPadding);
+                newReferenceFields.addAll(Arrays.asList(superClassInfo.referenceFields));
             }
-            this.fieldsSize = fieldsSize;
-            this.objectSize = roundTo(ObjectSizeCalculator.this.objectHeaderSize + fieldsSize,
+            this.fieldsSize = newFieldsSize;
+            this.objectSize = roundTo(ObjectSizeCalculator.this.objectHeaderSize + newFieldsSize,
                     ObjectSizeCalculator.this.objectPadding);
-            this.referenceFields = referenceFields.toArray(new Field[referenceFields.size()]);
+            this.referenceFields = newReferenceFields.toArray(new Field[newReferenceFields.size()]);
         }
 
         void visit(Object obj, ObjectSizeCalculator calc) {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java
index af6970d244..5e548336af 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java
@@ -21,7 +21,6 @@ public class TimeMeasure {
         long start = System.nanoTime();
         Object retVal = pjp.proceed();
         long stop = System.nanoTime() - start;
-        //System.err.println("Measure " + stop);
 
         LOGGER.trace("Method {} took {} us", pjp, stop / 1000);
         return retVal;
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java
index 64ca04c9a4..8fe6975756 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java
@@ -22,7 +22,7 @@ public final class Utils {
     private static final ObjectMapper mapper = new ObjectMapper();
 
     private Utils() {
-        throw new RuntimeException("No instances allowed");
+        throw new IllegalStateException("No instances allowed");
     }
 
     public static String encodeUrl(String param) {
@@ -41,7 +41,7 @@ public final class Utils {
         try {
             return sup.get();
         } catch (UnsupportedEncodingException e) {
-            throw new RuntimeException(e.getMessage(), e);
+            throw new IllegalStateException(e.getMessage(), e);
         }
 
     }
@@ -56,7 +56,7 @@ public final class Utils {
             return mapper.readValue(json, new TypeReference<Map<String, String>>() {/**/
             });
         } catch (Exception e) {
-            throw new RuntimeException("Exception while converting json to Map<String,String> json=" + json, e);
+            throw new IllegalArgumentException("Exception while converting json to Map<String,String> json=" + json, e);
         }
 
     }
@@ -69,7 +69,7 @@ public final class Utils {
         try {
             return mapper.writeValueAsString(entityKeyValues);
         } catch (JsonProcessingException e) {
-            throw new RuntimeException("Exception while converting Map<String,String> to json=" + entityKeyValues, e);
+            throw new IllegalArgumentException("Exception while converting Map<String,String> to json=" + entityKeyValues, e);
         }
     }
 }
-- 
GitLab


From 9717953222b23c0459a9dc06d50adc3e6f2c7e65 Mon Sep 17 00:00:00 2001
From: Piotr Sowinski <piotr.sowinski@cern.ch>
Date: Fri, 23 Mar 2018 15:35:00 +0100
Subject: [PATCH 2/2] NXCALS-1467 Fixing conflicts with develop 2

---
 .../common/domain/impl/EntityDataImpl.java    | 29 +++++++++----------
 1 file changed, 13 insertions(+), 16 deletions(-)

diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java
index 8091fcbf52..7ad62bcdd4 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/EntityDataImpl.java
@@ -1,13 +1,9 @@
-/**
+/*
  * Copyright (c) 2016 European Organisation for Nuclear Research (CERN), All Rights Reserved.
  */
 package cern.accsoft.nxcals.common.domain.impl;
 
-import cern.accsoft.nxcals.common.domain.EntityData;
-import cern.accsoft.nxcals.common.domain.EntityHistoryData;
-import cern.accsoft.nxcals.common.domain.PartitionData;
-import cern.accsoft.nxcals.common.domain.SchemaData;
-import cern.accsoft.nxcals.common.domain.SystemData;
+import cern.accsoft.nxcals.common.domain.*;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonManagedReference;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -16,6 +12,8 @@ import lombok.EqualsAndHashCode;
 import lombok.NonNull;
 
 import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
 import java.util.SortedSet;
 
 /**
@@ -28,7 +26,7 @@ public class EntityDataImpl implements EntityData {
     private final long recVersion;
     private final long id;
     @NonNull
-    private final String entityKeyValues;
+    private final Map<String, Object> entityKeyValues;
     @NonNull
     private final SchemaData schemaData;
     @NonNull
@@ -41,7 +39,8 @@ public class EntityDataImpl implements EntityData {
     @JsonManagedReference
     private final SortedSet<EntityHistoryData> entityHistoryData;
 
-    public EntityDataImpl(@JsonProperty("id") long id, @JsonProperty("entityKeyValues") String entityKeyValues,
+    public EntityDataImpl(@JsonProperty("id") long id,
+            @JsonProperty("entityKeyValues") Map<String, Object> entityKeyValues,
             @JsonProperty("systemData") SystemData systemData,
             @JsonProperty("partitionData") PartitionData partitionData,
             @JsonProperty("schemaData") SchemaData schemaData,
@@ -49,13 +48,12 @@ public class EntityDataImpl implements EntityData {
             @JsonProperty(value = "lockUntilEpochNanos") Long lockUntilEpochNanos,
             @JsonProperty("version") long recVersion) {
         this.id = id;
-        this.entityKeyValues = entityKeyValues;
-        this.systemData = systemData;
-        this.partitionData = partitionData;
-        this.schemaData = schemaData;
+        this.entityKeyValues = Objects.requireNonNull(entityKeyValues);
+        this.systemData = Objects.requireNonNull(systemData);
+        this.partitionData = Objects.requireNonNull(partitionData);
+        this.schemaData = Objects.requireNonNull(schemaData);
         this.entityHistoryData = entityHistoryData;
         this.lockUntilEpochNanos = lockUntilEpochNanos;
-
         this.recVersion = recVersion;
     }
 
@@ -74,7 +72,7 @@ public class EntityDataImpl implements EntityData {
         @NonNull
         private final EntityData entityData;
 
-        private String entityKeyValues;
+        private Map<String, Object> entityKeyValues;
 
         private Long lockUntilEpochNanos;
 
@@ -89,7 +87,7 @@ public class EntityDataImpl implements EntityData {
          * @param entityKeyValues
          * @return
          */
-        public Builder withNewEntityKeyValues(String entityKeyValues) {
+        public Builder withNewEntityKeyValues(Map<String, Object> entityKeyValues) {
             this.entityKeyValues = entityKeyValues;
             return this;
         }
@@ -122,6 +120,5 @@ public class EntityDataImpl implements EntityData {
                     entityData.getPartitionData(), entityData.getSchemaData(), entityData.getEntityHistoryData(),
                     lockUntilEpochNanos, entityData.getRecVersion());
         }
-
     }
 }
-- 
GitLab