diff --git a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java
index 63e8281f06e84a7b99756b20c1bb6fab8b173633..e6d0133869e6aea17bdbe9d3922f4678b9d107c8 100644
--- a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java
+++ b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkContext.java
@@ -15,22 +15,39 @@ import org.springframework.context.annotation.DependsOn;
 
 @Configuration
 public class SparkContext {
+    /**
+     * Creates the Spark configuration.
+     * @param config Spark properties config
+     * @return the configuration for a Spark application
+     */
     @Bean
     @DependsOn("kerberos")
     public SparkConf createSparkConf(SparkPropertiesConfig config) {
        return SparkUtils.createSparkConf(config);
     }
 
+    /**
+     * Creates the Spark session.
+     * @param conf the configuration for a Spark application
+     * @return the entry point to programming Spark with the Dataset and DataFrame API
+     */
     @Bean
     @DependsOn("kerberos")
     public SparkSession createSparkSession(SparkConf conf) {
         //FIXME - currently there are problems with
         return SparkSession.builder().config(conf)./*enableHiveSupport().*/
                 //FIXME - to be understood what is this directory really???
+                // Use the spark.sql.warehouse.dir Spark property to change the location of Hive's
+                // hive.metastore.warehouse.dir property, i.e. the location of the Hive local/embedded metastore database (using Derby).
                 config("spark.sql.warehouse.dir", "/tmp/nxcals/spark/warehouse-" + System.currentTimeMillis()).
                 getOrCreate();
     }
 
+    /**
+     * Creates the Spark properties config
+     * from the application name, master type, list of jars, and list of properties.
+     * @return Spark properties config
+     */
     @ConfigurationProperties(prefix = "spark")
     @Bean
     public SparkPropertiesConfig createSparkPropertiesConfig() {
diff --git a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java
index 22d28c08df6a49568d7a9e5d706d1fe900360245..981d184a5a7ed6f497a51fbf279f121ed66135f0 100644
--- a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java
+++ b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/config/SparkPropertiesConfig.java
@@ -24,24 +24,23 @@ public class SparkPropertiesConfig {
         return this.appName;
     }
 
-    public void setAppName(String _appName) {
-        this.appName = _appName;
+    public void setAppName(String newAppName) {
+        this.appName = newAppName;
     }
 
     public String getMasterType() {
         return this.masterType;
     }
 
-    public void setMasterType(String _masterType) {
-        this.masterType = _masterType;
+    public void setMasterType(String newMasterType) {
+        this.masterType = newMasterType;
     }
 
     public String[] getJars() {
         return this.jars;
     }
 
-    public void setJars(String[] _jars) {
-        this.jars = _jars;
+    public void setJars(String[] newJars) {
+        this.jars = newJars;
     }
-
 }
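Note: a minimal hand-wired sketch of the two beans above, in place of the @ConfigurationProperties(prefix = "spark") binding. The class name and property values are illustrative only, and it assumes SparkPropertiesConfig initializes its properties map to empty (only the getter is visible in this diff):

    import org.apache.spark.SparkConf;
    import org.apache.spark.sql.SparkSession;
    import cern.accsoft.nxcals.common.config.SparkPropertiesConfig;
    import cern.accsoft.nxcals.common.spark.SparkUtils;

    public class SparkConfDemo {                      // hypothetical demo class
        public static void main(String[] args) {
            SparkPropertiesConfig config = new SparkPropertiesConfig();
            config.setAppName("nxcals-demo");         // illustrative values
            config.setMasterType("local[*]");
            SparkConf conf = SparkUtils.createSparkConf(config);
            SparkSession session = SparkSession.builder().config(conf).getOrCreate();
            session.stop();
        }
    }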
diff --git a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java
index 1f9582edf19f9e872d38df5fe34e6108f6452be4..e0b65c7cc876e7e68508855c8e2391567ab241a5 100644
--- a/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java
+++ b/accsoft-nxcals-common-spark/src/main/java/cern/accsoft/nxcals/common/spark/SparkUtils.java
@@ -6,10 +6,20 @@ import org.apache.spark.SparkConf;
 /**
  * Created by jwozniak on 05/04/17.
  */
-final public class SparkUtils {
-    public final static SparkConf createSparkConf(SparkPropertiesConfig config) {
+public final class SparkUtils {
+
+    private SparkUtils() {
+        throw new IllegalStateException("SparkUtils class");
+    }
+
+    /**
+     * Creates the Spark configuration.
+     * @param config Spark properties config
+     * @return the configuration for a Spark application
+     */
+    public static SparkConf createSparkConf(SparkPropertiesConfig config) {
         SparkConf conf = new SparkConf().setAppName(config.getAppName()).setMaster(config.getMasterType());
-        config.getProperties().forEach((k, v) -> conf.set(k, v));
+        config.getProperties().forEach(conf::set);
         final String[] jars = config.getJars();
         if (jars != null && jars.length > 0) {
             conf.setJars(jars);
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java
index ce68ce58f467c96dde091e8efef6b41ce639bf51..79410ed38a2e1cdf0f7278cc307116481de5d827 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/Schemas.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Copyright (c) 2016 European Organisation for Nuclear Research (CERN), All Rights Reserved.
  */
 package cern.accsoft.nxcals.common;
@@ -37,9 +37,9 @@ public enum Schemas {
     private final Schema schema;
     private final String fieldName;
 
-    Schemas(Schema sch, String fName) {
-        this.schema = sch;
-        this.fieldName = fName;
+    Schemas(Schema newSchema, String newFieldName) {
+        this.schema = newSchema;
+        this.fieldName = newFieldName;
     }
 
     public Schema getSchema() {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java
index f5d6c76967b6f42968ba6f2d3d09714e9b517092..68b948d0b4b0a83b22f3c6070336628ac15866b5 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/SystemFields.java
@@ -32,8 +32,8 @@ public enum SystemFields {
 
     private final String value;
 
-    SystemFields(String _value) {
-        this.value = _value;
+    SystemFields(String newValue) {
+        this.value = newValue;
     }
 
     public String getValue() {
@@ -41,7 +41,7 @@ public enum SystemFields {
     }
 
     public static Set<String> getAllSystemFieldNames() {
-        return Arrays.stream(SystemFields.values()).map(e -> e.getValue()).collect(Collectors.toSet());
+        return Arrays.stream(SystemFields.values()).map(SystemFields::getValue).collect(Collectors.toSet());
     }
 }
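Note: the lambda-to-method-reference changes in SparkUtils and SystemFields are behavior-preserving; both forms compile to the same functional interface. Side by side:

    // before
    config.getProperties().forEach((k, v) -> conf.set(k, v));
    // after - identical behavior, one less level of indirection to read
    config.getProperties().forEach(conf::set);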
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java
index a823cb979cad590e7052f5bf2128698fe3100617..d76776163f1f150075dbfac1c795e4a3ae1994a2 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/BytesToGenericRecordDecoder.java
@@ -34,7 +34,7 @@ public class BytesToGenericRecordDecoder implements Function<byte[], GenericReco
     // FIXME - we should see if this caching is ok here. baseSchemaCache holds the schemas for the client record.
     // (jwozniak)
     // Remarks (msobiesz) - it's duplication of what is cached in the schema provider
-    private final ConcurrentHashMap<Long, Schema> schemaCache = new ConcurrentHashMap<Long, Schema>();
+    private final ConcurrentHashMap<Long, Schema> schemaCache = new ConcurrentHashMap<>();
 
     public BytesToGenericRecordDecoder(Function<Long, SchemaData> schemaProvider) {
         this.schemaProvider = Objects.requireNonNull(schemaProvider);
@@ -49,7 +49,7 @@ public class BytesToGenericRecordDecoder implements Function<byte[], GenericReco
                 schemaKey -> new Schema.Parser().parse(this.schemaProvider.apply(schemaId).getSchemaJson()));
 
         if (schema == null) {
-            throw new RuntimeException("Unknown schema data for id " + schemaId);
+            throw new IllegalArgumentException("Unknown schema data for id " + schemaId);
         }
         return this.decodeData(buffer, schema);
     }
@@ -69,7 +69,7 @@ public class BytesToGenericRecordDecoder implements Function<byte[], GenericReco
     private ByteBuffer getByteBuffer(byte[] data) {
         ByteBuffer buffer = ByteBuffer.wrap(data);
         if (buffer.get() != MAGIC_BYTE) {
-            throw new RuntimeException("Unknown magic byte!");
+            throw new IllegalArgumentException("Unknown magic byte!");
         }
         return buffer;
     }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java
index fb3875119e1eeff454bb239b4f6ed3321cd13d6c..d42824f879d7f9196ed1f3b7844b541a9e44b456 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/avro/GenericRecordToBytesEncoder.java
@@ -19,6 +19,10 @@ import java.nio.ByteBuffer;
 public class GenericRecordToBytesEncoder {
     private static final byte MAGIC_BYTE = 0x0;
 
+    private GenericRecordToBytesEncoder() {
+        throw new IllegalStateException("GenericRecordToBytesEncoder class");
+    }
+
     public static byte[] convertToBytes(GenericRecord record) {
         try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
             out.write(MAGIC_BYTE);
@@ -33,10 +37,10 @@ public class GenericRecordToBytesEncoder {
         }
     }
 
-    private static long getSchemaIdFrom(GenericRecord _record) {
-        Object id = _record.get(Schemas.SCHEMA_ID.getFieldName());
+    private static long getSchemaIdFrom(GenericRecord newRecord) {
+        Object id = newRecord.get(Schemas.SCHEMA_ID.getFieldName());
         if (!(id instanceof Long)) {
-            throw new RuntimeException("Cannot find schema id in record");
+            throw new IllegalStateException("Cannot find schema id in record");
         }
         return (long) id;
     }
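Note: a sketch of how the encoder/decoder pair above is meant to round-trip. Only the magic-byte write and the schema-id lookup are visible in these hunks, so the byte layout is inferred rather than confirmed by the diff, and fetchSchemaData is a hypothetical registry call:

    // assumed layout: [MAGIC_BYTE 0x0][schema id: long][Avro-encoded payload]
    byte[] bytes = GenericRecordToBytesEncoder.convertToBytes(record);   // record must carry SCHEMA_ID
    BytesToGenericRecordDecoder decoder =
            new BytesToGenericRecordDecoder(id -> fetchSchemaData(id));  // hypothetical schema lookup
    GenericRecord decoded = decoder.apply(bytes);  // IllegalArgumentException on bad magic byte or unknown id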
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java
index b8dd58aca61defa35081a2337ed47a900dd82243..7b064678176c35f01696259d158232c30867cc68 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/concurrent/AutoCloseableLock.java
@@ -13,8 +13,8 @@ import java.util.concurrent.locks.Lock;
 public class AutoCloseableLock implements AutoCloseable {
     private final Lock lock;
 
-    private AutoCloseableLock(Lock _lock) {
-        this.lock = Objects.requireNonNull(_lock);
+    private AutoCloseableLock(Lock newLock) {
+        this.lock = Objects.requireNonNull(newLock);
     }
 
     public static AutoCloseableLock getFor(Lock lock) {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java
index 62e48109e5b9d7bec1b805c81b1f3a575ac70961..6a0fff9760f6dfce02726904c3b3e26dcd3235d8 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/converters/TimeConverterImpl.java
@@ -22,10 +22,10 @@ public class TimeConverterImpl implements TimeConverter {
             if (LONG_TYPE.equals(field.schema().getType())) {
                 return (Long) record.get(field.pos());
             } else {
-                throw new RuntimeException("Unsupported field type for time conversion " + field);
+                throw new IllegalArgumentException("Unsupported field type for time conversion " + field);
             }
         } else {
-            throw new RuntimeException("Unsupported field list for time conversion " + fields);
+            throw new IllegalArgumentException("Unsupported field list for time conversion " + fields);
         }
     }
 }
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java
index aad760fa2247cb208aee2d948b1c7b38ef16b6ce..9b73c8ebab00c24727ee3fbcb665fd9c12ed8dc5 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/domain/impl/ResourceDataImpl.java
@@ -15,8 +15,6 @@ import java.util.Objects;
 import java.util.Set;
 
 /**
- * FIXME - missing docs, what is Long here in the Map<Long,...>?
- *
  * @author ntsvetko
  */
 @ToString
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java
index e035c059168e8561fac1d2ecc043631a6f88f016..2129d4e24014ddb9bc9ffb93bc61f82b8741b420 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/security/KerberosRelogin.java
@@ -76,8 +76,8 @@ public class KerberosRelogin {
             UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
             LOGGER.info("Logged in from keytab to hadoop as {}", loginUser);
-        } catch (Throwable e) {
-            LOGGER.error("Error while logging user from keytab ", e);
+        } catch (Exception e) {
+            LOGGER.error("Error while logging user from keytab ", e);
         }
     }
 
@@ -86,7 +86,7 @@ public class KerberosRelogin {
             UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
             LOGGER.info("Re-Login attempted from keytab to hadoop as {}", loginUser);
             loginUser.checkTGTAndReloginFromKeytab();
-        } catch (Throwable e) {
+        } catch (Exception e) {
             LOGGER.error("Error while relogging user from keytab ", e);
         }
     }
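Note: AutoCloseableLock exists for try-with-resources use. The acquire/release bodies are not part of this hunk, so the pattern below assumes getFor acquires the lock and close() releases it:

    ReentrantLock lock = new ReentrantLock();
    try (AutoCloseableLock acquired = AutoCloseableLock.getFor(lock)) {
        // critical section - lock presumably released automatically on close()
    }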
RuntimeException("No instances allowed"); + throw new IllegalStateException("No instances allowed"); } /** @@ -78,7 +78,7 @@ public final class AvroUtils { continue; } if (!f.equals(field)) { - throw new RuntimeException("Conflict between schemas detected with field " + f + " and field " + field); + throw new IllegalStateException("Conflict between schemas detected with field " + f + " and field " + field); } } } diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java index 8e3c9095a6dd88d15a546cf0f1e73f3164fca71f..420600fc39c4026aae84da5b06f4531b642510c2 100644 --- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java +++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/HdfsPathDecoder.java @@ -13,6 +13,12 @@ import java.util.regex.Pattern; * @author ntsvetko */ public abstract class HdfsPathDecoder { + + + private HdfsPathDecoder() { + throw new IllegalStateException("HdfsPathDecoder class"); + } + /** * Finds systemId from provided NXCALS HDFS path * @@ -76,7 +82,7 @@ public abstract class HdfsPathDecoder { formatter.parse(path.getFileName().toString()); } - private static void verifyIdDirectories(Path path) throws IllegalArgumentException { + private static void verifyIdDirectories(Path path) { String[] pathDirs = path.toString().split(File.separator); Pattern datePattern = Pattern.compile("^\\d{4}-\\d{2}-\\d{2}$"); @@ -85,7 +91,7 @@ public abstract class HdfsPathDecoder { // Skips system directories and filter out date directories long numIds = Arrays.stream(pathDirs).skip(numSystemDirs).filter(v -> !datePattern.matcher(v).matches()) - .mapToLong(v -> Long.valueOf(v)).count(); + .mapToLong(Long::valueOf).count(); // Expect to have 3 ID directories in the path if (numIds != 3) { diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java index abe7c4eebb2fc983d767c1aad389c5442627b04b..c94fad92b93be7acebc9f1c00e33f63b6d61d67b 100644 --- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java +++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Lazy.java @@ -16,8 +16,8 @@ public class Lazy<T> implements Supplier<T> { private Supplier<T> supplier; private T value; - public Lazy(Supplier<T> _supplier) { - this.supplier = Objects.requireNonNull(_supplier); + public Lazy(Supplier<T> newSupplier) { + this.supplier = Objects.requireNonNull(newSupplier); } @Override diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java index 4c6203f039afcfa9b3dc1815ff980e4286a2faa0..631a566cf7cf6c081763d8de80374be4c7e4db17 100644 --- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java +++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java @@ -263,31 +263,31 @@ public class ObjectSizeCalculator { private final Field[] referenceFields; public ClassSizeInfo(Class<?> clazz) { - long fieldsSize = 0; - final List<Field> referenceFields = new LinkedList<Field>(); + long newFieldsSize = 0; + final List<Field> newReferenceFields = new LinkedList<>(); for (Field f : clazz.getDeclaredFields()) { if 
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java
index 4c6203f039afcfa9b3dc1815ff980e4286a2faa0..631a566cf7cf6c081763d8de80374be4c7e4db17 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/ObjectSizeCalculator.java
@@ -263,31 +263,31 @@ public class ObjectSizeCalculator {
         private final Field[] referenceFields;
 
         public ClassSizeInfo(Class<?> clazz) {
-            long fieldsSize = 0;
-            final List<Field> referenceFields = new LinkedList<Field>();
+            long newFieldsSize = 0;
+            final List<Field> newReferenceFields = new LinkedList<>();
             for (Field f : clazz.getDeclaredFields()) {
                 if (Modifier.isStatic(f.getModifiers())) {
                     continue;
                 }
                 final Class<?> type = f.getType();
                 if (type.isPrimitive()) {
-                    fieldsSize += getPrimitiveFieldSize(type);
+                    newFieldsSize += getPrimitiveFieldSize(type);
                 } else {
                     f.setAccessible(true);
-                    referenceFields.add(f);
-                    fieldsSize += ObjectSizeCalculator.this.referenceSize;
+                    newReferenceFields.add(f);
+                    newFieldsSize += ObjectSizeCalculator.this.referenceSize;
                 }
             }
 
             final Class<?> superClass = clazz.getSuperclass();
             if (superClass != null) {
                 final ClassSizeInfo superClassInfo = ObjectSizeCalculator.this.classSizeInfos.getUnchecked(superClass);
-                fieldsSize += roundTo(superClassInfo.fieldsSize, ObjectSizeCalculator.this.superclassFieldPadding);
-                referenceFields.addAll(Arrays.asList(superClassInfo.referenceFields));
+                newFieldsSize += roundTo(superClassInfo.fieldsSize, ObjectSizeCalculator.this.superclassFieldPadding);
+                newReferenceFields.addAll(Arrays.asList(superClassInfo.referenceFields));
             }
 
-            this.fieldsSize = fieldsSize;
-            this.objectSize = roundTo(ObjectSizeCalculator.this.objectHeaderSize + fieldsSize,
+            this.fieldsSize = newFieldsSize;
+            this.objectSize = roundTo(ObjectSizeCalculator.this.objectHeaderSize + newFieldsSize,
                     ObjectSizeCalculator.this.objectPadding);
-            this.referenceFields = referenceFields.toArray(new Field[referenceFields.size()]);
+            this.referenceFields = newReferenceFields.toArray(new Field[newReferenceFields.size()]);
         }
 
         void visit(Object obj, ObjectSizeCalculator calc) {
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java
index af6970d244561af3806f7cd7f86ae3f93cc79886..5e548336af8b2a86e1a01cf5a83ea1289ec807dd 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/TimeMeasure.java
@@ -21,7 +21,6 @@ public class TimeMeasure {
         long start = System.nanoTime();
         Object retVal = pjp.proceed();
         long stop = System.nanoTime() - start;
-        //System.err.println("Measure " + stop);
         LOGGER.trace("Method {} took {} us", pjp, stop / 1000);
 
         return retVal;
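Note: a worked example of the ClassSizeInfo arithmetic above, using assumed 64-bit/compressed-oops values of objectHeaderSize = 12, referenceSize = 4, objectPadding = 8 (these are instance fields of ObjectSizeCalculator, not constants, so the numbers are illustrative only): a class declaring one long and one reference field gets newFieldsSize = 8 + 4 = 12, so objectSize = roundTo(12 + 12, 8) = 24 bytes.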
diff --git a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java
index 64ca04c9a4d4703ecea9086b10006e52b845a782..8fe6975756195df2f25d8e3b65643e5fef8cf0d4 100644
--- a/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java
+++ b/accsoft-nxcals-common/src/main/java/cern/accsoft/nxcals/common/utils/Utils.java
@@ -22,7 +22,7 @@ public final class Utils {
     private static final ObjectMapper mapper = new ObjectMapper();
 
     private Utils() {
-        throw new RuntimeException("No instances allowed");
+        throw new IllegalStateException("No instances allowed");
     }
 
     public static String encodeUrl(String param) {
@@ -41,7 +41,7 @@ public final class Utils {
         try {
             return sup.get();
         } catch (UnsupportedEncodingException e) {
-            throw new RuntimeException(e.getMessage(), e);
+            throw new IllegalStateException(e.getMessage(), e);
         }
     }
 
@@ -56,7 +56,7 @@ public final class Utils {
         try {
             return mapper.readValue(json, new TypeReference<Map<String, String>>() {/**/
             });
         } catch (Exception e) {
-            throw new RuntimeException("Exception while converting json to Map<String,String> json=" + json, e);
+            throw new IllegalArgumentException("Exception while converting json to Map<String,String> json=" + json, e);
         }
     }
 
@@ -69,7 +69,7 @@ public final class Utils {
         try {
             return mapper.writeValueAsString(entityKeyValues);
         } catch (JsonProcessingException e) {
-            throw new RuntimeException("Exception while converting Map<String,String> to json=" + entityKeyValues, e);
+            throw new IllegalArgumentException("Exception while converting Map<String,String> to json=" + entityKeyValues, e);
         }
     }
 }
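Note: the recurring pattern in this diff (SparkUtils, GenericRecordToBytesEncoder, HdfsPathDecoder, Utils) is the non-instantiable utility class. A minimal sketch of the pattern with a hypothetical class name:

    public final class SomeUtils {
        private SomeUtils() {
            // throwing here also defeats instantiation via reflection
            throw new IllegalStateException("No instances allowed");
        }
        // static helpers only
    }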