Formatted code with the Eclipse default formatter
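
This is a purely mechanical reformat; no code logic changes. As a sketch of how the result could be reproduced (assuming an Eclipse JDT installation and the default Java formatter profile exported to a prefs file), the headless formatter application can be run over the sources:

	eclipse -nosplash -application org.eclipse.jdt.core.JavaCodeFormatter \
		-config .settings/org.eclipse.jdt.core.prefs src/main/java

The executable name and prefs path above are placeholders and depend on the local setup.
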
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/AbstractDataProvider.java b/src/main/java/org/eclipse/mdm/mdfsorter/AbstractDataProvider.java
index 26935bd..4a1917d 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/AbstractDataProvider.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/AbstractDataProvider.java
@@ -14,17 +14,17 @@
 
 /**
  * Abstract Parent Class of MDF3DataProvider and MDF4DataProvider.
+ * 
  * @author Tobias Leemann
  */
 public interface AbstractDataProvider {
-	abstract void read(long globaloffset, ByteBuffer data)
-			throws IOException, DataFormatException;
+	abstract void read(long globaloffset, ByteBuffer data) throws IOException, DataFormatException;
 
-	ByteBuffer cachedRead(long globaloffset, int length)
-			throws IOException, DataFormatException;
+	ByteBuffer cachedRead(long globaloffset, int length) throws IOException, DataFormatException;
 
 	/**
 	 * Get the length of a data section.
+	 * 
 	 * @return The length of the data section.
 	 */
 	abstract long getLength();
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/ArgumentStruct.java b/src/main/java/org/eclipse/mdm/mdfsorter/ArgumentStruct.java
index b6a717b..e32ba2e 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/ArgumentStruct.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/ArgumentStruct.java
@@ -37,11 +37,9 @@
 	 * @throws IllegalArgumentException
 	 *             If the arguments are not valid.
 	 */
-	public static ArgumentStruct parseArgs(String[] argv)
-			throws IllegalArgumentException {
+	public static ArgumentStruct parseArgs(String[] argv) throws IllegalArgumentException {
 		if (argv.length < 3) {
-			throw new IllegalArgumentException(
-					"At least two arguments must be provided.");
+			throw new IllegalArgumentException("At least two arguments must be provided.");
 		} else {
 			ArgumentStruct args = new ArgumentStruct();
 			args.inputname = argv[1];
@@ -51,8 +49,7 @@
 				switch (splitted[0]) {
 				case "-unzip":
 					if (args.zipflagset) {
-						throw new MDFSorterArgException(
-								"Ambigous zip flags.");
+						throw new MDFSorterArgException("Ambigous zip flags.");
 					}
 					args.unzip = true;
 					args.zipflagset = true;
@@ -65,8 +62,7 @@
 					break;
 				case "-zip":
 					if (args.zipflagset) {
-						throw new MDFSorterArgException(
-								"Ambigous zip flags.");
+						throw new MDFSorterArgException("Ambigous zip flags.");
 					}
 					args.unzip = false;
 					args.zipflagset = true;
@@ -75,15 +71,13 @@
 					}
 				case "-maxblocksize":
 					if (splitted.length < 2) {
-						throw new MDFSorterArgException(
-								"Argument must be provided after \"-maxblocksize=\" flag.");
+						throw new MDFSorterArgException("Argument must be provided after \"-maxblocksize=\" flag.");
 					} else {
 						args.maxblocksize = parseLong(splitted[1]);
 					}
 					break;
 				default:
-					throw new MDFSorterArgException(
-							"Unknown Argument " + splitted[0]);
+					throw new MDFSorterArgException("Unknown Argument " + splitted[0]);
 				}
 			}
 
@@ -97,12 +91,10 @@
 		}
 	}
 
-	public static ArgumentStruct parseArgsCheck(String[] argv)
-			throws IllegalArgumentException {
+	public static ArgumentStruct parseArgsCheck(String[] argv) throws IllegalArgumentException {
 		ArgumentStruct args = new ArgumentStruct();
 		if (argv.length < 1) {
-			throw new MDFSorterArgException(
-					"At least one arguments must be provided.");
+			throw new MDFSorterArgException("At least one arguments must be provided.");
 		}
 		args.inputname = argv[1];
 
@@ -137,8 +129,7 @@
 		// Numerical value only
 		if (c > 47 && c < 58) {
 			return Long.parseLong(arg);
-		} else if (c == 'M' || c == 'm' || c == 'K' || c == 'k' || c == 'G'
-				|| c == 'g') {
+		} else if (c == 'M' || c == 'm' || c == 'K' || c == 'k' || c == 'G' || c == 'g') {
 			String numval = arg.substring(0, arg.length() - 1);
 			long l = Long.parseLong(numval);
 			switch (c) {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/DataBlockBuffer.java b/src/main/java/org/eclipse/mdm/mdfsorter/DataBlockBuffer.java
index c52a528..e708701 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/DataBlockBuffer.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/DataBlockBuffer.java
@@ -21,7 +21,7 @@
  * @see WriteWorker
  *
  */
-public class DataBlockBuffer implements AutoCloseable{
+public class DataBlockBuffer implements AutoCloseable {
 
 	/**
 	 * This list contains the pointers to the data, and an integer with the
@@ -85,7 +85,9 @@
 	}
 
 	/*
-	 * Puts null into the buffer, which causes the write tread using this buffer to finish.
+	 * Puts null into the buffer, which causes the write tread using this buffer
+	 * to finish.
+	 * 
 	 * @see java.lang.AutoCloseable#close()
 	 */
 	@Override
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/MDFAbstractProcessWriter.java b/src/main/java/org/eclipse/mdm/mdfsorter/MDFAbstractProcessWriter.java
index 5e7d7f7..3a84700 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/MDFAbstractProcessWriter.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/MDFAbstractProcessWriter.java
@@ -48,8 +48,7 @@
 	 */
 	protected WriteDataCache myCache;
 
-	public abstract void processAndWriteOut()
-			throws IOException, DataFormatException;
+	public abstract void processAndWriteOut() throws IOException, DataFormatException;
 
 	public abstract void writeSpacer(long length);
 
@@ -88,10 +87,8 @@
 		boolean ret = false;
 		for (MDFXGenBlock blk : filestructure.getList()) {
 			for (int i = 0; i < blk.getLinkCount(); i++) {
-				if (blk.getLink(i) != null
-						&& blk.getLink(i).getProblems() != null) {
-					for (MDFCompatibilityProblem p : blk.getLink(i)
-							.getProblems()) {
+				if (blk.getLink(i) != null && blk.getLink(i).getProblems() != null) {
+					for (MDFCompatibilityProblem p : blk.getLink(i).getProblems()) {
 						p.setParentnode(blk);
 					}
 					ret = true;
@@ -111,8 +108,7 @@
 	 * @throws IOException
 	 *             If an I/O error occurs.
 	 */
-	public void copyBlock(MDFXGenBlock blk, FileChannel reader)
-			throws IOException {
+	public void copyBlock(MDFXGenBlock blk, FileChannel reader) throws IOException {
 		reader.position(blk.getPos());
 		blk.setOutputpos(writeptr);
 
@@ -124,8 +120,7 @@
 		do {
 			int bytesread;
 			if (written + MAX_OUTPUTBLOCKSIZE > length) {
-				ByteBuffer custombuffer = ByteBuffer
-						.allocate((int) (length - written));
+				ByteBuffer custombuffer = ByteBuffer.allocate((int) (length - written));
 				bytesread = reader.read(custombuffer);
 				performPut(custombuffer, bytesread, false);
 			} else {
@@ -136,8 +131,7 @@
 			written += bytesread;
 		} while (written < length);
 		if (length != written) {
-			throw new IOException("written length not equal to blocklength: "
-					+ length + "/" + written);
+			throw new IOException("written length not equal to blocklength: " + length + "/" + written);
 		}
 		// insert space if length%8!=0
 		if (length % 8 != 0) {
@@ -159,8 +153,7 @@
 	 * @throws IOException
 	 *             If an Output error occurs.
 	 */
-	public void writeBlock(MDFXGenBlock blk, byte[] appendData)
-			throws IOException {
+	public void writeBlock(MDFXGenBlock blk, byte[] appendData) throws IOException {
 		blk.setOutputpos(writeptr);
 
 		performPut(blk.getHeaderBytes());
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/MDFFileContent.java b/src/main/java/org/eclipse/mdm/mdfsorter/MDFFileContent.java
index 164ff4f..4f16d98 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/MDFFileContent.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/MDFFileContent.java
@@ -38,20 +38,17 @@
 
 	/**
 	 * True, if the parsed File is of MDF 3.x Format; False if it is an MDF 4.x
-	 * file.
-	 * The value of this attribute also defines the generic type parameter of this object.
-	 * If this attribute is set to true, the FileContent can be securely casted to
-	 * FileContent&lt;MDF3GenBlock&gt;.
-	 * If this attribute is set to false, the FileContent can be securely casted to
+	 * file. The value of this attribute also defines the generic type parameter
+	 * of this object. If this attribute is set to true, the FileContent can be
+	 * securely casted to FileContent&lt;MDF3GenBlock&gt;. If this attribute is
+	 * set to false, the FileContent can be securely casted to
 	 * FileContent&lt;MDF4GenBlock&gt;.
 	 */
 	private boolean isMDF3 = false;
 
-
 	/**
-	 * Only used in MDF3.
-	 * BigEndian value from the IDBLOCK.
-	 * True if numbers are stored as LittleEndian. False if they are stored BigEndian encoding.
+	 * Only used in MDF3. BigEndian value from the IDBLOCK. True if numbers are
+	 * stored as LittleEndian. False if they are stored BigEndian encoding.
 	 */
 	private boolean isBigEndian = false;
 
@@ -68,8 +65,7 @@
 	 * @param isMDF3
 	 *            True, if the file is of version 3.x, false if it is 4.x.
 	 */
-	public MDFFileContent(FileChannel in, T blk, LinkedList<T> list,
-			boolean isMDF3) {
+	public MDFFileContent(FileChannel in, T blk, LinkedList<T> list, boolean isMDF3) {
 		this.input = in;
 		this.root = blk;
 		this.list = list;
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/MDFGenBlock.java b/src/main/java/org/eclipse/mdm/mdfsorter/MDFGenBlock.java
index f8d05b2..f6f589f 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/MDFGenBlock.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/MDFGenBlock.java
@@ -17,7 +17,7 @@
 	/** Header section */
 
 	// Block type identifier, e.g. "##HD", "##MD", in MDF4 or "HD", "CG" in MDF3
-	protected String id ="";
+	protected String id = "";
 
 	// the position of the block within the input MDF file
 	protected final long pos;
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/MDFParser.java b/src/main/java/org/eclipse/mdm/mdfsorter/MDFParser.java
index bb3a5d1..24f6450 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/MDFParser.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/MDFParser.java
@@ -29,8 +29,7 @@
 public abstract class MDFParser {
 
 	@SuppressWarnings("unchecked")
-	public static MDFFileContent<? extends MDFGenBlock> serializeFile(
-			FileChannel in) throws IOException {
+	public static MDFFileContent<? extends MDFGenBlock> serializeFile(FileChannel in) throws IOException {
 		// some IDBLOCK Checks.
 		char[] versionnum = new char[8];
 		byte[] idblock = readBytes(64, in);
@@ -50,19 +49,16 @@
 		}
 
 		int version = MDF4Util.readUInt16(getDataBuffer(idblock, 28, 30));
-		MDFSorter.log.log(Level.FINE, "Found MDF Version "
-				+ String.valueOf(versionnum) + " (" + version + ")");
+		MDFSorter.log.log(Level.FINE, "Found MDF Version " + String.valueOf(versionnum) + " (" + version + ")");
 
 		@SuppressWarnings("rawtypes")
 		MDFAbstractParser myParser = null;
 
 		if (version < 300 || version > 411) {
-			MDFSorter.log.severe("MDF Version " + String.valueOf(versionnum)
-			+ "is not supported. Aborting.");
+			MDFSorter.log.severe("MDF Version " + String.valueOf(versionnum) + "is not supported. Aborting.");
 			throw new IllegalArgumentException("Unsupported MDF Version.");
 		} else if (version < 400) {
-			boolean bigendian = MDF4Util
-					.readUInt16(getDataBuffer(idblock, 24, 26)) != 0;
+			boolean bigendian = MDF4Util.readUInt16(getDataBuffer(idblock, 24, 26)) != 0;
 			myParser = new MDF3Parser(in, bigendian);
 		} else {
 			myParser = new MDF4Parser(in);
@@ -83,13 +79,11 @@
 	 * @throws IOException
 	 *             If an input error occurs.
 	 */
-	private static byte[] readBytes(int bytes, FileChannel in)
-			throws IOException {
+	private static byte[] readBytes(int bytes, FileChannel in) throws IOException {
 		ByteBuffer chunk = ByteBuffer.allocate(bytes);
 		int bytesread = 0;
 		if ((bytesread = in.read(chunk)) != bytes) {
-			System.err.println(
-					"Read only " + bytesread + " Bytes instead of " + bytes);
+			System.err.println("Read only " + bytesread + " Bytes instead of " + bytes);
 		}
 		return chunk.array();
 	}
@@ -108,13 +102,11 @@
 	 */
 	public static ByteBuffer getDataBuffer(byte[] data, int start, int end) {
 		if (start >= 0 && end <= data.length) {
-			return java.nio.ByteBuffer
-					.wrap(Arrays.copyOfRange(data, start, end));
+			return java.nio.ByteBuffer.wrap(Arrays.copyOfRange(data, start, end));
 		} else {
 			// just for testing
 			throw new ArrayIndexOutOfBoundsException(
-					"Tried to access bytes " + start + " to " + end
-					+ "with array length " + data.length);
+					"Tried to access bytes " + start + " to " + end + "with array length " + data.length);
 		}
 	}
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorter.java b/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorter.java
index a4e7b71..3e64cd5 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorter.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorter.java
@@ -71,10 +71,8 @@
 					printUsage();
 					return;
 				case "check":
-					ArgumentStruct structchk = ArgumentStruct
-					.parseArgsCheck(args);
-					checkForProblems(structchk.inputname,
-							structchk.maxblocksize, structchk.unzip);
+					ArgumentStruct structchk = ArgumentStruct.parseArgsCheck(args);
+					checkForProblems(structchk.inputname, structchk.maxblocksize, structchk.unzip);
 					return;
 				case "process":
 					setUpLogging();
@@ -110,8 +108,7 @@
 	 *            if all data should be Zipped (Stored in DZBlocks) in the
 	 *            output file.
 	 */
-	public static void sortMDF(String inputfile, String outputfile,
-			long maxblocksize, boolean unzip) {
+	public static void sortMDF(String inputfile, String outputfile, long maxblocksize, boolean unzip) {
 		setUpLogging();
 		ArgumentStruct struct = new ArgumentStruct();
 		struct.inputname = inputfile;
@@ -121,8 +118,7 @@
 
 		// Larger blocks cannot be zipped (see Specification of MDF4.1)
 		if (!unzip && maxblocksize > 4L * 1024L * 1024L) {
-			log.log(Level.WARNING,
-					"Setting maxblocksize to 4MB. Larger blocks are not allowed for zipped data.");
+			log.log(Level.WARNING, "Setting maxblocksize to 4MB. Larger blocks are not allowed for zipped data.");
 			struct.maxblocksize = 4L * 1024L * 1024L;
 		}
 
@@ -145,10 +141,9 @@
 	 * @throws IOException
 	 *             (File not found)
 	 */
-	public static boolean checkForProblems(String inputfile, long maxblocksize,
-			boolean unzip) throws IOException {
+	public static boolean checkForProblems(String inputfile, long maxblocksize, boolean unzip) throws IOException {
 		setUpLogging();
-		//wrap arguments.
+		// wrap arguments.
 		ArgumentStruct args = new ArgumentStruct();
 		args.unzip = unzip;
 		args.maxblocksize = maxblocksize;
@@ -172,17 +167,18 @@
 	 * @throws IOException
 	 *             (File not found)
 	 */
-	public static boolean checkForProblems(String inputfile, long maxblocksize)
-			throws IOException {
+	public static boolean checkForProblems(String inputfile, long maxblocksize) throws IOException {
 		return checkForProblems(inputfile, maxblocksize, true);
 	}
 
-
 	/**
 	 * Internally called Method that really performs the "check" operation.
-	 * @param struct The Arguments for this call
+	 * 
+	 * @param struct
+	 *            The Arguments for this call
 	 * @return True, if problems were found, false if not.
-	 * @throws IOException If an I/O error occurs.
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	static boolean checkForProblems(ArgumentStruct struct) throws IOException {
 		setUpLogging();
@@ -190,21 +186,17 @@
 		bufstream = new FileInputStream(struct.inputname);
 		log.log(Level.INFO, "File opened.");
 		// 1. Parse file and get Content-Struct
-		MDFFileContent<? extends MDFGenBlock> con = MDFParser
-				.serializeFile(bufstream.getChannel());
-
+		MDFFileContent<? extends MDFGenBlock> con = MDFParser.serializeFile(bufstream.getChannel());
 
 		// 2. Check for Problems.
 		boolean ret = false;
 		if (!con.isMDF3()) {
 			@SuppressWarnings("unchecked")
-			MDF4ProcessWriter pw = new MDF4ProcessWriter(
-					(MDFFileContent<MDF4GenBlock>) con, struct);
+			MDF4ProcessWriter pw = new MDF4ProcessWriter((MDFFileContent<MDF4GenBlock>) con, struct);
 			ret = pw.checkProblems();
 		} else {
 			@SuppressWarnings("unchecked")
-			MDF3ProcessWriter pw = new MDF3ProcessWriter(
-					(MDFFileContent<MDF3GenBlock>) con, struct);
+			MDF3ProcessWriter pw = new MDF3ProcessWriter((MDFFileContent<MDF3GenBlock>) con, struct);
 			ret = pw.checkProblems();
 		}
 
@@ -235,19 +227,16 @@
 			bufstream = new FileInputStream(struct.inputname);
 			log.log(Level.INFO, "File opened.");
 			// 1. Parse file and get Content-Struct
-			MDFFileContent<? extends MDFGenBlock> con = MDFParser
-					.serializeFile(bufstream.getChannel());
+			MDFFileContent<? extends MDFGenBlock> con = MDFParser.serializeFile(bufstream.getChannel());
 
 			// 2. Init processing and write out
 			@SuppressWarnings("rawtypes")
 			MDFAbstractProcessWriter processorwriter;
 
 			if (con.isMDF3()) {
-				processorwriter = new MDF3ProcessWriter(
-						(MDFFileContent<MDF3GenBlock>) con, struct);
+				processorwriter = new MDF3ProcessWriter((MDFFileContent<MDF3GenBlock>) con, struct);
 			} else {
-				processorwriter = new MDF4ProcessWriter(
-						(MDFFileContent<MDF4GenBlock>) con, struct);
+				processorwriter = new MDF4ProcessWriter((MDFFileContent<MDF4GenBlock>) con, struct);
 			}
 			processorwriter.processAndWriteOut();
 			bufstream.close();
@@ -302,22 +291,19 @@
 		System.out.println(
 				"\tProcess an MDF4 file for usage with an ASAM ODS Server.\n\tThis call requires the following parameters:\n\t <inputfile> <outputfile> [<flags>]");
 		System.out.println("\tInputfile: The MDF4-File to process");
-		System.out.println(
-				"\tOutputfile: The MDF-File where the output will be written.");
-		System.out.println(
-				"\tFlags: Other parameters. Ordering of flags is not important.");
+		System.out.println("\tOutputfile: The MDF-File where the output will be written.");
+		System.out.println("\tFlags: Other parameters. Ordering of flags is not important.");
 		System.out.println("\t\t-zip: Zip all Data found. ");
 		System.out.println("\t\t-unzip: Unzip all Data found.");
-		System.out.println(
-				"\t\t-maxblocksize=<Value>: Maximum size of a DataBlock. \n\t\te.g. \"200M\", \"3K\", \"1G\"");
-		System.out.println(
-				"\tExample: process infile.mf4 outfile.mf4 -maxblocksize=20m -zip");
+		System.out
+				.println("\t\t-maxblocksize=<Value>: Maximum size of a DataBlock. \n\t\te.g. \"200M\", \"3K\", \"1G\"");
+		System.out.println("\tExample: process infile.mf4 outfile.mf4 -maxblocksize=20m -zip");
 		System.out.println("\"check\":");
 		System.out.println(
 				"\tCheck if processing an MDF4 file for usage with an ASAM ODS Server\n\tis necessary. This call requires the following parameters:\n\t <inputfile> [<maxblocksize>] [<zipflag>]");
 		System.out.println("\tInputfile: The MDF4-File to process");
-		System.out.println(
-				"\t\t-maxblocksize=<Value>: Maximum size of a DataBlock. \n\t\te.g. \"200M\", \"3K\", \"1G\"");
+		System.out
+				.println("\t\t-maxblocksize=<Value>: Maximum size of a DataBlock. \n\t\te.g. \"200M\", \"3K\", \"1G\"");
 		System.out.println(
 				"\tzipflag: \"-zip\" or \"-unzip\", zip if all data will be zipped,\n\t\tunzipped if all data block will be unzipped.");
 		System.out.println("\tExample: check infile.mf4 4M -zip");
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorterArgException.java b/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorterArgException.java
index 8dc9c3a..1f1c915 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorterArgException.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/MDFSorterArgException.java
@@ -10,10 +10,11 @@
 
 /**
  * Exception thrown by the MDFSorter Argument parser.
+ * 
  * @author Tobias Leemann
  *
  */
-public class MDFSorterArgException extends IllegalArgumentException{
+public class MDFSorterArgException extends IllegalArgumentException {
 
 	public MDFSorterArgException(String string) {
 		super(string);
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/ReadDataCache.java b/src/main/java/org/eclipse/mdm/mdfsorter/ReadDataCache.java
index 0e408a8..bfa9965 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/ReadDataCache.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/ReadDataCache.java
@@ -12,7 +12,6 @@
 import java.nio.ByteBuffer;
 import java.util.zip.DataFormatException;
 
-
 public class ReadDataCache {
 	private ByteBuffer cache;
 	private long cachestart = -1;
@@ -27,8 +26,7 @@
 		maxreadlen = prov.getLength();
 	}
 
-	public ByteBuffer read(long startoffset, int length)
-			throws IOException, DataFormatException {
+	public ByteBuffer read(long startoffset, int length) throws IOException, DataFormatException {
 		if (length < CACHESIZE) {
 			if (startoffset >= cachestart && startoffset + length < cacheend) {
 				// cache hit!
@@ -42,12 +40,9 @@
 				if (cacheend > maxreadlen) {
 					// maximum reached.
 					cacheend = maxreadlen;
-					cache = ByteBuffer
-							.allocate((int) (maxreadlen - startoffset));
+					cache = ByteBuffer.allocate((int) (maxreadlen - startoffset));
 					if (maxreadlen - startoffset < length) {
-						throw new RuntimeException("Length " + length
-								+ " Bytes are not available from "
-								+ startoffset);
+						throw new RuntimeException("Length " + length + " Bytes are not available from " + startoffset);
 					}
 				} else if (cache.capacity() < CACHESIZE) {
 					cache = ByteBuffer.allocate(CACHESIZE);
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/WriteDataCache.java b/src/main/java/org/eclipse/mdm/mdfsorter/WriteDataCache.java
index 458cd60..9b24707 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/WriteDataCache.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/WriteDataCache.java
@@ -22,8 +22,7 @@
  * will be splitted into smaller pieces, slowing down the write operation.
  */
 public class WriteDataCache {
-	private static final int WRITE_CACHE_SIZE = MDF4ProcessWriter.MAX_OUTPUTBLOCKSIZE
-			/ 2;
+	private static final int WRITE_CACHE_SIZE = MDF4ProcessWriter.MAX_OUTPUTBLOCKSIZE / 2;
 
 	/**
 	 * The buffer the data is send to, once the cache is full
@@ -68,8 +67,7 @@
 				}
 				int spaceremaining = WRITE_CACHE_SIZE - cachewriteposition;
 
-				int bytesfirstsection = spaceremaining < length ? spaceremaining
-						: length;
+				int bytesfirstsection = spaceremaining < length ? spaceremaining : length;
 				// maybe data needs to be split up
 				data.get(cache, cachewriteposition, bytesfirstsection);
 				cachewriteposition += bytesfirstsection;
@@ -107,12 +105,10 @@
 			}
 			int spaceremaining = WRITE_CACHE_SIZE - cachewriteposition;
 
-			int bytesfirstsection = spaceremaining < length ? spaceremaining
-					: length;
+			int bytesfirstsection = spaceremaining < length ? spaceremaining : length;
 			// maybe data needs to be split up
 
-			System.arraycopy(data, 0, cache, cachewriteposition,
-					bytesfirstsection);
+			System.arraycopy(data, 0, cache, cachewriteposition, bytesfirstsection);
 			cachewriteposition += bytesfirstsection;
 
 			CheckAndWriteout();
@@ -120,8 +116,7 @@
 			// write second part if needed
 			if (bytesfirstsection < length) {
 				int bytessecond = length - bytesfirstsection;
-				System.arraycopy(data, bytesfirstsection, cache,
-						cachewriteposition, bytessecond);
+				System.arraycopy(data, bytesfirstsection, cache, cachewriteposition, bytessecond);
 				cachewriteposition += bytessecond;
 			}
 		}
@@ -133,8 +128,7 @@
 	public void flush() {
 		// Cache has been flushed?
 		if (cache != null) {
-			buf.putData(new AbstractMap.SimpleEntry<byte[], Integer>(cache,
-					cachewriteposition));
+			buf.putData(new AbstractMap.SimpleEntry<byte[], Integer>(cache, cachewriteposition));
 			cachewriteposition = 0;
 			cache = null;
 		}
@@ -143,8 +137,7 @@
 	public void CheckAndWriteout() {
 		if (cachewriteposition == WRITE_CACHE_SIZE) {
 			// write all data out.
-			buf.putData(
-					new AbstractMap.SimpleEntry<byte[], Integer>(cache, -1));
+			buf.putData(new AbstractMap.SimpleEntry<byte[], Integer>(cache, -1));
 			cache = new byte[WRITE_CACHE_SIZE];
 			cachewriteposition = 0;
 		}
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/WriteWorker.java b/src/main/java/org/eclipse/mdm/mdfsorter/WriteWorker.java
index eacb37c..8531dd0 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/WriteWorker.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/WriteWorker.java
@@ -38,8 +38,8 @@
 	}
 
 	/*
-	 * Takes bytes of Data from the buffer until a null-Pointer signals, that no more data
-	 * will be expected.
+	 * Takes bytes of Data from the buffer until a null-Pointer signals, that no
+	 * more data will be expected.
 	 *
 	 * @see java.lang.Runnable#run()
 	 */
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CCBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CCBLOCK.java
index 08b72b2..46ad31e 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CCBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CCBLOCK.java
@@ -16,6 +16,7 @@
 
 /**
  * The Conversion Block
+ * 
  * @author Tobias Leemann, Christian Rechner
  */
 @SuppressWarnings("unused")
@@ -49,14 +50,17 @@
 	// 65535 = 1:1 conversion formula (Int = Phys)
 	private int formulaIdent;
 
-	// UINT16 1 Number of value pairs for conversion formulas 1, 2, 11 and 12 or number of
+	// UINT16 1 Number of value pairs for conversion formulas 1, 2, 11 and 12 or
+	// number of
 	// parameters
 	private int noOfValuePairsForFormula;
 
 	private byte[] conversionData;
 
-	// ... Parameter (for type 0,6,7,8,9) or table (for type 1, 2, 11, or 12) or text (for type
-	// 10), depending on the conversion formula identifier. See formula-specific block
+	// ... Parameter (for type 0,6,7,8,9) or table (for type 1, 2, 11, or 12) or
+	// text (for type
+	// 10), depending on the conversion formula identifier. See formula-specific
+	// block
 	// supplement.
 	private double[] valuePairsForFormula; // formula = 0,6,7,8,9
 
@@ -68,7 +72,6 @@
 	private double[] upperRangeKeysForTextRangeTable; // formula = 12
 	private String[] valuesForTextRangeTable; // formula = 12
 
-
 	/**
 	 * Parse a CCBLOCK from an existing MDFGenBlock
 	 *
@@ -152,7 +155,6 @@
 		return valuesForTextTable;
 	}
 
-
 	private void setValuesForTextTable(String[] valuesForTextTable) {
 		this.valuesForTextTable = valuesForTextTable;
 	}
@@ -193,8 +195,9 @@
 	public String toString() {
 		return "CCBLOCK [knownPhysValue=" + knownPhysValue + ", minPhysValue=" + minPhysValue + ", maxPhysValue="
 				+ maxPhysValue + ", physUnit=" + physUnit + ", formulaIdent=" + formulaIdent
-				+ ", noOfValuePairsForFormula=" + noOfValuePairsForFormula+"]";
+				+ ", noOfValuePairsForFormula=" + noOfValuePairsForFormula + "]";
 	}
+
 	/*
 	 * (non-Javadoc)
 	 *
@@ -203,7 +206,7 @@
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// BOOL 1 Value range – known physical value
-		setKnownPhysValue(MDF3Util.readBool(MDFParser.getDataBuffer(content, 0, 2),isBigEndian()));
+		setKnownPhysValue(MDF3Util.readBool(MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
 
 		// REAL 1 Value range – minimum physical value
 		setMinPhysValue(MDF4Util.readReal(MDFParser.getDataBuffer(content, 2, 10)));
@@ -214,7 +217,6 @@
 		// CHAR 20 Physical unit
 		setPhysUnit(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 18, 38), 20));
 
-
 		// UINT16 1 Conversion formula identifier
 		// 0 = parametric, linear
 		// 1 = tabular with interpolation
@@ -231,34 +233,34 @@
 		// 65535 = 1:1 conversion formula (Int = Phys)
 		setFormulaIdent(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 38, 40), isBigEndian()));
 
-		if(content.length>40){
-			// UINT16 1 Number of value pairs for conversion formulas 1, 2, 11 and 12 or number of parameters
+		if (content.length > 40) {
+			// UINT16 1 Number of value pairs for conversion formulas 1, 2, 11
+			// and 12 or number of parameters
 			setNoOfValuePairsForFormula(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 40, 42), isBigEndian()));
 		}
 
-		if(content.length > 42){
-			conversionData = new byte[content.length-42];
+		if (content.length > 42) {
+			conversionData = new byte[content.length - 42];
 			System.arraycopy(content, 42, conversionData, 0, conversionData.length);
 		}
 	}
 
 	@Override
 	public void updateLinks(RandomAccessFile r) throws IOException {
-		if(getLinkCount()==0) {
+		if (getLinkCount() == 0) {
 			return;
 		}
 
-		if(formulaIdent!= 12){
+		if (formulaIdent != 12) {
 			throw new RuntimeException("Only a CC block with formula type 12 can have links.");
 		}
 
 		MDF3GenBlock linkedblock;
 		for (int i = 0; i < getLinkCount(); i++) {
-			r.seek(getOutputpos() + 4L +42L + 20L*i +16L);
-			//position of links, see specification.
+			r.seek(getOutputpos() + 4L + 42L + 20L * i + 16L);
+			// position of links, see specification.
 			if ((linkedblock = getLink(i)) != null) {
-				r.write(MDF3Util.getBytesLink(linkedblock.getOutputpos(),
-						isBigEndian()));
+				r.write(MDF3Util.getBytesLink(linkedblock.getOutputpos(), isBigEndian()));
 			} else {
 				r.write(MDF3Util.getBytesLink(0, isBigEndian()));
 			}
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CDBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CDBLOCK.java
index 9f8de94..98eb9de 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CDBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CDBLOCK.java
@@ -14,10 +14,9 @@
 import org.eclipse.mdm.mdfsorter.MDFParser;
 
 /**
- * @author Tobias Leemann
- * The Channel Dependency Block
+ * @author Tobias Leemann The Channel Dependency Block
  */
-public class CDBLOCK extends MDF3GenBlock{
+public class CDBLOCK extends MDF3GenBlock {
 
 	/**
 	 * Dependency Type
@@ -85,14 +84,16 @@
 		// UINT16 number of dependencies
 		setNoDependencies(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 2, 4), isBigEndian()));
 
-		if(dependancyType > 256){ //if dependency type is n-dimensional, the sizes of each dimesions are stored after the links.
-			if(content.length > 4 + getNoDependencies()*4){
-				int numvalues = (content.length - (4 + getNoDependencies()*4)) /2;
+		if (dependancyType > 256) { // if dependency type is n-dimensional, the
+									// sizes of each dimesions are stored after
+									// the links.
+			if (content.length > 4 + getNoDependencies() * 4) {
+				int numvalues = (content.length - (4 + getNoDependencies() * 4)) / 2;
 				int[] sizes = new int[numvalues];
-				int readptr = 4 + getNoDependencies()*4;
-				for(int i = 0; i < numvalues; i++){
-					sizes[i]= MDF3Util.readUInt16(MDFParser.getDataBuffer(content, readptr, readptr+2), bigendian);
-					readptr +=2;
+				int readptr = 4 + getNoDependencies() * 4;
+				for (int i = 0; i < numvalues; i++) {
+					sizes[i] = MDF3Util.readUInt16(MDFParser.getDataBuffer(content, readptr, readptr + 2), bigendian);
+					readptr += 2;
 				}
 			}
 		}
@@ -100,13 +101,12 @@
 
 	@Override
 	public void updateLinks(RandomAccessFile r) throws IOException {
-		r.seek(getOutputpos() + 4L +4L);
+		r.seek(getOutputpos() + 4L + 4L);
 		MDF3GenBlock linkedblock;
 		for (int i = 0; i < getLinkCount(); i++) {
-			//position of links, see specification.
+			// position of links, see specification.
 			if ((linkedblock = getLink(i)) != null) {
-				r.write(MDF3Util.getBytesLink(linkedblock.getOutputpos(),
-						isBigEndian()));
+				r.write(MDF3Util.getBytesLink(linkedblock.getOutputpos(), isBigEndian()));
 			} else {
 				r.write(MDF3Util.getBytesLink(0, isBigEndian()));
 			}
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CGBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CGBLOCK.java
index b7fc3b4..7f0a77e 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CGBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CGBLOCK.java
@@ -13,7 +13,6 @@
 
 import org.eclipse.mdm.mdfsorter.MDFParser;
 
-
 /**
  * The Channel Group Block
  *
@@ -112,9 +111,8 @@
 
 	@Override
 	public String toString() {
-		return "CGBLOCK [recordId=" + recordId + ", cycleCount=" + cycleCount
-				+ ", dataBytes=" + dataBytes + ", numChannels=" + numChannels
-				+ "]";
+		return "CGBLOCK [recordId=" + recordId + ", cycleCount=" + cycleCount + ", dataBytes=" + dataBytes
+				+ ", numChannels=" + numChannels + "]";
 	}
 
 	/*
@@ -125,21 +123,17 @@
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// UINT16: Record ID
-		setRecordId(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 0, 2),
-				isBigEndian()));
+		setRecordId(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
 
 		// UINT16: Number of Channel
-		setNumChannels(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 2, 4), isBigEndian()));
+		setNumChannels(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 2, 4), isBigEndian()));
 
 		// UINT16: Number of data Bytes (after record ID) used for signal values
 		// in record.
-		setDataBytes(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 4, 6),
-				isBigEndian()));
+		setDataBytes(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 4, 6), isBigEndian()));
 
 		// UINT32: Number of cycles
-		setCycleCount(MDF3Util.readUInt32(
-				MDFParser.getDataBuffer(content, 6, 10), isBigEndian()));
+		setCycleCount(MDF3Util.readUInt32(MDFParser.getDataBuffer(content, 6, 10), isBigEndian()));
 	}
 
 	@Override
@@ -169,24 +163,20 @@
 		System.arraycopy(recID, 0, ret, 0, 2);
 
 		// UINT16 Number of Channels
-		byte[] channelCount = MDF3Util.getBytesUInt16(getNumChannels(),
-				isBigEndian());
+		byte[] channelCount = MDF3Util.getBytesUInt16(getNumChannels(), isBigEndian());
 		System.arraycopy(channelCount, 0, ret, 2, 2);
 
 		// UINT16 Size of a record in bytes
-		byte[] databytes = MDF3Util.getBytesUInt16(getDataBytes(),
-				isBigEndian());
+		byte[] databytes = MDF3Util.getBytesUInt16(getDataBytes(), isBigEndian());
 		System.arraycopy(databytes, 0, ret, 4, 2);
 
 		// UINT32
-		byte[] cyccount = MDF3Util.getBytesUInt32(getCycleCount(),
-				isBigEndian());
+		byte[] cyccount = MDF3Util.getBytesUInt32(getCycleCount(), isBigEndian());
 		System.arraycopy(cyccount, 0, ret, 6, 4);
 
 		// Last link address!
 		if (getLinkCount() == 4) {
-			byte[] lnk = MDF3Util.getBytesLink(links[3].getOutputpos(),
-					isBigEndian());
+			byte[] lnk = MDF3Util.getBytesLink(links[3].getOutputpos(), isBigEndian());
 			System.arraycopy(lnk, 0, ret, 10, 2);
 		}
 		// LINK
@@ -201,8 +191,7 @@
 		// Update first three blocks normally
 		for (int i = 0; i < 3; i++) {
 			if ((linkedblock = getLink(i)) != null) {
-				r.write(MDF3Util.getBytesLink((int) linkedblock.getOutputpos(),
-						isBigEndian()));
+				r.write(MDF3Util.getBytesLink((int) linkedblock.getOutputpos(), isBigEndian()));
 			} else {
 				r.write(MDF3Util.getBytesLink(0, isBigEndian()));
 			}
@@ -210,8 +199,7 @@
 		// update fourth link manually
 		if (getLinkCount() == 4) {
 			r.seek(getOutputpos() + 4L + 3L * 4L + 10L);
-			r.write(MDF3Util.getBytesLink(getLink(4).getOutputpos(),
-					isBigEndian()));
+			r.write(MDF3Util.getBytesLink(getLink(4).getOutputpos(), isBigEndian()));
 		}
 	}
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CNBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CNBLOCK.java
index 488dad6..a94c633 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CNBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/CNBLOCK.java
@@ -203,14 +203,11 @@
 
 	@Override
 	public String toString() {
-		return "CNBLOCK [ channelType=" + channelType + ", signalName="
-				+ signalName + ", signalDescription=" + signalDescription
-				+ ", numberOfFirstBits=" + numberOfFirstBits + ", numberOfBits="
-				+ numberOfBits + ", signalDataType=" + signalDataType
-				+ ", knownImplValue=" + knownImplValue + ", minImplValue="
-				+ minImplValue + ", maxImplValue=" + maxImplValue
-				+ ", sampleRate=" + sampleRate + " byteOffset=" + byteOffset
-				+ "]";
+		return "CNBLOCK [ channelType=" + channelType + ", signalName=" + signalName + ", signalDescription="
+				+ signalDescription + ", numberOfFirstBits=" + numberOfFirstBits + ", numberOfBits=" + numberOfBits
+				+ ", signalDataType=" + signalDataType + ", knownImplValue=" + knownImplValue + ", minImplValue="
+				+ minImplValue + ", maxImplValue=" + maxImplValue + ", sampleRate=" + sampleRate + " byteOffset="
+				+ byteOffset + "]";
 	}
 
 	@Override
@@ -232,8 +229,7 @@
 		MDF3GenBlock linkedblock;
 		for (int i = 0; i < 5; i++) {
 			if ((linkedblock = getLink(i)) != null) {
-				r.write(MDF3Util.getBytesLink((int) linkedblock.getOutputpos(),
-						isBigEndian()));
+				r.write(MDF3Util.getBytesLink((int) linkedblock.getOutputpos(), isBigEndian()));
 			} else {
 				r.write(MDF3Util.getBytesLink(0, isBigEndian()));
 			}
@@ -242,12 +238,8 @@
 		// update last two links manually
 		if (getLinkCount() == 7) {
 			r.seek(getOutputpos() + 4L + 20L + 194L);
-			r.write(MDF3Util.getBytesLink(
-					getLink(5) != null ? getLink(5).getOutputpos() : 0,
-							isBigEndian()));
-			r.write(MDF3Util.getBytesLink(
-					getLink(6) != null ? getLink(6).getOutputpos() : 0,
-							isBigEndian()));
+			r.write(MDF3Util.getBytesLink(getLink(5) != null ? getLink(5).getOutputpos() : 0, isBigEndian()));
+			r.write(MDF3Util.getBytesLink(getLink(6) != null ? getLink(6).getOutputpos() : 0, isBigEndian()));
 		}
 
 	}
@@ -264,27 +256,22 @@
 		// 1 = time channel for all signals of this group (in each channel
 		// group, exactly one
 		// channel must be defined as time channel)
-		setChannelType(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
+		setChannelType(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
 
 		// CHAR 32 Signal name, i.e. the first 32 characters of the ASAM-MCD
 		// unique name
-		setSignalName(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 2, 34), 32));
+		setSignalName(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 2, 34), 32));
 
 		// CHAR 128 Signal description
-		setSignalDescription(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 34, 162), 128));
+		setSignalDescription(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 34, 162), 128));
 
 		// UINT16 1 Number of the first bits [0..n] (bit position within a byte:
 		// bit 0 is the least significant
 		// bit, bit 7 is the most significant bit)
-		setNumberOfFirstBits(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 162, 164), isBigEndian()));
+		setNumberOfFirstBits(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 162, 164), isBigEndian()));
 
 		// UINT16 1 Number of bits
-		setNumberOfBits(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 164, 166), isBigEndian()));
+		setNumberOfBits(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 164, 166), isBigEndian()));
 
 		// UINT16 1 Signal data type
 		// 0 = unsigned integer
@@ -292,24 +279,19 @@
 		// 2,3 = IEEE 754 floating-point format
 		// 7 = String (NULL terminated)
 		// 8 = Byte Array
-		setSignalDataType(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 166, 168), isBigEndian()));
+		setSignalDataType(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 166, 168), isBigEndian()));
 
 		// BOOL 1 Value range – known implementation value
-		setKnownImplValue(MDF3Util.readBool(
-				MDFParser.getDataBuffer(content, 168, 170), isBigEndian()));
+		setKnownImplValue(MDF3Util.readBool(MDFParser.getDataBuffer(content, 168, 170), isBigEndian()));
 
 		// REAL 1 Value range – minimum implementation value
-		setMinImplValue(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 170, 178)));
+		setMinImplValue(MDF4Util.readReal(MDFParser.getDataBuffer(content, 170, 178)));
 
 		// REAL 1 Value range – maximum implementation value
-		setMaxImplValue(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 178, 186)));
+		setMaxImplValue(MDF4Util.readReal(MDFParser.getDataBuffer(content, 178, 186)));
 
 		// REAL 1 Rate in which the variable was sampled. Unit [s]
-		setSampleRate(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 186, 194)));
+		setSampleRate(MDF4Util.readReal(MDFParser.getDataBuffer(content, 186, 194)));
 
 		// skip two links (2* 4Bytes, they are already read.
 
@@ -321,8 +303,7 @@
 			// is larger than 8192 Bytes to ensure compatibility; it enables to
 			// write data blocks
 			// larger than 8kBytes
-			setByteOffset(MDF3Util.readUInt16(
-					MDFParser.getDataBuffer(content, 202, 204), isBigEndian()));
+			setByteOffset(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 202, 204), isBigEndian()));
 		}
 	}
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/DGBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/DGBLOCK.java
index e0f9596..c09df3c 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/DGBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/DGBLOCK.java
@@ -92,19 +92,16 @@
 
 	@Override
 	public String toString() {
-		return "DGBLOCK [numberOfRecId=" + numberOfRecId + ", channelGroups="
-				+ channelGroups + "]";
+		return "DGBLOCK [numberOfRecId=" + numberOfRecId + ", channelGroups=" + channelGroups + "]";
 	}
 
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// UNINT16 Number of Channel Groups
-		setChannelGroups(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
+		setChannelGroups(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
 
 		// UINT16 RecordIDLayout
-		setNumOfRecId(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 2, 4), isBigEndian()));
+		setNumOfRecId(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 2, 4), isBigEndian()));
 
 	}
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/HDBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/HDBLOCK.java
index a639204..af94ee5 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/HDBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/HDBLOCK.java
@@ -187,59 +187,45 @@
 
 	@Override
 	public String toString() {
-		return "HDBLOCK [dateStarted=" + dateStarted + ", timeStarted="
-				+ timeStarted + ", author=" + author + ", department="
-				+ department + ", projectName=" + projectName + ", meaObject="
-				+ meaObject + ", timestamp=" + timestamp
-				+ ", utcTimeOffsetHours=" + utcTimeOffsetHours
-				+ ", timeQualityClass=" + timeQualityClass + ", timerIdent="
-				+ timerIdent + "]";
+		return "HDBLOCK [dateStarted=" + dateStarted + ", timeStarted=" + timeStarted + ", author=" + author
+				+ ", department=" + department + ", projectName=" + projectName + ", meaObject=" + meaObject
+				+ ", timestamp=" + timestamp + ", utcTimeOffsetHours=" + utcTimeOffsetHours + ", timeQualityClass="
+				+ timeQualityClass + ", timerIdent=" + timerIdent + "]";
 	}
 
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// UNINT16 Number of Data Groups
-		setNumberOfDataGroups(MDF3Util.readUInt16(
-				MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
+		setNumberOfDataGroups(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 0, 2), isBigEndian()));
 
 		// CHAR 10 Date when the recording was started
-		setDateStarted(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 2, 12), 10));
+		setDateStarted(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 2, 12), 10));
 
 		// CHAR 8 Signal name, i.e. the first 32 characters of the ASAM-MCD
 		// unique name
-		setTimeStarted(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 12, 20), 8));
+		setTimeStarted(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 12, 20), 8));
 
-		setAuthor(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 20, 52), 32));
-		setDepartment(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 52, 84), 32));
-		setProjectName(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 84, 116), 32));
-		setMeaObject(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 116, 148), 32));
+		setAuthor(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 20, 52), 32));
+		setDepartment(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 52, 84), 32));
+		setProjectName(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 84, 116), 32));
+		setMeaObject(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 116, 148), 32));
 
 		if (content.length > 148) {
 			// UNINT 64 Timestamp
-			setTimestamp(MDF3Util.readUInt64(
-					MDFParser.getDataBuffer(content, 148, 156), isBigEndian()));
+			setTimestamp(MDF3Util.readUInt64(MDFParser.getDataBuffer(content, 148, 156), isBigEndian()));
 		}
 		if (content.length > 156) {
 			// INT16, Time zone offset in hours
-			setUtcTimeOffsetHours(MDF3Util.readInt16(
-					MDFParser.getDataBuffer(content, 156, 158), isBigEndian()));
+			setUtcTimeOffsetHours(MDF3Util.readInt16(MDFParser.getDataBuffer(content, 156, 158), isBigEndian()));
 		}
 
 		if (content.length > 158) {
 			// UINT16, Time Quality
-			setTimeQualityClass(MDF3Util.readUInt16(
-					MDFParser.getDataBuffer(content, 158, 160), isBigEndian()));
+			setTimeQualityClass(MDF3Util.readUInt16(MDFParser.getDataBuffer(content, 158, 160), isBigEndian()));
 		}
 
 		if (content.length > 160) {
-			setTimerIdent(MDF4Util.readCharsUTF8(
-					MDFParser.getDataBuffer(content, 160, 192), 32));
+			setTimerIdent(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 160, 192), 32));
 		}
 
 	}
@@ -269,13 +255,11 @@
 		}
 
 		if (arraylength > 156) {
-			b.put(MDF3Util.getBytesInt16(getUtcTimeOffsetHours(),
-					isBigEndian()));
+			b.put(MDF3Util.getBytesInt16(getUtcTimeOffsetHours(), isBigEndian()));
 		}
 
 		if (arraylength > 158) {
-			b.put(MDF3Util.getBytesUInt16(getTimeQualityClass(),
-					isBigEndian()));
+			b.put(MDF3Util.getBytesUInt16(getTimeQualityClass(), isBigEndian()));
 		}
 
 		if (arraylength > 160) {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3BlocksSplittMerger.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3BlocksSplittMerger.java
index e7ec13c..fa386a9 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3BlocksSplittMerger.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3BlocksSplittMerger.java
@@ -70,8 +70,8 @@
 	 * @param prov
 	 *            The DataProvider to read from.
 	 */
-	public MDF3BlocksSplittMerger(MDF3ProcessWriter ps, MDF3GenBlock parentnode,
-			long totdatalength, MDF3DataProvider prov) {
+	public MDF3BlocksSplittMerger(MDF3ProcessWriter ps, MDF3GenBlock parentnode, long totdatalength,
+			MDF3DataProvider prov) {
 		this.ps = ps;
 		this.parentnode = parentnode;
 
@@ -97,8 +97,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void splitmerge(long startaddress, long length)
-			throws IOException, DataFormatException {
+	public void splitmerge(long startaddress, long length) throws IOException, DataFormatException {
 		globalReadPtr = startaddress;
 		appendDataFromPos(length);
 	}
@@ -114,8 +113,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void appendDataFromPos(long leftbytes)
-			throws IOException, DataFormatException {
+	public void appendDataFromPos(long leftbytes) throws IOException, DataFormatException {
 		// check if space in curr-Block is available, and fill with first data,
 		// or attach all data if it fits
 		if (curr == null) {
@@ -126,8 +124,7 @@
 			abstractcopy(leftbytes);
 			datawritten += leftbytes;
 		} else {
-			throw new RuntimeException(
-					"MDF3Merger got more data than space was reserved.");
+			throw new RuntimeException("MDF3Merger got more data than space was reserved.");
 		}
 	}
 
@@ -142,8 +139,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public ByteBuffer abstractread(int length)
-			throws IOException, DataFormatException {
+	public ByteBuffer abstractread(int length) throws IOException, DataFormatException {
 		return prov.cachedRead(globalReadPtr, length);
 	}
 
@@ -177,27 +173,23 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void abstractcopy(long length)
-			throws IOException, DataFormatException {
+	public void abstractcopy(long length) throws IOException, DataFormatException {
 		long written = 0L;
 		do {
 			int bytesread = 0;
-			if (written
-					+ MDFAbstractProcessWriter.MAX_OUTPUTBLOCKSIZE > length) {
+			if (written + MDFAbstractProcessWriter.MAX_OUTPUTBLOCKSIZE > length) {
 				bytesread = (int) (length - written);
 				ByteBuffer custombuffer = abstractread(bytesread);
 				abstractput(custombuffer, bytesread);
 			} else {
-				ByteBuffer buffer = abstractread(
-						MDFAbstractProcessWriter.MAX_OUTPUTBLOCKSIZE);
+				ByteBuffer buffer = abstractread(MDFAbstractProcessWriter.MAX_OUTPUTBLOCKSIZE);
 				bytesread = MDFAbstractProcessWriter.MAX_OUTPUTBLOCKSIZE;
 				abstractput(buffer, bytesread);
 			}
 			written += bytesread;
 		} while (written < length);
 		if (length != written) {
-			throw new IOException("written length not equal to blocklength: "
-					+ length + "/" + written);
+			throw new IOException("written length not equal to blocklength: " + length + "/" + written);
 		}
 	}
 
@@ -205,8 +197,7 @@
 		if (parentnode instanceof DGBLOCK) {
 			parentnode.setLink(3, curr);
 		} else {
-			System.err.println(
-					"Unable to set link to data block. Parent block not recognized.");
+			System.err.println("Unable to set link to data block. Parent block not recognized.");
 		}
 	}
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3DataProvider.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3DataProvider.java
index 5dae284..603ebb8 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3DataProvider.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3DataProvider.java
@@ -15,6 +15,7 @@
 
 import org.eclipse.mdm.mdfsorter.AbstractDataProvider;
 import org.eclipse.mdm.mdfsorter.ReadDataCache;
+
 /**
  * This Class is used to read Data from a data section, with an given offset. If
  * Data is stored in a linked list or in a zipped block, it is this classes job
@@ -88,8 +89,7 @@
 
 		if (globaloffset + data.capacity() > sectionlength) {
 			throw new IllegalArgumentException(
-					"Invalid read access on Data Provider. Section is only "
-							+ sectionlength + " bytes long.");
+					"Invalid read access on Data Provider. Section is only " + sectionlength + " bytes long.");
 		}
 
 		if (dataarr != null) {
@@ -120,13 +120,11 @@
 	 *             here)
 	 */
 	@Override
-	public ByteBuffer cachedRead(long globaloffset, int length)
-			throws IOException, DataFormatException {
+	public ByteBuffer cachedRead(long globaloffset, int length) throws IOException, DataFormatException {
 		// argument check
 		if (globaloffset + length > sectionlength) {
 			throw new IllegalArgumentException(
-					"Invalid read access on Data Provider. Section is only "
-							+ sectionlength + " bytes long.");
+					"Invalid read access on Data Provider. Section is only " + sectionlength + " bytes long.");
 		}
 
 		if (dataarr != null) {
@@ -151,7 +149,7 @@
 	 * @return The length.
 	 */
 	private long calculateLength() {
-		if(datasectionhead!=null){
+		if (datasectionhead != null) {
 			return datasectionhead.getLength();
 		}
 		return 0;
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3GenBlock.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3GenBlock.java
index 2bd41b2..54b23b0 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3GenBlock.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3GenBlock.java
@@ -16,8 +16,7 @@
 import org.eclipse.mdm.mdfsorter.MDFGenBlock;
 import org.eclipse.mdm.mdfsorter.MDFProblemType;
 
-public class MDF3GenBlock extends MDFGenBlock
-implements Comparable<MDF3GenBlock> {
+public class MDF3GenBlock extends MDFGenBlock implements Comparable<MDF3GenBlock> {
 
 	// Number of links
 	private int linkCount;
@@ -54,8 +53,7 @@
 		super(pos);
 		// Check with UINT32.Maxvalue.
 		if (pos > 2L * Integer.MAX_VALUE + 1L) {
-			throw new IllegalArgumentException(
-					"Block address " + pos + " is too large for MDF3 format.");
+			throw new IllegalArgumentException("Block address " + pos + " is too large for MDF3 format.");
 		}
 		this.bigendian = bigendian;
 	}
@@ -194,8 +192,7 @@
 	 */
 	@Override
 	public String toString() {
-		return "BLOCK [pos=" + pos + ", id=" + id + ", length=" + length
-				+ ", linkCount=" + linkCount + "]";
+		return "BLOCK [pos=" + pos + ", id=" + id + ", length=" + length + ", linkCount=" + linkCount + "]";
 	}
 
 	/**
@@ -265,8 +262,7 @@
 	 *             If an input error occurs.
 	 */
 	public void parse(byte[] content) throws IOException {
-		throw new UnsupportedOperationException(
-				"parse not valid on unspecified block.");
+		throw new UnsupportedOperationException("parse not valid on unspecified block.");
 	}
 
 	/**
@@ -300,8 +296,7 @@
 		}
 		System.arraycopy(idtext, 0, ret, 0, 2);
 
-		byte[] length = MDF3Util.getBytesUInt16((int) this.length,
-				isBigEndian());
+		byte[] length = MDF3Util.getBytesUInt16((int) this.length, isBigEndian());
 		System.arraycopy(length, 0, ret, 2, 2);
 
 		return ret;
@@ -334,8 +329,7 @@
 			CGBLOCK blk = (CGBLOCK) dgblk.getLnkCgFirst();
 			if (blk.getLnkCgNext() != null) { // more than one channel group per
 				// datagroup! Unsorted.
-				addProblem(new MDFCompatibilityProblem(
-						MDFProblemType.UNSORTED_DATA_PROBLEM, this));
+				addProblem(new MDFCompatibilityProblem(MDFProblemType.UNSORTED_DATA_PROBLEM, this));
 				// which block will be touched?
 				// all channel groups!
 				dgblk.getLnkData().touch();
@@ -363,8 +357,7 @@
 		MDF3GenBlock linkedblock;
 		for (int i = 0; i < getLinkCount(); i++) {
 			if ((linkedblock = getLink(i)) != null) {
-				r.write(MDF3Util.getBytesLink(linkedblock.getOutputpos(),
-						isBigEndian()));
+				r.write(MDF3Util.getBytesLink(linkedblock.getOutputpos(), isBigEndian()));
 			} else {
 				r.write(MDF3Util.getBytesLink(0, isBigEndian()));
 			}
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Parser.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Parser.java
index 7c6297a..e9a5c16 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Parser.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Parser.java
@@ -41,13 +41,11 @@
 	 * @throws IOException
 	 *             If an input error occurs.
 	 */
-	private static byte[] readBytes(int bytes, FileChannel in)
-			throws IOException {
+	private static byte[] readBytes(int bytes, FileChannel in) throws IOException {
 		ByteBuffer chunk = ByteBuffer.allocate(bytes);
 		int bytesread = 0;
 		if ((bytesread = in.read(chunk)) != bytes) {
-			System.err.println(
-					"Read only " + bytesread + " Bytes instead of " + bytes);
+			System.err.println("Read only " + bytesread + " Bytes instead of " + bytes);
 		}
 		return chunk.array();
 	}
@@ -101,8 +99,8 @@
 
 		MDFSorter.log.log(Level.INFO, "Needed " + fileruns + " runs.");
 		MDFSorter.log.log(Level.INFO, "Found " + blocklist.size() + " blocks.");
-		MDFSorter.log.log(Level.FINE,
-				"ValidatorListSize: " + (foundblocks + 1)); // Expected number
+		MDFSorter.log.log(Level.FINE, "ValidatorListSize: " + (foundblocks + 1)); // Expected
+																					// number
 		// of node in Vector
 		// MDFValidators
 		// node list for
@@ -122,11 +120,9 @@
 		in.position(start.getPos());
 		byte[] head = readBytes(4, in);
 		// Read header of this block
-		String blktyp = MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(head, 0, 2), 2);
+		String blktyp = MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(head, 0, 2), 2);
 		start.setId(blktyp);
-		int blklength = MDF4Util
-				.readUInt16(MDFParser.getDataBuffer(head, 2, 4));
+		int blklength = MDF4Util.readUInt16(MDFParser.getDataBuffer(head, 2, 4));
 		start.setLength(blklength);
 
 		// set standard link-count
@@ -136,9 +132,7 @@
 		// Read links and create new blocks
 		head = readBytes(blklinkcount * 4, in);
 		for (int i = 0; i < blklinkcount; i++) {
-			long nextlink = MDF3Util.readLink(
-					MDFParser.getDataBuffer(head, i * 4, (i + 1) * 4),
-					isBigEndian);
+			long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, i * 4, (i + 1) * 4), isBigEndian);
 			if (nextlink != 0) {
 				if (blktyp.equals("DG") && i == 3) { // special case: pointer to
 					// data section (4th
@@ -155,8 +149,7 @@
 		// read possible extra links in CGBLOCK
 		if (blktyp.equals("CG") && blklength == 30) {
 			head = readBytes(14, in);
-			long nextlink = MDF3Util.readLink(
-					MDFParser.getDataBuffer(head, 10, 14), isBigEndian);
+			long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, 10, 14), isBigEndian);
 			start.moreLinks(4);
 			checkFoundLink(start, nextlink, 3);
 		}
@@ -164,8 +157,7 @@
 		// read possible extra links CNBLOCK
 		if (blktyp.equals("CN") && blklength > 218) {
 			head = readBytes(198, in);
-			long nextlink = MDF3Util.readLink(
-					MDFParser.getDataBuffer(head, 194, 198), isBigEndian);
+			long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, 194, 198), isBigEndian);
 			start.moreLinks(6);
 			if (nextlink != 0) {
 				checkFoundLink(start, nextlink, 5);
@@ -173,8 +165,7 @@
 
 			if (blklength > 222) {
 				head = readBytes(4, in);
-				nextlink = MDF3Util.readLink(
-						MDFParser.getDataBuffer(head, 0, 4), isBigEndian);
+				nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, 0, 4), isBigEndian);
 				start.moreLinks(7);
 				if (nextlink != 0) {
 					checkFoundLink(start, nextlink, 6);
@@ -186,14 +177,15 @@
 		if (blktyp.equals("CC")) {
 			head = readBytes(40, in);
 			int convtype = MDF3Util.readUInt16(MDFParser.getDataBuffer(head, 38, 40), isBigEndian);
-			if(convtype == 12){
-				//TextTable has links to textblocks, get number
+			if (convtype == 12) {
+				// TextTable has links to textblocks, get number
 				head = readBytes(2, in);
 				int numberOfValues = MDF3Util.readUInt16(MDFParser.getDataBuffer(head, 0, 2), isBigEndian);
 				start.moreLinks(numberOfValues);
-				head = readBytes((8+8+4)*numberOfValues, in);
-				for(int i = 0; i < numberOfValues;i++){
-					long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, i*20+16, (i+1)*20), isBigEndian);
+				head = readBytes((8 + 8 + 4) * numberOfValues, in);
+				for (int i = 0; i < numberOfValues; i++) {
+					long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, i * 20 + 16, (i + 1) * 20),
+							isBigEndian);
 					if (nextlink != 0) {
 						checkFoundLink(start, nextlink, i);
 					}
@@ -201,15 +193,14 @@
 			}
 		}
 
-
 		// read possible extra links CDBLOCK
 		if (blktyp.equals("CD")) {
 			head = readBytes(4, in);
 			int numdep = MDF3Util.readUInt16(MDFParser.getDataBuffer(head, 2, 4), isBigEndian);
-			start.moreLinks(2*numdep);
-			head = readBytes(8*numdep, in);
-			for(int i = 0; i < 2*numdep;i++){
-				long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, 4*i, 4*(i+1)), isBigEndian);
+			start.moreLinks(2 * numdep);
+			head = readBytes(8 * numdep, in);
+			for (int i = 0; i < 2 * numdep; i++) {
+				long nextlink = MDF3Util.readLink(MDFParser.getDataBuffer(head, 4 * i, 4 * (i + 1)), isBigEndian);
 				if (nextlink != 0) {
 					checkFoundLink(start, nextlink, i);
 				}
@@ -258,8 +249,7 @@
 	 *            The number of the link from startblock to the found
 	 *            childblock.
 	 */
-	public void checkFoundDataBlockLink(MDF3GenBlock start, long address,
-			int chldnum) {
+	public void checkFoundDataBlockLink(MDF3GenBlock start, long address, int chldnum) {
 		foundblocks++;
 		if (blocklist.containsKey(address)) {
 			start.addLink(chldnum, blocklist.get(address));
@@ -286,8 +276,7 @@
 	 */
 	private void forceparse(MDF3GenBlock blk) throws IOException {
 
-		long sectionsize = blk.getLength() - 4
-				- 4 * MDF3Util.getLinkcount(blk.getId());
+		long sectionsize = blk.getLength() - 4 - 4 * MDF3Util.getLinkcount(blk.getId());
 
 		byte[] content = null;
 
@@ -355,17 +344,14 @@
 				MDF3GenBlock datasec = dgthis.getLnkData();
 				if (datasec != null) {
 					long datalength = 0;
-					MDF3GenBlock cgfirst =dgthis.getLnkCgFirst();
-					if(!(cgfirst instanceof CGBLOCK)){
+					MDF3GenBlock cgfirst = dgthis.getLnkCgFirst();
+					if (!(cgfirst instanceof CGBLOCK)) {
 						throw new RuntimeException("Error reading CGBLOCK");
 					}
 					CGBLOCK childcg = (CGBLOCK) dgthis.getLnkCgFirst();
 					do {
-						datalength += (childcg.getDataBytes()
-								+ dgthis.getNumOfRecId())
-								* childcg.getCycleCount();
-					} while ((childcg = (CGBLOCK) childcg
-							.getLnkCgNext()) != null);
+						datalength += (childcg.getDataBytes() + dgthis.getNumOfRecId()) * childcg.getCycleCount();
+					} while ((childcg = (CGBLOCK) childcg.getLnkCgNext()) != null);
 
 					datasec.setLength(datalength);
 				}
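
The MDF3Parser hunks above all reformat the same low-level pattern: every MDF3 block starts with a 2-character ID and an unsigned 16-bit block length, followed by one 4-byte link per child (DG, CG, CN, CC and CD blocks may carry extra links after the fixed part, as the special cases show). A self-contained sketch of that header read, with the byte order passed in as in the parser:

	import java.io.IOException;
	import java.nio.ByteBuffer;
	import java.nio.ByteOrder;
	import java.nio.channels.FileChannel;
	import java.nio.charset.StandardCharsets;

	class Mdf3HeaderSketch {
		// Read the fixed MDF3 block header: 2-character ID plus UINT16 block length.
		static void printHeader(FileChannel in, long pos, boolean bigEndian) throws IOException {
			ByteBuffer head = ByteBuffer.allocate(4);
			in.read(head, pos);
			head.flip();
			head.order(bigEndian ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN);
			byte[] id = new byte[2];
			head.get(id);
			String blktyp = new String(id, StandardCharsets.UTF_8); // e.g. "DG", "CG", "CN"
			int blklength = head.getShort() & 0xFFFF;               // unsigned 16-bit block length
			System.out.println(blktyp + " block, " + blklength + " bytes, links follow as 4-byte values");
		}
	}
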
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3ProcessWriter.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3ProcessWriter.java
index fc8decb..19baab9 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3ProcessWriter.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3ProcessWriter.java
@@ -48,8 +48,7 @@
 	 * @param args
 	 *            The arguments of this programm call.
 	 */
-	public MDF3ProcessWriter(MDFFileContent<MDF3GenBlock> filestructure,
-			ArgumentStruct args) {
+	public MDF3ProcessWriter(MDFFileContent<MDF3GenBlock> filestructure, ArgumentStruct args) {
 		this.filestructure = filestructure;
 		this.args = args;
 		writtenblocks = new LinkedList<MDF3GenBlock>();
@@ -79,17 +78,18 @@
 		}
 		// Check if zip flag is not set
 		if (!args.unzip) {
-			throw new IllegalArgumentException(
-					"MDF3.x Files mustn't be zipped!");
+			throw new IllegalArgumentException("MDF3.x Files mustn't be zipped!");
 		}
 
 		// Open outputfile
 		FileOutputStream out = new FileOutputStream(args.outputname);
 
-		long start; Thread t; //Start time will be stored here later.
+		long start; // Start time will be stored here later.
+		Thread t;
 
-		try(DataBlockBuffer buf = new DataBlockBuffer()){
-			//automatically stop writer thread if exeptions occur (Writer Thread is stopped vie the DataBlock Buffer.
+		try (DataBlockBuffer buf = new DataBlockBuffer()) {
+			// automatically stop the writer thread if exceptions occur (the
+			// writer thread is stopped via the DataBlockBuffer).
 
 			myCache = new WriteDataCache(buf);
 
@@ -124,8 +124,7 @@
 				} else {
 					if (blk.getProblems() != null) {
 						for (MDFCompatibilityProblem p : blk.getProblems()) {
-							MDFSorter.log.log(Level.FINE,
-									"Problem of Type: " + p.getType());
+							MDFSorter.log.log(Level.FINE, "Problem of Type: " + p.getType());
 						}
 						solveProblem(blk.getProblems());
 					} else {
@@ -139,18 +138,17 @@
 			// Flush Cache
 			myCache.flush();
 		}
-		//Wait for completion of write operation.
+		// Wait for completion of write operation.
 		try {
 			t.join();
 		} catch (InterruptedException e) {
 			e.printStackTrace();
 		}
-		//Close output stream.
+		// Close output stream.
 		out.close();
 
 		MDFSorter.log.log(Level.INFO, "Wrote " + writeptr / 1000 + " kB.");
-		MDFSorter.log.log(Level.INFO,
-				"Writing took " + (System.currentTimeMillis() - start) + " ms");
+		MDFSorter.log.log(Level.INFO, "Writing took " + (System.currentTimeMillis() - start) + " ms");
 
 		// Update links with RandomAccessFile
 		RandomAccessFile r = new RandomAccessFile(args.outputname, "rw");
@@ -171,8 +169,7 @@
 	 * @throws DataFormatException
 	 *             If zipped Data is in an invalid format
 	 */
-	public void solveProblem(List<MDFCompatibilityProblem> l)
-			throws IOException, DataFormatException {
+	public void solveProblem(List<MDFCompatibilityProblem> l) throws IOException, DataFormatException {
 		if (l.size() != 1) {
 			System.out.println("To many Problems.");
 			// This may be supported in later versions.
@@ -186,10 +183,8 @@
 				// We have more than one channel group in a single
 				// DataGroup. We have to create new DataGroups for each
 				// Channel Group.
-				LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup(
-						(DGBLOCK) node);
-				MDFSorter.log.log(Level.INFO,
-						"Found " + groups.size() + " Channel Groups in DG.");
+				LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup((DGBLOCK) node);
+				MDFSorter.log.log(Level.INFO, "Found " + groups.size() + " Channel Groups in DG.");
 				MDF3GenBlock datasection = ((DGBLOCK) node).getLnkData();
 				SortDataGroup(prob, groups, datasection);
 
@@ -200,8 +195,7 @@
 		}
 	}
 
-	public LinkedList<CGBLOCK> getChannelGroupsfromDataGroup(
-			DGBLOCK startDataGroup) {
+	public LinkedList<CGBLOCK> getChannelGroupsfromDataGroup(DGBLOCK startDataGroup) {
 		LinkedList<CGBLOCK> ret = new LinkedList<CGBLOCK>();
 		CGBLOCK next = (CGBLOCK) startDataGroup.getLnkCgFirst();
 		while (next != null) {
@@ -211,14 +205,12 @@
 		return ret;
 	}
 
-	public void SortDataGroup(MDFCompatibilityProblem prob,
-			LinkedList<CGBLOCK> groups, MDF3GenBlock datasection)
-					throws IOException, DataFormatException {
+	public void SortDataGroup(MDFCompatibilityProblem prob, LinkedList<CGBLOCK> groups, MDF3GenBlock datasection)
+			throws IOException, DataFormatException {
 
 		DGBLOCK datagroup = (DGBLOCK) prob.getStartnode();
 		// sort records.
-		MDF3DataProvider prov = new MDF3DataProvider(datasection,
-				filestructure.getInput());
+		MDF3DataProvider prov = new MDF3DataProvider(datasection, filestructure.getInput());
 
 		int idSize = 1;
 
@@ -236,7 +228,7 @@
 
 		for (CGBLOCK cgroup : groups) {
 
-			//Loop through records, and initialize variables
+			// Loop through records, and initialize variables
 			recCounters[i] = cgroup.getCycleCount();
 			int recID = cgroup.getRecordId();
 			recNumtoArrIdx.put(recID, i++);
@@ -256,8 +248,7 @@
 			last = copyChannelInfrastructure(last, cgroup);
 			long reclen = cgroup.getDataBytes();
 			newlength = cgroup.getCycleCount() * cgroup.getDataBytes();
-			MDF3BlocksSplittMerger splitmerger = new MDF3BlocksSplittMerger(
-					this, last, newlength, prov);
+			MDF3BlocksSplittMerger splitmerger = new MDF3BlocksSplittMerger(this, last, newlength, prov);
 
 			// write data sections.
 			for (long l : startaddresses[arridx]) {
@@ -267,27 +258,27 @@
 		}
 	}
 
-
-	public long[][] fillRecordArray(long[] recordCounters, Map<Integer, Integer> recNumtoArrIdx, Map<Integer,
-			Integer> recNumtoSize, AbstractDataProvider prov, boolean redundantids) throws IOException, DataFormatException{
+	public long[][] fillRecordArray(long[] recordCounters, Map<Integer, Integer> recNumtoArrIdx,
+			Map<Integer, Integer> recNumtoSize, AbstractDataProvider prov, boolean redundantids)
+			throws IOException, DataFormatException {
 
 		MDFSorter.log.info("Searching Records.");
 
 		long[][] startaddresses = new long[recordCounters.length][];
 		int idSize = 1;
 
-		//initilize array.
-		int counter =0;
-		long totalRecords = 0; //total number of records
-		for(long i : recordCounters){
-			totalRecords+=i;
+		// initialize array.
+		int counter = 0;
+		long totalRecords = 0; // total number of records
+		for (long i : recordCounters) {
+			totalRecords += i;
 			startaddresses[counter++] = new long[(int) i];
 		}
 
 		int[] foundrecCounters = new int[recordCounters.length];
 
-		long sectionoffset = 0; //our position in the data section
-		long foundrecords = 0; //number of records we found
+		long sectionoffset = 0; // our position in the data section
+		long foundrecords = 0; // number of records we found
 
 		ByteBuffer databuf;
 		while (foundrecords < totalRecords) {
@@ -295,22 +286,21 @@
 			databuf = prov.cachedRead(sectionoffset, idSize);
 			int foundID = MDF3Util.readUInt8(databuf);
 			Integer foundsize = recNumtoSize.get(foundID);
-			if(foundsize == null){ //Check if a size was found.
-				throw new RuntimeException("No Size known for record ID "+ foundID+ ".");
+			if (foundsize == null) { // Check if a size was found.
+				throw new RuntimeException("No Size known for record ID " + foundID + ".");
 			}
 			if (redundantids) {
 				// do a sanity check with the second id
 				databuf = prov.cachedRead(sectionoffset, idSize + foundsize);
 				int endID = MDF3Util.readUInt8(databuf);
 				if (endID != foundID) {
-					MDFSorter.log.warning("Found ID " + foundID
-							+ " at start of records, but ID " + endID
-							+ " at its end.");
+					MDFSorter.log
+							.warning("Found ID " + foundID + " at start of records, but ID " + endID + " at its end.");
 				}
 			}
 			Integer arridx = recNumtoArrIdx.get(foundID);
-			if(arridx == null){ //Check if an entry was found.
-				throw new RuntimeException("Record ID "+ foundID+ " is not known.");
+			if (arridx == null) { // Check if an entry was found.
+				throw new RuntimeException("Record ID " + foundID + " is not known.");
 			}
 			startaddresses[arridx][foundrecCounters[arridx]++] = sectionoffset; // remember
 			// start
@@ -328,8 +318,7 @@
 		return startaddresses;
 	}
 
-	public DGBLOCK copyChannelInfrastructure(MDF3GenBlock last, CGBLOCK towrite)
-			throws IOException {
+	public DGBLOCK copyChannelInfrastructure(MDF3GenBlock last, CGBLOCK towrite) throws IOException {
 		// Create new Data Group with default values, and write to file.
 		DGBLOCK newdg = new DGBLOCK(filestructure.isBigEndian());
 		newdg.setChannelGroups(1);
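
The sorting path in MDF3ProcessWriter hinges on fillRecordArray: the unsorted data section is scanned record by record, each record is attributed to its channel group via the leading UINT8 record ID, and its start offset is remembered. A compact sketch of that scan; the step width after a match is an assumption, since that part of the loop lies outside the hunks above:

	import java.util.ArrayList;
	import java.util.HashMap;
	import java.util.List;
	import java.util.Map;

	class RecordScanSketch {
		// Collect, per record ID, the offset of every record in an unsorted data section.
		static Map<Integer, List<Long>> scanRecords(byte[] data, Map<Integer, Integer> recNumToSize, boolean redundantIds) {
			Map<Integer, List<Long>> startAddresses = new HashMap<>();
			long offset = 0;
			while (offset < data.length) {
				int id = data[(int) offset] & 0xFF;            // UINT8 record ID
				Integer size = recNumToSize.get(id);
				if (size == null) {
					throw new RuntimeException("No size known for record ID " + id + ".");
				}
				startAddresses.computeIfAbsent(id, k -> new ArrayList<>()).add(offset);
				offset += 1 + size + (redundantIds ? 1 : 0);   // ID + record bytes (+ trailing ID, assumed)
			}
			return startAddresses;
		}
	}
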
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Util.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Util.java
index e621b2a..8f597a4 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Util.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf3/MDF3Util.java
@@ -27,8 +27,7 @@
 	 */
 	public static int getLinkcount(String blockID) {
 		if (blockID.length() != 2) {
-			throw new IllegalArgumentException(
-					"ID-Length = " + blockID.length());
+			throw new IllegalArgumentException("ID-Length = " + blockID.length());
 		}
 
 		switch (blockID) {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ATBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ATBLOCK.java
index 16ce05b..1da9af4 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ATBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ATBLOCK.java
@@ -15,7 +15,8 @@
  * <p>
  * THE Attachment block<code>ATBLOCK</code>
  * </p>
- * The ATBLOCK specifies attached data, eiher by referencing an external file or by embedding the data in the MDF-File.
+ * The ATBLOCK specifies attached data, either by referencing an external file or
+ * by embedding the data in the MDF-File.
  *
  * @author Christian Rechner, Tobias Leemann
  */
@@ -28,7 +29,6 @@
 	// CHAR
 	private String txData;
 
-
 	/**
 	 * Parse a TXBLOCK from an existing MDFGenBlock
 	 *
@@ -44,25 +44,25 @@
 		parent.setPrec(this);
 	}
 
-	//Getters and Setters
+	// Getters and Setters
 
-	//Link to next ATBLOCK
-	public MDF4GenBlock getLnkAtNext(){
+	// Link to next ATBLOCK
+	public MDF4GenBlock getLnkAtNext() {
 		return links[0];
 	}
 
-	//Link to TextBlock with path of the referenced file
-	public MDF4GenBlock getLnkTxFilename(){
+	// Link to TextBlock with path of the referenced file
+	public MDF4GenBlock getLnkTxFilename() {
 		return links[1];
 	}
 
-	//Link to the MIME-Type (as text)
-	public MDF4GenBlock getLnkTxMIMEType(){
+	// Link to the MIME-Type (as text)
+	public MDF4GenBlock getLnkTxMIMEType() {
 		return links[2];
 	}
 
-	//Link to MDBlock with comment
-	public MDF4GenBlock getLnkMdComment(){
+	// Link to MDBlock with comment
+	public MDF4GenBlock getLnkMdComment() {
 		return links[3];
 	}
 
@@ -82,14 +82,16 @@
 	/**
 	 * Reads a TXBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// Read text String
 		setTxData(MDF4Util.readCharsUTF8(ByteBuffer.wrap(content), content.length));
-		//TODO: Bytes after zero termination?
+		// TODO: Bytes after zero termination?
 	}
 
 }
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CABLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CABLOCK.java
index 28f973d..86a7a63 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CABLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CABLOCK.java
@@ -23,7 +23,7 @@
  * @author EU2IYD9
  *
  */
-public class CABLOCK extends MDF4GenBlock{
+public class CABLOCK extends MDF4GenBlock {
 
 	/**
 	 * Defines what kind of array is described.
@@ -31,10 +31,8 @@
 	private byte arrayType;
 
 	/**
-	 * Defines how elements are stored -
-	 * 0 = CN Template
-	 * 1 = CG Template
-	 * 2 = DG Template
+	 * Defines how elements are stored - 0 = CN Template 1 = CG Template 2 = DG
+	 * Template
 	 */
 	private byte storageType;
 
@@ -64,7 +62,8 @@
 	private long[] dimSize;
 
 	/**
-	 * List of raw values fro axis points on each axis. Only present if FixedAxis flag is set.
+	 * List of raw values for axis points on each axis. Only present if
+	 * FixedAxis flag is set.
 	 */
 	private double[] axisValues;
 
@@ -73,16 +72,17 @@
 	 */
 	private long[] cycCounts;
 
-
-	//Helper variables
+	// Helper variables
 
 	/**
-	 * The product of all sizes of dimensions. (dimSize[0]*dimSize[1]*dimSize[2]*...)
+	 * The product of all sizes of dimensions.
+	 * (dimSize[0]*dimSize[1]*dimSize[2]*...)
 	 */
 	private long volume;
 
 	/**
-	 * The sum of all sizes of dimensions. (dimSize[0]+dimSize[1]+dimSize[2]+...)
+	 * The sum of all sizes of dimensions.
+	 * (dimSize[0]+dimSize[1]+dimSize[2]+...)
 	 */
 	private int sumd;
 
@@ -101,15 +101,15 @@
 		parent.setPrec(this);
 	}
 
-	//Links
+	// Links
 
-	public MDF4GenBlock getLnkComposition(){
+	public MDF4GenBlock getLnkComposition() {
 		return links[0];
 	}
 
-	public MDF4GenBlock[] getLnkData(){
-		if(getStorageType()==2){ //DG Template
-			MDF4GenBlock[] data = new MDF4GenBlock[(int)volume];
+	public MDF4GenBlock[] getLnkData() {
+		if (getStorageType() == 2) { // DG Template
+			MDF4GenBlock[] data = new MDF4GenBlock[(int) volume];
 			System.arraycopy(links, 1, getStartPosition(1), 0, (int) volume);
 			return data;
 		} else {
@@ -117,112 +117,108 @@
 		}
 	}
 
-	public MDF4GenBlock[] getLnkDynamicSize(){
-		if(isDynamicSize()){
-			MDF4GenBlock[] data = new MDF4GenBlock[getChannelDimensions()*3];
-			System.arraycopy(links,  getStartPosition(2), data, 0, getChannelDimensions()*3);
+	public MDF4GenBlock[] getLnkDynamicSize() {
+		if (isDynamicSize()) {
+			MDF4GenBlock[] data = new MDF4GenBlock[getChannelDimensions() * 3];
+			System.arraycopy(links, getStartPosition(2), data, 0, getChannelDimensions() * 3);
 			return data;
-		}else{
+		} else {
 			return null;
 		}
 	}
 
-	public MDF4GenBlock[] getLnkInputQuantity(){
-		if(isInputQuantityFlag()){
-			MDF4GenBlock[] data = new MDF4GenBlock[getChannelDimensions()*3];
-			System.arraycopy(links, getStartPosition(3), data, 0, getChannelDimensions()*3);
+	public MDF4GenBlock[] getLnkInputQuantity() {
+		if (isInputQuantityFlag()) {
+			MDF4GenBlock[] data = new MDF4GenBlock[getChannelDimensions() * 3];
+			System.arraycopy(links, getStartPosition(3), data, 0, getChannelDimensions() * 3);
 			return data;
-		}else{
+		} else {
 			return null;
 		}
 	}
 
-	public MDF4GenBlock[] getLnkOutputQuantity(){
-		if(isOutputQuantityFlag()){
+	public MDF4GenBlock[] getLnkOutputQuantity() {
+		if (isOutputQuantityFlag()) {
 			MDF4GenBlock[] data = new MDF4GenBlock[3];
 			System.arraycopy(links, getStartPosition(4), data, 0, 3);
 			return data;
-		}else{
+		} else {
 			return null;
 		}
 	}
 
-	public MDF4GenBlock[] getLnkComparisonQuantity(){
-		if(isComparisonQuantityFlag()){
+	public MDF4GenBlock[] getLnkComparisonQuantity() {
+		if (isComparisonQuantityFlag()) {
 			MDF4GenBlock[] data = new MDF4GenBlock[3];
-			System.arraycopy(links, getStartPosition(5), data, 0,3);
+			System.arraycopy(links, getStartPosition(5), data, 0, 3);
 			return data;
-		}else{
+		} else {
 			return null;
 		}
 	}
 
-	public MDF4GenBlock[] getLnkAxisConversion(){
-		if(isAxis()){
+	public MDF4GenBlock[] getLnkAxisConversion() {
+		if (isAxis()) {
 			MDF4GenBlock[] data = new MDF4GenBlock[channelDimensions];
-			System.arraycopy(links, getStartPosition(6), data, 0,channelDimensions);
+			System.arraycopy(links, getStartPosition(6), data, 0, channelDimensions);
 			return data;
-		}else{
+		} else {
 			return null;
 		}
 	}
 
-	public MDF4GenBlock[] getLnkAxis(){
-		if(isAxis() && !isFixedAxis()){
-			MDF4GenBlock[] data = new MDF4GenBlock[channelDimensions*3];
-			System.arraycopy(links, getStartPosition(7), data, 0,3*channelDimensions);
+	public MDF4GenBlock[] getLnkAxis() {
+		if (isAxis() && !isFixedAxis()) {
+			MDF4GenBlock[] data = new MDF4GenBlock[channelDimensions * 3];
+			System.arraycopy(links, getStartPosition(7), data, 0, 3 * channelDimensions);
 			return data;
-		}else{
+		} else {
 			return null;
 		}
 	}
 
 	/**
-	 * This function calculates the start offset for the given link section.
-	 * 1 = LinkData
-	 * 2 = LinkDynamicSize
-	 * 3 = LinkInputQuantity
-	 * 4 = LinkOutputQuantity
-	 * 5 = LinkComparisionQuantity
-	 * 6 = LinkAxisConversion
-	 * 7 = LinkAxis
+	 * This function calculates the start offset for the given link section. 1 =
+	 * LinkData 2 = LinkDynamicSize 3 = LinkInputQuantity 4 = LinkOutputQuantity
+	 * 5 = LinkComparisonQuantity 6 = LinkAxisConversion 7 = LinkAxis
+	 * 
 	 * @param whichLinkSection
 	 */
-	private int getStartPosition(int whichLinkSection){
+	private int getStartPosition(int whichLinkSection) {
 		int startposition = 1;
-		for(int i = 1; i < 10; i++){
-			if(whichLinkSection == i){
+		for (int i = 1; i < 10; i++) {
+			if (whichLinkSection == i) {
 				return startposition;
 			}
-			switch(i){
+			switch (i) {
 			case 1:
-				if(getStorageType()==2){
-					startposition+=volume;
+				if (getStorageType() == 2) {
+					startposition += volume;
 				}
 				break;
 			case 2:
-				if(isDynamicSize()){
-					startposition+=3*channelDimensions;
+				if (isDynamicSize()) {
+					startposition += 3 * channelDimensions;
 				}
 				break;
 			case 3:
-				if(isInputQuantityFlag()){
-					startposition+=3*channelDimensions;
+				if (isInputQuantityFlag()) {
+					startposition += 3 * channelDimensions;
 				}
 				break;
 			case 4:
-				if(isOutputQuantityFlag()){
-					startposition+=3;
+				if (isOutputQuantityFlag()) {
+					startposition += 3;
 				}
 				break;
 			case 5:
-				if(isComparisonQuantityFlag()){
-					startposition+=3;
+				if (isComparisonQuantityFlag()) {
+					startposition += 3;
 				}
 				break;
 			case 6:
-				if(isAxis()){
-					startposition+=channelDimensions;
+				if (isAxis()) {
+					startposition += channelDimensions;
 				}
 				break;
 			}
@@ -230,6 +226,7 @@
 		return -1;
 
 	}
+
 	public byte getArrayType() {
 		return arrayType;
 	}
@@ -262,27 +259,27 @@
 		this.flags = flags;
 	}
 
-	public boolean isDynamicSize(){
+	public boolean isDynamicSize() {
 		return BigInteger.valueOf(getFlags()).testBit(0);
 	}
 
-	public boolean isInputQuantityFlag(){
+	public boolean isInputQuantityFlag() {
 		return BigInteger.valueOf(getFlags()).testBit(1);
 	}
 
-	public boolean isOutputQuantityFlag(){
+	public boolean isOutputQuantityFlag() {
 		return BigInteger.valueOf(getFlags()).testBit(2);
 	}
 
-	public boolean isComparisonQuantityFlag(){
+	public boolean isComparisonQuantityFlag() {
 		return BigInteger.valueOf(getFlags()).testBit(3);
 	}
 
-	public boolean isAxis(){
+	public boolean isAxis() {
 		return BigInteger.valueOf(getFlags()).testBit(4);
 	}
 
-	public boolean isFixedAxis(){
+	public boolean isFixedAxis() {
 		return BigInteger.valueOf(getFlags()).testBit(5);
 	}
 
@@ -351,33 +348,33 @@
 
 		// UINT64 * D: Size of each dimension
 		long[] dimSizes = new long[getChannelDimensions()];
-		volume= 1;
+		volume = 1;
 		sumd = 0;
-		for(int d = 0; d < getChannelDimensions(); d++){
-			dimSizes[d] = MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 16+8*d, 16+8*d +8));
-			volume*=dimSizes[d];
-			sumd+=dimSizes[d];
+		for (int d = 0; d < getChannelDimensions(); d++) {
+			dimSizes[d] = MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 16 + 8 * d, 16 + 8 * d + 8));
+			volume *= dimSizes[d];
+			sumd += dimSizes[d];
 		}
 		setDimSize(dimSizes);
 
-		int currpos = 16+getChannelDimensions()*8;
+		int currpos = 16 + getChannelDimensions() * 8;
 
-		if(isFixedAxis()){
-			//REAL * SUM(D)
+		if (isFixedAxis()) {
+			// REAL * SUM(D)
 			double[] axesval = new double[sumd];
-			for(int d = 0; d < sumd; d++){
-				axesval[d] = MDF4Util.readReal(MDFParser.getDataBuffer(content, currpos, currpos +8));
-				currpos+=8;
+			for (int d = 0; d < sumd; d++) {
+				axesval[d] = MDF4Util.readReal(MDFParser.getDataBuffer(content, currpos, currpos + 8));
+				currpos += 8;
 			}
 
 		}
 
-		if(getStorageType()==1 || getStorageType()==2){
-			//UINT64* PROD(D)
+		if (getStorageType() == 1 || getStorageType() == 2) {
+			// UINT64* PROD(D)
 			long[] cycCounters = new long[(int) volume];
-			for(int d = 0; d < sumd; d++){
-				cycCounters[d] = MDF4Util.readUInt64(MDFParser.getDataBuffer(content, currpos, currpos +8));
-				currpos+=8;
+			for (int d = 0; d < sumd; d++) {
+				cycCounters[d] = MDF4Util.readUInt64(MDFParser.getDataBuffer(content, currpos, currpos + 8));
+				currpos += 8;
 			}
 
 		}
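
CABLOCK stores all optional link sections back to back in one links array, so getStartPosition() has to sum up the sizes of every section that precedes the requested one. The same arithmetic as a standalone sketch, with plain bit tests instead of BigInteger.testBit:

	// Section numbering as in the Javadoc (1 = LinkData ... 7 = LinkAxis), dims = channelDimensions.
	static int startPosition(int section, long flags, byte storageType, int dims, long volume) {
		int pos = 1;                                 // links[0] is the composition link
		if (section == 1) return pos;
		if (storageType == 2) pos += volume;         // DG template: one data link per array element
		if (section == 2) return pos;
		if ((flags & 1L) != 0) pos += 3 * dims;      // dynamic size: DG/CG/CN triple per dimension
		if (section == 3) return pos;
		if ((flags & 2L) != 0) pos += 3 * dims;      // input quantity
		if (section == 4) return pos;
		if ((flags & 4L) != 0) pos += 3;             // output quantity
		if (section == 5) return pos;
		if ((flags & 8L) != 0) pos += 3;             // comparison quantity
		if (section == 6) return pos;
		if ((flags & 16L) != 0) pos += dims;         // axis conversion: one CCBLOCK per dimension
		if (section == 7) return pos;
		return -1;
	}
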
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CCBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CCBLOCK.java
index 19ea233..ad602d8 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CCBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CCBLOCK.java
@@ -12,20 +12,20 @@
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 
-
 /**
  * <p>
  * THE CHANNEL CONVERSION BLOCK <code>CCBLOCK</code>
  * </p>
- * The data records can be used to store raw values (often also denoted as implementation values or internal values).
- * The CCBLOCK serves to specify a conversion formula to convert the raw values to physical values with a physical unit.
- * The result of a conversion always is either a floating-point value (REAL) or a character string (UTF-8).
+ * The data records can be used to store raw values (often also denoted as
+ * implementation values or internal values). The CCBLOCK serves to specify a
+ * conversion formula to convert the raw values to physical values with a
+ * physical unit. The result of a conversion always is either a floating-point
+ * value (REAL) or a character string (UTF-8).
  *
  * @author Christian Rechner
  */
 public class CCBLOCK extends MDF4GenBlock {
 
-
 	/** Data section */
 
 	// Conversion type (formula identifier)
@@ -45,8 +45,10 @@
 
 	// Precision for display of floating point values.
 	// 0xFF means unrestricted precision (infinite)
-	// Any other value specifies the number of decimal places to use for display of floating point values.
-	// Note: only valid if "precision valid" flag (bit 0) is set and if cn_precision of the parent CNBLOCK is invalid,
+	// Any other value specifies the number of decimal places to use for display
+	// of floating point values.
+	// Note: only valid if "precision valid" flag (bit 0) is set and if
+	// cn_precision of the parent CNBLOCK is invalid,
 	// otherwise cn_precision must be used.
 	// UINT8
 	private byte precision;
@@ -80,7 +82,6 @@
 	// REAL N
 	private double[] val;
 
-
 	/**
 	 * Parse a CCBLOCK from an existing MDFGenBlock
 	 *
@@ -101,26 +102,31 @@
 		return links[0];
 	}
 
-	// Link to TXBLOCK/MDBLOCK with physical unit of signal data (after conversion). (can be NIL)
+	// Link to TXBLOCK/MDBLOCK with physical unit of signal data (after
+	// conversion). (can be NIL)
 	public MDF4GenBlock getLnkMdUnit() {
 		return links[1];
 	}
 
-	// Link to TXBLOCK/MDBLOCK with comment of conversion and additional information. (can be NIL)
+	// Link to TXBLOCK/MDBLOCK with comment of conversion and additional
+	// information. (can be NIL)
 	public MDF4GenBlock getLnkMdComment() {
 		return links[2];
 	}
 
-	// Link to CCBLOCK for inverse formula (can be NIL, must be NIL for CCBLOCK of the inverse formula (no cyclic
+	// Link to CCBLOCK for inverse formula (can be NIL, must be NIL for CCBLOCK
+	// of the inverse formula (no cyclic
 	// reference allowed).
 	public MDF4GenBlock getLnkCcInverse() {
 		return links[3];
 	}
 
-	// List of additional links to TXBLOCKs with strings or to CCBLOCKs with partial conversion rules. Length of list is
-	// given by cc_ref_count. The list can be empty. Details are explained in formula-specific block supplement.
+	// List of additional links to TXBLOCKs with strings or to CCBLOCKs with
+	// partial conversion rules. Length of list is
+	// given by cc_ref_count. The list can be empty. Details are explained in
+	// formula-specific block supplement.
 	public MDF4GenBlock[] getLnkCcRef() {
-		MDF4GenBlock[] ret = new MDF4GenBlock[getLinkCount()-4];
+		MDF4GenBlock[] ret = new MDF4GenBlock[getLinkCount() - 4];
 		System.arraycopy(links, 4, ret, 0, ret.length);
 		return ret;
 	}
@@ -191,17 +197,18 @@
 
 	@Override
 	public String toString() {
-		return "CCBLOCK [type=" + type
-				+ ", precision=" + precision + ", flags=" + flags + ", refCount=" + refCount + ", valCount=" + valCount
-				+ ", phyRangeMin=" + phyRangeMin + ", phyRangeMax=" + phyRangeMax + ", val=" + Arrays.toString(val)
-				+ "]";
+		return "CCBLOCK [type=" + type + ", precision=" + precision + ", flags=" + flags + ", refCount=" + refCount
+				+ ", valCount=" + valCount + ", phyRangeMin=" + phyRangeMin + ", phyRangeMax=" + phyRangeMax + ", val="
+				+ Arrays.toString(val) + "]";
 	}
 
 	/**
 	 * Reads a CCBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CGBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CGBLOCK.java
index 2f95332..39b570a 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CGBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CGBLOCK.java
@@ -205,10 +205,8 @@
 
 	@Override
 	public String toString() {
-		return "CGBLOCK [recordId=" + recordId + ", cycleCount=" + cycleCount
-				+ ", flags=" + flags + ", pathSeparator=" + pathSeparator
-				+ ", dataBytes=" + dataBytes + ", invalBytes=" + invalBytes
-				+ "]";
+		return "CGBLOCK [recordId=" + recordId + ", cycleCount=" + cycleCount + ", flags=" + flags + ", pathSeparator="
+				+ pathSeparator + ", dataBytes=" + dataBytes + ", invalBytes=" + invalBytes + "]";
 	}
 
 	/**
@@ -218,30 +216,25 @@
 	public void parse(byte[] content) throws IOException {
 
 		// UINT64: Record ID
-		setRecordId(
-				MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 0, 8)));
+		setRecordId(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 0, 8)));
 
 		// UINT64: Number of cycles
-		setCycleCount(
-				MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 8, 16)));
+		setCycleCount(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 8, 16)));
 
 		// UINT16: Flags
 		setFlags(MDF4Util.readUInt16(MDFParser.getDataBuffer(content, 16, 18)));
 
 		// UINT16: Value of character to be used as path separator, 0 if no path
 		// separator specified.
-		setPathSeparator(
-				MDF4Util.readUInt16(MDFParser.getDataBuffer(content, 18, 20)));
+		setPathSeparator(MDF4Util.readUInt16(MDFParser.getDataBuffer(content, 18, 20)));
 
 		// UINT32: Number of data Bytes (after record ID) used for signal values
 		// in record.
-		setDataBytes(
-				MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 24, 28)));
+		setDataBytes(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 24, 28)));
 
 		// UINT32: Number of additional Bytes for record used for invalidation
 		// bits.
-		setInvalBytes(
-				MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 28, 32)));
+		setInvalBytes(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 28, 32)));
 	}
 
 	@Override
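
A small helper sketch for the CGBLOCK fields parsed above: the share of a sorted MDF4 data section occupied by one channel group follows directly from cycle count, data bytes, invalidation bytes and the record-ID size of the owning DGBLOCK (plain records only, not VLSD):

	// Every record is recIdSize ID bytes, cg_data_bytes of signal data and cg_inval_bytes of invalidation bits.
	static long dataSectionBytes(long cycleCount, long dataBytes, long invalBytes, int recIdSize) {
		long recordLength = recIdSize + dataBytes + invalBytes;
		return cycleCount * recordLength;
	}
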
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CHBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CHBLOCK.java
index 5f95208..8a08f50 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CHBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CHBLOCK.java
@@ -15,18 +15,20 @@
  * <p>
  * THE CHANNEL HIERARCHY BLOCK <code>CHBLOCK</code>
  * </p>
- * The CHBLOCKs describe a logical ordering of the channels in a tree-like structure. This only serves to structure the
- * channels and is totally independent to the data group and channel group structuring. A channel even may not be
- * referenced at all or more than one time.<br>
- * Each CHBLOCK can be seen as a node in a tree which has a number of channels as leafs and which has a reference to its
- * next sibling and its first child node (both CHBLOCKs). The reference to a channel is always a triple link to the
- * CNBLOCK of the channel and its parent CGBLOCK and DGBLOCK. Each CHBLOCK can have a name.
+ * The CHBLOCKs describe a logical ordering of the channels in a tree-like
+ * structure. This only serves to structure the channels and is totally
 + * independent of the data group and channel group structuring. A channel even
+ * may not be referenced at all or more than one time.<br>
+ * Each CHBLOCK can be seen as a node in a tree which has a number of channels
+ * as leafs and which has a reference to its next sibling and its first child
+ * node (both CHBLOCKs). The reference to a channel is always a triple link to
+ * the CNBLOCK of the channel and its parent CGBLOCK and DGBLOCK. Each CHBLOCK
+ * can have a name.
  *
  * @author Christian Rechner, Tobias Leemann
  */
 public class CHBLOCK extends MDF4GenBlock {
 
-
 	/**
 	 * Parse a CHBLOCK from an existing MDFGenBlock
 	 *
@@ -52,36 +54,35 @@
 	 */
 	private byte hirarchyType;
 
-
-	//Getters and Setters
-	//Link to next CHBLOCK
-	public MDF4GenBlock getLnkChNext(){
+	// Getters and Setters
+	// Link to next CHBLOCK
+	public MDF4GenBlock getLnkChNext() {
 		return links[0];
 	}
 
-	//Link to first child CHBLOCK
-	public MDF4GenBlock getLnkChFirst(){
+	// Link to first child CHBLOCK
+	public MDF4GenBlock getLnkChFirst() {
 		return links[1];
 	}
 
-	//Link to name of this block
-	public MDF4GenBlock getLnkTxName(){
+	// Link to name of this block
+	public MDF4GenBlock getLnkTxName() {
 		return links[2];
 	}
 
-	//Link to MDBlock
-	public MDF4GenBlock getLnkMdComment(){
+	// Link to MDBlock
+	public MDF4GenBlock getLnkMdComment() {
 		return links[3];
 	}
 
-	//Links to DG, CG, CN of element i
-	public MDF4GenBlock getLnkElement(int i){
-		if(i>= getElementCount()){
-			System.out.println("Invalid acces to element "+ i +".");
+	// Links to DG, CG, CN of element i
+	public MDF4GenBlock getLnkElement(int i) {
+		if (i >= getElementCount()) {
+			System.out.println("Invalid access to element " + i + ".");
 			return null;
 		}
 		MDF4GenBlock[] ret = new MDF4GenBlock[3];
-		System.arraycopy(links, 4+3*i, ret, 0, 3);
+		System.arraycopy(links, 4 + 3 * i, ret, 0, 3);
 		return links[3];
 	}
 
@@ -104,8 +105,10 @@
 	/**
 	 * Reads a CHBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CNBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CNBLOCK.java
index 14d87d1..4e934ef 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CNBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/CNBLOCK.java
@@ -71,7 +71,8 @@
 	// been applied.
 	// If zero, the signal value is 1-Byte aligned. A value different to zero is
 	// only allowed for Integer data types
-	// (cn_data_type ≤ 3) and if the Integer signal value fits into 8 contiguous
+	// (cn_data_type ≤ 3) and if the Integer signal value fits into 8
+	// contiguous
 	// Bytes (cn_bit_count + cn_bit_offset ≤
 	// 64). For all other cases, cn_bit_offset must be zero.
 	// UINT8
@@ -365,24 +366,17 @@
 
 	@Override
 	public String toString() {
-		return "CNBLOCK [lnkCnNext=" + getLnkCnNext().getPos()
-				+ ", lnkComposition=" + getLnkComposition().getPos()
-				+ ", lnkTxName=" + getLnkTxName().getPos() + ", lnkSiSource="
-				+ getLnkSiSource().getPos() + ", lnkCcConversion="
-				+ getLnkComposition().getPos() + ", lnkData="
-				+ getLnkData().getPos() + ", lnkMdUnit="
-				+ getLnkMdUnit().getPos() + ", lnkMdComment="
-				+ getLnkMdComment().getPos() + ", lnkAtReference=" + "{"
-				+ getLnkAtReference().length + "}" + ", lnkDefaultX=" + "{"
-				+ getLnkDefaultX().length + "}" + ", channelType=" + channelType
-				+ ", syncType=" + syncType + ", dataType=" + dataType
-				+ ", bitOffset=" + bitOffset + ", byteOffset=" + byteOffset
-				+ ", bitCount=" + bitCount + ", flags=" + flags
-				+ ", invalBitPos=" + invalBitPos + ", precision=" + precision
-				+ ", attachmentCount=" + attachmentCount + ", valRangeMin="
-				+ valRangeMin + ", valRangeMax=" + valRangeMax + ", limitMin="
-				+ limitMin + ", limitMax=" + limitMax + ", limitExtMin="
-				+ limitExtMin + ", limitExtMax=" + limitExtMax + "]";
+		return "CNBLOCK [lnkCnNext=" + getLnkCnNext().getPos() + ", lnkComposition=" + getLnkComposition().getPos()
+				+ ", lnkTxName=" + getLnkTxName().getPos() + ", lnkSiSource=" + getLnkSiSource().getPos()
+				+ ", lnkCcConversion=" + getLnkComposition().getPos() + ", lnkData=" + getLnkData().getPos()
+				+ ", lnkMdUnit=" + getLnkMdUnit().getPos() + ", lnkMdComment=" + getLnkMdComment().getPos()
+				+ ", lnkAtReference=" + "{" + getLnkAtReference().length + "}" + ", lnkDefaultX=" + "{"
+				+ getLnkDefaultX().length + "}" + ", channelType=" + channelType + ", syncType=" + syncType
+				+ ", dataType=" + dataType + ", bitOffset=" + bitOffset + ", byteOffset=" + byteOffset + ", bitCount="
+				+ bitCount + ", flags=" + flags + ", invalBitPos=" + invalBitPos + ", precision=" + precision
+				+ ", attachmentCount=" + attachmentCount + ", valRangeMin=" + valRangeMin + ", valRangeMax="
+				+ valRangeMax + ", limitMin=" + limitMin + ", limitMax=" + limitMax + ", limitExtMin=" + limitExtMin
+				+ ", limitExtMax=" + limitExtMax + "]";
 	}
 
 	@Override
@@ -401,19 +395,16 @@
 
 		// UINT32: Offset to first Byte in the data record that contains bits of
 		// the signal value.
-		setByteOffset(
-				MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 4, 8)));
+		setByteOffset(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 4, 8)));
 
 		// UINT32: Number of bits for signal value in record.
-		setBitCount(
-				MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 8, 12)));
+		setBitCount(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 8, 12)));
 
 		// UINT32: Flags
 		setFlags(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 12, 16)));
 
 		// UINT32: Position of invalidation bit.
-		setInvalBitPos(
-				MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 16, 20)));
+		setInvalBitPos(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 16, 20)));
 
 		// UINT8: Precision for display of floating point values.
 		setPrecision(content[20]);
@@ -422,36 +413,29 @@
 
 		// UINT16: Length N of cn_at_reference list, i.e. number of attachments
 		// for this channel. Can be zero.
-		setAttachmentCount(
-				MDF4Util.readUInt16(MDFParser.getDataBuffer(content, 22, 24)));
+		setAttachmentCount(MDF4Util.readUInt16(MDFParser.getDataBuffer(content, 22, 24)));
 
 		// REAL: Minimum signal value that occurred for this signal (raw value)
-		setValRangeMin(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 24, 32)));
+		setValRangeMin(MDF4Util.readReal(MDFParser.getDataBuffer(content, 24, 32)));
 
 		// REAL: Maximum signal value that occurred for this signal (raw value)
-		setValRangeMax(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 32, 40)));
+		setValRangeMax(MDF4Util.readReal(MDFParser.getDataBuffer(content, 32, 40)));
 
 		// REAL: Lower limit for this signal (physical value for numeric
 		// conversion rule, otherwise raw value)
-		setLimitMin(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 40, 48)));
+		setLimitMin(MDF4Util.readReal(MDFParser.getDataBuffer(content, 40, 48)));
 
 		// REAL: Upper limit for this signal (physical value for numeric
 		// conversion rule, otherwise raw value)
-		setLimitMax(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 48, 56)));
+		setLimitMax(MDF4Util.readReal(MDFParser.getDataBuffer(content, 48, 56)));
 
 		// REAL: Lower extended limit for this signal (physical value for
 		// numeric conversion rule, otherwise raw value)
-		setLimitExtMin(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 56, 64)));
+		setLimitExtMin(MDF4Util.readReal(MDFParser.getDataBuffer(content, 56, 64)));
 
 		// REAL: Upper extended limit for this signal (physical value for
 		// numeric conversion rule, otherwise raw value)
-		setLimitExtMax(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 64, 72)));
+		setLimitExtMax(MDF4Util.readReal(MDFParser.getDataBuffer(content, 64, 72)));
 
 	}
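
The CNBLOCK fields byteOffset, bitOffset and bitCount parsed above locate a signal value inside a record. A sketch of how a raw value could be extracted with them, assuming a little-endian unsigned integer channel and, as the bitOffset comment requires, bitOffset + bitCount <= 64:

	static long rawValue(byte[] record, long byteOffset, int bitOffset, long bitCount) {
		long value = 0;
		int bytesNeeded = (int) ((bitOffset + bitCount + 7) / 8);
		for (int i = 0; i < bytesNeeded; i++) {
			value |= (record[(int) byteOffset + i] & 0xFFL) << (8 * i); // little-endian assembly
		}
		value >>>= bitOffset;                  // drop bits in front of the signal
		if (bitCount < 64) {
			value &= (1L << bitCount) - 1;     // mask to the signal width
		}
		return value;
	}
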
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DGBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DGBLOCK.java
index 3917f40..98a28bb 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DGBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DGBLOCK.java
@@ -85,10 +85,9 @@
 	 */
 	@Override
 	public String toString() {
-		return "DGBLOCK [lnkDgNext=" + getLnkDgNext().getPos() + ", lnkCgFirst="
-				+ getLnkCgFirst().getPos() + ", lnkData="
-				+ getLnkData().getPos() + ", lnkMdComment="
-				+ getLnkMdComment().getPos() + ", recIdSize=" + recIdSize + "]";
+		return "DGBLOCK [lnkDgNext=" + getLnkDgNext().getPos() + ", lnkCgFirst=" + getLnkCgFirst().getPos()
+				+ ", lnkData=" + getLnkData().getPos() + ", lnkMdComment=" + getLnkMdComment().getPos() + ", recIdSize="
+				+ recIdSize + "]";
 	}
 
 	/**
@@ -98,8 +97,7 @@
 	public void parse(byte[] content) throws IOException {
 
 		// UINT8: Number of Bytes used for record IDs in the data block.
-		setRecIdSize(
-				MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 0, 1)));
+		setRecIdSize(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 0, 1)));
 
 	}
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DLBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DLBLOCK.java
index 72af43b..ddc8a25 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DLBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DLBLOCK.java
@@ -124,10 +124,9 @@
 	 */
 	@Override
 	public String toString() {
-		return "DLBLOCK [lnkDlNext=" + getLnkDlNext() + ", lnkDlData="
-				+ Arrays.toString(getLnkDlData()) + ", flags=" + flags
-				+ ", count=" + count + ", equalLength=" + equalLength
-				+ ", offset=" + Arrays.toString(offset) + "]";
+		return "DLBLOCK [lnkDlNext=" + getLnkDlNext() + ", lnkDlData=" + Arrays.toString(getLnkDlData()) + ", flags="
+				+ flags + ", count=" + count + ", equalLength=" + equalLength + ", offset=" + Arrays.toString(offset)
+				+ "]";
 	}
 
 	@Override
@@ -135,13 +134,11 @@
 		setFlags(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 0, 1)));
 		setCount(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 4, 8)));
 		if (isEqualLengthFlag()) {
-			setEqualLength(MDF4Util
-					.readUInt64(MDFParser.getDataBuffer(content, 8, 16)));
+			setEqualLength(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 8, 16)));
 		} else {
 			long[] offset = new long[(int) getCount()];
 			for (int i = 0; i < offset.length; i++) {
-				offset[i] = MDF4Util.readUInt64(MDFParser.getDataBuffer(content,
-						8 + 8 * i, 16 + 8 * i));
+				offset[i] = MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 8 + 8 * i, 16 + 8 * i));
 			}
 			setOffset(offset);
 		}
@@ -187,12 +184,11 @@
 	 *         not.
 	 */
 	public boolean isImproveable(ArgumentStruct args) {
-		if(getCount() == 0){
+		if (getCount() == 0) {
 			return true;
 		}
 		// A data list that only contains one block is useless.
-		if (getCount() == 1
-				&& getLink(1).getLength() < args.maxblocksize) {
+		if (getCount() == 1 && getLink(1).getLength() < args.maxblocksize) {
 			return true;
 		}
 		// Split blocks if necessary
@@ -205,8 +201,7 @@
 			do {
 				for (int i = 0; i < curr.count; i++) {
 					String chldid = curr.getLink(i + 1).getId();
-					if (chldid.equals("##DT") || chldid.equals("##RD")
-							|| chldid.equals("##SD")) {
+					if (chldid.equals("##DT") || chldid.equals("##RD") || chldid.equals("##SD")) {
 						return true;
 					}
 				}
@@ -229,8 +224,13 @@
 				long datasectionlength = getOffset()[(int) (getCount() - 1)]; // Calculate
 				// last
 				// offset
-				datasectionlength += getLink((int) getCount()).getLength()
-						- 24L; // ... and add length of last block
+				datasectionlength += getLink((int) getCount()).getLength() - 24L; // ...
+																					// and
+																					// add
+																					// length
+																					// of
+																					// last
+																					// block
 				if (datasectionlength / args.maxblocksize + 1 < getCount()) {
 					// fewer blocks are possible
 					return true;
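
The last branch of DLBLOCK.isImproveable() reconstructs the total payload length from the last offset plus the last child's length minus its 24-byte MDF4 block header, and compares the minimal achievable block count against the current one. The same test as a standalone sketch:

	// "Fewer blocks are possible": the payload, cut into maxblocksize chunks, needs fewer children than the list has.
	static boolean fewerBlocksPossible(long[] offsets, long lastChildLength, long count, long maxblocksize) {
		long dataSectionLength = offsets[(int) (count - 1)] // offset of the last child in the section...
				+ lastChildLength - 24L;                    // ...plus its payload size
		return dataSectionLength / maxblocksize + 1 < count;
	}
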
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DZBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DZBLOCK.java
index e6f2e3a..e924ae8 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DZBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/DZBLOCK.java
@@ -118,19 +118,15 @@
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
-		setBlock_type(MDF4Util
-				.readCharsUTF8(MDFParser.getDataBuffer(content, 0, 2), 2));
+		setBlock_type(MDF4Util.readCharsUTF8(MDFParser.getDataBuffer(content, 0, 2), 2));
 
 		setZip_type(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 2, 3)));
 
-		setZip_parameters(
-				MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 4, 8)));
+		setZip_parameters(MDF4Util.readUInt32(MDFParser.getDataBuffer(content, 4, 8)));
 
-		setOrg_data_length(
-				MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 8, 16)));
+		setOrg_data_length(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 8, 16)));
 
-		setData_length(
-				MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 16, 24)));
+		setData_length(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 16, 24)));
 
 	}
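
The DZBLOCK fields parsed above (zip_type, dz_org_data_length, dz_data_length) are what a reader needs to restore the uncompressed payload. A sketch using java.util.zip, assuming zip_type 0 (plain deflate); the transposed variant would need an extra de-transposition pass after inflating:

	import java.util.zip.DataFormatException;
	import java.util.zip.Inflater;

	class DzInflateSketch {
		// Inflate a DZBLOCK payload back to dz_org_data_length bytes.
		static byte[] inflate(byte[] compressed, long orgDataLength) throws DataFormatException {
			Inflater inflater = new Inflater();
			inflater.setInput(compressed);
			byte[] out = new byte[(int) orgDataLength];
			int produced = inflater.inflate(out); // throws DataFormatException on corrupt input
			inflater.end();
			if (produced != orgDataLength) {
				throw new DataFormatException("Expected " + orgDataLength + " bytes, got " + produced);
			}
			return out;
		}
	}
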
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/EVBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/EVBLOCK.java
index e14eef8..934c0ac 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/EVBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/EVBLOCK.java
@@ -14,8 +14,8 @@
  * <p>
  * THE Event BLOCK <code>EVBLOCK</code>
  * </p>
- * The TXBLOCK is very similar to the MDBLOCK but only contains a plain string encoded in UTF-8. The text length results
- * from the block size.
+ * The TXBLOCK is very similar to the MDBLOCK but only contains a plain string
+ * encoded in UTF-8. The text length results from the block size.
  *
  * @author Tobias Leemann
  */
@@ -23,8 +23,6 @@
 
 	/** Data section */
 
-
-
 	/**
 	 * Parse a TXBLOCK from an existing MDFGenBlock
 	 *
@@ -40,7 +38,6 @@
 		parent.setPrec(this);
 	}
 
-
 	@Override
 	public String toString() {
 		return "EVBLOCK";
@@ -49,8 +46,10 @@
 	/**
 	 * Reads a TXBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/FHBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/FHBLOCK.java
index 52e9f5e..326088b 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/FHBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/FHBLOCK.java
@@ -97,12 +97,9 @@
 	@Override
 	public void parse(byte[] content) throws IOException {
 		setTime_ns(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 0, 8)));
-		setTz_offset_min(
-				MDF4Util.readInt16(MDFParser.getDataBuffer(content, 8, 10)));
-		setDst_offset_min(
-				MDF4Util.readInt16(MDFParser.getDataBuffer(content, 10, 12)));
-		setTime_flags(
-				MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 12, 13)));
+		setTz_offset_min(MDF4Util.readInt16(MDFParser.getDataBuffer(content, 8, 10)));
+		setDst_offset_min(MDF4Util.readInt16(MDFParser.getDataBuffer(content, 10, 12)));
+		setTime_flags(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 12, 13)));
 	}
 
 	@Override
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/HDBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/HDBLOCK.java
index 9cfdf53..799dbfa 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/HDBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/HDBLOCK.java
@@ -220,11 +220,9 @@
 
 	@Override
 	public String toString() {
-		return "HDBLOCK [ startTimeNs=" + startTimeNs + ", tzOffsetMin="
-				+ tzOffsetMin + ", dstOffsetMin=" + dstOffsetMin
-				+ ", timeFlags=" + timeFlags + ", timeClass=" + timeClass
-				+ ", flags=" + flags + ", startAngleRad=" + startAngleRad
-				+ ", startDistanceM=" + startDistanceM + "]";
+		return "HDBLOCK [ startTimeNs=" + startTimeNs + ", tzOffsetMin=" + tzOffsetMin + ", dstOffsetMin="
+				+ dstOffsetMin + ", timeFlags=" + timeFlags + ", timeClass=" + timeClass + ", flags=" + flags
+				+ ", startAngleRad=" + startAngleRad + ", startDistanceM=" + startDistanceM + "]";
 	}
 
 	@Override
@@ -232,24 +230,19 @@
 
 		// UINT64: Time stamp at start of measurement in nanoseconds elapsed
 		// since 00:00:00 01.01.1970
-		setStartTimeNs(
-				MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 0, 8)));
+		setStartTimeNs(MDF4Util.readUInt64(MDFParser.getDataBuffer(content, 0, 8)));
 
 		// INT16: Time zone offset in minutes.
-		setTzOffsetMin(
-				MDF4Util.readInt16(MDFParser.getDataBuffer(content, 8, 10)));
+		setTzOffsetMin(MDF4Util.readInt16(MDFParser.getDataBuffer(content, 8, 10)));
 
 		// INT16: Daylight saving time (DST) offset in minutes for start time
-		setDstOffsetMin(
-				MDF4Util.readInt16(MDFParser.getDataBuffer(content, 10, 12)));
+		setDstOffsetMin(MDF4Util.readInt16(MDFParser.getDataBuffer(content, 10, 12)));
 
 		// UINT8: Time flags block.setTimeFlags(MDF4Util.readUInt8(bb));
-		setTimeFlags(
-				MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 12, 13)));
+		setTimeFlags(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 12, 13)));
 
 		// UINT8: Time quality class
-		setTimeClass(
-				MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 13, 14)));
+		setTimeClass(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 13, 14)));
 
 		// UINT8: Flags block.setFlags(MDF4Util.readUInt8(bb)); if
 		setFlags(MDF4Util.readUInt8(MDFParser.getDataBuffer(content, 14, 15)));
@@ -258,12 +251,10 @@
 
 		// REAL: Start angle in radians at start of measurement (only for
 		// angle synchronous measurements)
-		setStartAngleRad(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 16, 24)));
+		setStartAngleRad(MDF4Util.readReal(MDFParser.getDataBuffer(content, 16, 24)));
 
 		// REAL: Start distance in meters at start of measurement
-		setStartDistanceM(
-				MDF4Util.readReal(MDFParser.getDataBuffer(content, 24, 32)));
+		setStartDistanceM(MDF4Util.readReal(MDFParser.getDataBuffer(content, 24, 32)));
 
 	}
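
The HDBLOCK stores its start time as nanoseconds since the 1970 epoch plus separate time-zone and DST offsets in minutes. A sketch that combines the three fields parsed above into a java.time timestamp; validating the time flags (whether the offsets are actually usable) is omitted:

	import java.time.Instant;
	import java.time.OffsetDateTime;
	import java.time.ZoneOffset;

	class HdStartTimeSketch {
		static OffsetDateTime startTime(long startTimeNs, int tzOffsetMin, int dstOffsetMin) {
			Instant utc = Instant.ofEpochSecond(startTimeNs / 1_000_000_000L, startTimeNs % 1_000_000_000L);
			ZoneOffset offset = ZoneOffset.ofTotalSeconds((tzOffsetMin + dstOffsetMin) * 60);
			return utc.atOffset(offset);
		}
	}
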
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDBLOCK.java
index 5ec4850..a870432 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDBLOCK.java
@@ -15,9 +15,10 @@
  * <p>
  * THE META DATA BLOCK <code>MDBLOCK</code>
  * </p>
- * The MDBLOCK contains information encoded as XML string. For example this can be comments for the measured data file,
- * file history information or the identification of a channel. This information is ruled by the parent block and
- * follows specific XML schemas definitions.
+ * The MDBLOCK contains information encoded as XML string. For example this can
+ * be comments for the measured data file, file history information or the
+ * identification of a channel. This information is ruled by the parent block
+ * and follows specific XML schema definitions.
  *
  * @author Christian Rechner, Tobias Leemann
  */
@@ -53,7 +54,6 @@
 		this.mdData = mdData;
 	}
 
-
 	@Override
 	public String toString() {
 		return "MDBLOCK [mdData=" + mdData + "]";
@@ -62,14 +62,16 @@
 	/**
 	 * Reads a MDBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// ReadXML String
 		setMdData(MDF4Util.readCharsUTF8(ByteBuffer.wrap(content), content.length));
-		//TODO: Bytes after zero termination?
+		// TODO: Bytes after zero termination?
 	}
 
 }
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4BlocksSplittMerger.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4BlocksSplittMerger.java
index 5b7933a..a17945d 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4BlocksSplittMerger.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4BlocksSplittMerger.java
@@ -145,10 +145,12 @@
 	 * @param oldsection
 	 *            The data section that is split up. This is used to create a
 	 *            provider and set section length.
-	 * @param maxblocksize The maximum size of a block in the output. Should be a multiple of the size of one record.
+	 * @param maxblocksize
+	 *            The maximum size of a block in the output. Should be a
+	 *            multiple of the size of one record.
 	 */
-	public MDF4BlocksSplittMerger(MDF4ProcessWriter ps, String blocktype,
-			MDF4GenBlock parentnode, MDF4GenBlock oldsection, long maxblocksize) {
+	public MDF4BlocksSplittMerger(MDF4ProcessWriter ps, String blocktype, MDF4GenBlock parentnode,
+			MDF4GenBlock oldsection, long maxblocksize) {
 		this.ps = ps;
 		this.blocktype = blocktype;
 		unzip = ps.getArgs().unzip;
@@ -180,8 +182,7 @@
 	 * @param prov
 	 *            The DataProvider to read from.
 	 */
-	public MDF4BlocksSplittMerger(MDF4ProcessWriter ps, String blocktype,
-			MDF4GenBlock parentnode, long totdatalength,
+	public MDF4BlocksSplittMerger(MDF4ProcessWriter ps, String blocktype, MDF4GenBlock parentnode, long totdatalength,
 			MDF4DataProvider prov, long maxblocksize) {
 		this.ps = ps;
 		this.blocktype = blocktype;
@@ -217,8 +218,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is given an an invalid format.
 	 */
-	public void splitmerge(MDF4GenBlock datablock)
-			throws IOException, DataFormatException {
+	public void splitmerge(MDF4GenBlock datablock) throws IOException, DataFormatException {
 		prov = new MDF4DataProvider(datablock, reader);
 		BlockReadPtr = 0;
 		long leftbytes;
@@ -244,8 +244,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void splitmerge(long startaddress, long length)
-			throws IOException, DataFormatException {
+	public void splitmerge(long startaddress, long length) throws IOException, DataFormatException {
 		// we are doing a non-blockwise read
 		towrite = null;
 		GlobalReadPtr = startaddress;
@@ -263,14 +262,12 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void appendDataFromPos(long leftbytes)
-			throws IOException, DataFormatException {
+	public void appendDataFromPos(long leftbytes) throws IOException, DataFormatException {
 		// check if space in curr-Block is available, and fill with first data,
 		// or attach all data if it fits
 		if (curr != null) {
 			if (datawritten < thisblockend) { // Space available
-				long bytestowrite = leftbytes < thisblockend - datawritten
-						? leftbytes : thisblockend - datawritten;
+				long bytestowrite = leftbytes < thisblockend - datawritten ? leftbytes : thisblockend - datawritten;
 				abstractcopy(bytestowrite);
 				datawritten += bytestowrite;
 
@@ -287,9 +284,8 @@
 			// we need at least
 			// one new block
 			// last block: adapt length, else use maxlength
-			long newblocklength = totdatalength < (blockcounter + 1)
-					* maxblocksize ? totdatalength - blockcounter * maxblocksize
-							: maxblocksize;
+			long newblocklength = totdatalength < (blockcounter + 1) * maxblocksize
+					? totdatalength - blockcounter * maxblocksize : maxblocksize;
 			curr = abstractcreate(newblocklength, blocktype); // This method
 			// creates a
 			// zipblock if
@@ -300,8 +296,7 @@
 
 			thisblockend += newblocklength;
 
-			long bytestowrite = leftbytes < newblocklength ? leftbytes
-					: newblocklength;
+			long bytestowrite = leftbytes < newblocklength ? leftbytes : newblocklength;
 			abstractcopy(bytestowrite);
 
 			datawritten += bytestowrite;
@@ -337,8 +332,7 @@
 
 				ps.performPut(dzblk.getHeaderBytes());
 				ps.performPut(dzblk.getBodyBytes());
-				ps.performPut(ByteBuffer.wrap(output), compressedDataLength,
-						false);
+				ps.performPut(ByteBuffer.wrap(output), compressedDataLength, false);
 				ps.writeSpacer(compressedDataLength);
 			} else {
 				ps.writeSpacer(curr.getLength());
@@ -359,8 +353,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public ByteBuffer abstractread(int length)
-			throws IOException, DataFormatException {
+	public ByteBuffer abstractread(int length) throws IOException, DataFormatException {
 		if (towrite != null) {
 			// blockwise
 
@@ -386,8 +379,7 @@
 	 */
 	public void abstractput(byte[] datasection) {
 		if (curr.getId().equals("##DZ")) {
-			System.arraycopy(datasection, 0, uncompressedoutData,
-					uncompressedWritePtr, datasection.length);
+			System.arraycopy(datasection, 0, uncompressedoutData, uncompressedWritePtr, datasection.length);
 			uncompressedWritePtr += datasection.length;
 		} else {
 			ps.performPut(datasection);
@@ -424,16 +416,13 @@
 	 *             If an I/O error occurs.
 	 * @see checkfinalized
 	 */
-	public MDF4GenBlock abstractcreate(long newblocklength, String blocktype)
-			throws IOException {
+	public MDF4GenBlock abstractcreate(long newblocklength, String blocktype) throws IOException {
 		uncompressedWritePtr = 0;
 		MDF4GenBlock ret;
 		if (estimatedblockcounter != 1 && blockcounter % MAX_LIST_COUNT == 0) {
 			// new list block needs to be created.
-			int childblocks = (int) (estimatedblockcounter
-					- blockcounter < MAX_LIST_COUNT
-					? estimatedblockcounter - blockcounter
-							: MAX_LIST_COUNT);
+			int childblocks = (int) (estimatedblockcounter - blockcounter < MAX_LIST_COUNT
+					? estimatedblockcounter - blockcounter : MAX_LIST_COUNT);
 			DLBLOCK newparentlist = createDList(childblocks);
 			if (parentlist == null) {
 				if (structuralroot == null) {
@@ -474,7 +463,9 @@
 
 	/**
 	 * Creates a HLBLOCK if needed.
-	 * @throws IOException If an I/O-Error occurs.
+	 * 
+	 * @throws IOException
+	 *             If an I/O-Error occurs.
 	 */
 	public void createStructure() throws IOException {
 		// Do we need a list block?
@@ -495,7 +486,7 @@
 			if (totdatalength % maxblocksize == 0) {
 				estimatedblockcounter = (int) (totdatalength / maxblocksize);
 			} else {
-				estimatedblockcounter = (int) (totdatalength / maxblocksize)+ 1;
+				estimatedblockcounter = (int) (totdatalength / maxblocksize) + 1;
 			}
 
 			// if we created a HLblock the root of the subtree will be this one
@@ -519,8 +510,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void abstractcopy(long length)
-			throws IOException, DataFormatException {
+	public void abstractcopy(long length) throws IOException, DataFormatException {
 		long written = 0L;
 		do {
 			int bytesread = 0;
@@ -529,16 +519,14 @@
 				ByteBuffer custombuffer = abstractread(bytesread);
 				abstractput(custombuffer, bytesread);
 			} else {
-				ByteBuffer buffer = abstractread(
-						MDF4ProcessWriter.MAX_OUTPUTBLOCKSIZE);
+				ByteBuffer buffer = abstractread(MDF4ProcessWriter.MAX_OUTPUTBLOCKSIZE);
 				bytesread = MDF4ProcessWriter.MAX_OUTPUTBLOCKSIZE;
 				abstractput(buffer, bytesread);
 			}
 			written += bytesread;
 		} while (written < length);
 		if (length != written) {
-			throw new IOException("written length not equal to blocklength: "
-					+ length + "/" + written);
+			throw new IOException("written length not equal to blocklength: " + length + "/" + written);
 		}
 	}
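
As an aside, the newblocklength and estimatedblockcounter arithmetic reformatted above amounts to a ceiling division of the total data length by the maximum block size. A minimal standalone sketch of that rule follows; the class and method names are invented here for illustration and are not part of the sorter's API.

// Sketch: split a data section of totalLength bytes into chunks of at most maxBlockSize,
// mirroring the estimatedblockcounter / newblocklength arithmetic in the hunks above.
public class SplitEstimateSketch {
	static long[] chunkLengths(long totalLength, long maxBlockSize) {
		int blocks = (int) (totalLength / maxBlockSize);
		if (totalLength % maxBlockSize != 0) {
			blocks++; // ceiling division: a partial last block still needs a block of its own
		}
		long[] lengths = new long[blocks];
		for (int i = 0; i < blocks; i++) {
			long remaining = totalLength - (long) i * maxBlockSize;
			lengths[i] = remaining < maxBlockSize ? remaining : maxBlockSize;
		}
		return lengths;
	}

	public static void main(String[] args) {
		for (long l : chunkLengths(10_500, 4_096)) {
			System.out.println(l); // prints 4096, 4096, 2308
		}
	}
}
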
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4DataProvider.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4DataProvider.java
index 981854f..623c934 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4DataProvider.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4DataProvider.java
@@ -74,9 +74,9 @@
 	 *            FileChannel to the input file.
 	 */
 	public MDF4DataProvider(MDF4GenBlock datasectionhead, FileChannel reader) {
-		//empty data section
-		if(datasectionhead == null){
-			sectype='0';
+		// empty data section
+		if (datasectionhead == null) {
+			sectype = '0';
 			return;
 		}
 		this.datasectionhead = datasectionhead;
@@ -138,13 +138,11 @@
 	 *             If zipped data is in an invalid format.
 	 */
 	@Override
-	public void read(long globaloffset, ByteBuffer data)
-			throws IOException, DataFormatException {
+	public void read(long globaloffset, ByteBuffer data) throws IOException, DataFormatException {
 
 		if (globaloffset + data.capacity() > sectionlength) {
 			throw new IllegalArgumentException(
-					"Invalid read access on Data Provider. Section is only "
-							+ sectionlength + " bytes long.");
+					"Invalid read access on Data Provider. Section is only " + sectionlength + " bytes long.");
 		}
 		if (dataarr != null) {
 			data.put(dataarr, (int) globaloffset, data.capacity());
@@ -157,8 +155,7 @@
 
 		// check if block was last processed block (performance optimization)
 		if (lastprocessed != null) {
-			if (lastprocessedstart <= globaloffset
-					&& lastprocessedend > globaloffset) {
+			if (lastprocessedstart <= globaloffset && lastprocessedend > globaloffset) {
 				blk = lastprocessed;
 				blkoffset = globaloffset - lastprocessedstart;
 			}
@@ -171,41 +168,38 @@
 			blkoffset = (long) bo[1];
 			lastprocessedstart = (long) bo[2];
 			lastprocessed = blk;
-			long lpdatasize = lastprocessed instanceof DZBLOCK
-					? ((DZBLOCK) lastprocessed).getOrg_data_length()
-							: lastprocessed.getLength() - 24L;
-					lastprocessedend = lastprocessedstart + lpdatasize;
+			long lpdatasize = lastprocessed instanceof DZBLOCK ? ((DZBLOCK) lastprocessed).getOrg_data_length()
+					: lastprocessed.getLength() - 24L;
+			lastprocessedend = lastprocessedstart + lpdatasize;
 		}
 
 		// length check
-		long datasize = blk instanceof DZBLOCK
-				? ((DZBLOCK) blk).getOrg_data_length() : blk.getLength() - 24L;
-				if (blkoffset + data.capacity() > datasize) {
-					int readablesize = (int) (datasize - blkoffset);
-					ByteBuffer readable = ByteBuffer.allocate(readablesize);
+		long datasize = blk instanceof DZBLOCK ? ((DZBLOCK) blk).getOrg_data_length() : blk.getLength() - 24L;
+		if (blkoffset + data.capacity() > datasize) {
+			int readablesize = (int) (datasize - blkoffset);
+			ByteBuffer readable = ByteBuffer.allocate(readablesize);
 
-					// divide and conquer: Read available section first
-					read(globaloffset, readable);
+			// divide and conquer: Read available section first
+			read(globaloffset, readable);
 
-					// read unavailable section
-					ByteBuffer unreadable = ByteBuffer
-							.allocate(data.capacity() - readablesize);
-					read(globaloffset + readablesize, unreadable);
+			// read unavailable section
+			ByteBuffer unreadable = ByteBuffer.allocate(data.capacity() - readablesize);
+			read(globaloffset + readablesize, unreadable);
 
-					// merge sections
-					data.put(readable);
-					data.put(unreadable);
-					data.rewind();
-					return;
-				}
+			// merge sections
+			data.put(readable);
+			data.put(unreadable);
+			data.rewind();
+			return;
+		}
 
-				if (blk.getId().equals("##DZ")) {
-					cache.read((DZBLOCK) blk, blkoffset, data);
-				} else {
-					reader.position(blk.getPos() + 24L + blkoffset);
-					reader.read(data);
-				}
-				data.rewind();
+		if (blk.getId().equals("##DZ")) {
+			cache.read((DZBLOCK) blk, blkoffset, data);
+		} else {
+			reader.position(blk.getPos() + 24L + blkoffset);
+			reader.read(data);
+		}
+		data.rewind();
 	}
 
 	/**
@@ -225,13 +219,11 @@
 	 *             If zipped data is in an invalid format.
 	 */
 	@Override
-	public ByteBuffer cachedRead(long globaloffset, int length)
-			throws IOException, DataFormatException {
+	public ByteBuffer cachedRead(long globaloffset, int length) throws IOException, DataFormatException {
 		// argument check
 		if (globaloffset + length > sectionlength) {
 			throw new IllegalArgumentException(
-					"Invalid read access on Data Provider. Section is only "
-							+ sectionlength + " bytes long.");
+					"Invalid read access on Data Provider. Section is only " + sectionlength + " bytes long.");
 		}
 
 		if (dataarr != null) {
@@ -262,24 +254,21 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void read(long blockoffset, ByteBuffer data, MDF4GenBlock blk)
-			throws IOException, DataFormatException {
+	public void read(long blockoffset, ByteBuffer data, MDF4GenBlock blk) throws IOException, DataFormatException {
 		// argument check
-		long datalength = blk instanceof DZBLOCK
-				? ((DZBLOCK) blk).getOrg_data_length() : blk.getLength() - 24L;
-				if (blockoffset + data.capacity() > datalength) {
-					throw new IllegalArgumentException(
-							"Invalid read access on Data Provider. Block is only "
-									+ datalength + " bytes long.");
-				}
+		long datalength = blk instanceof DZBLOCK ? ((DZBLOCK) blk).getOrg_data_length() : blk.getLength() - 24L;
+		if (blockoffset + data.capacity() > datalength) {
+			throw new IllegalArgumentException(
+					"Invalid read access on Data Provider. Block is only " + datalength + " bytes long.");
+		}
 
-				if (blk.getId().equals("##DZ")) {
-					cache.read((DZBLOCK) blk, blockoffset, data);
-				} else {
-					reader.position(blk.getPos() + 24L + blockoffset);
-					reader.read(data);
-					data.rewind();
-				}
+		if (blk.getId().equals("##DZ")) {
+			cache.read((DZBLOCK) blk, blockoffset, data);
+		} else {
+			reader.position(blk.getPos() + 24L + blockoffset);
+			reader.read(data);
+			data.rewind();
+		}
 	}
 
 	/**
@@ -316,8 +305,7 @@
 					drag = curr;
 					curr = (DLBLOCK) curr.getLink(0);
 				}
-				ret[0] = drag.getLink(
-						(int) (blknum - (pastblocks - drag.getCount()) + 1));
+				ret[0] = drag.getLink((int) (blknum - (pastblocks - drag.getCount()) + 1));
 				ret[1] = globaloffset % drag.getEqualLength();
 				ret[2] = drag.getEqualLength() * blknum;
 			} else {
@@ -328,18 +316,17 @@
 				long draglength = 0; // length of drag block.
 				while (curroff <= globaloffset) {
 					drag = actlist.getLink(listblknum + 1);
-					draglength = drag.getId().equals("##DZ")
-							? ((DZBLOCK) drag).getOrg_data_length()
-									: drag.getLength() - 24L;
+					draglength = drag.getId().equals("##DZ") ? ((DZBLOCK) drag).getOrg_data_length()
+							: drag.getLength() - 24L;
 
-							curroff = actlist.getOffset()[listblknum] + draglength;
+					curroff = actlist.getOffset()[listblknum] + draglength;
 
-							listblknum++;
-							// switch to next list, if end is reached.
-							if (listblknum == actlist.getCount()) {
-								actlist = (DLBLOCK) actlist.getLnkDlNext();
-								listblknum = 0;
-							}
+					listblknum++;
+					// switch to next list, if end is reached.
+					if (listblknum == actlist.getCount()) {
+						actlist = (DLBLOCK) actlist.getLnkDlNext();
+						listblknum = 0;
+					}
 				}
 				ret[0] = drag;
 				ret[1] = globaloffset - (curroff - draglength);
@@ -360,7 +347,7 @@
 	 * @return The length.
 	 */
 	private long calculateLength() {
-		if(datasectionhead ==null){
+		if (datasectionhead == null) {
 			return 0;
 		}
 		long newlength = 0;
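
The recursive read reindented above handles requests that cross a data-block boundary by reading the available part first and recursing for the remainder. Below is a self-contained sketch of that divide-and-conquer pattern with an in-memory byte array standing in for the file; all names are illustrative assumptions, not the provider's real API.

import java.nio.ByteBuffer;

// Sketch of the divide-and-conquer read used above: if a request crosses the end of the
// current block, read what is available, recurse for the rest, then merge both parts.
public class BoundaryReadSketch {
	private final byte[] source = new byte[100]; // stand-in for the file's data section
	private final int blockSize = 32;            // stand-in for one data block's payload size

	public void read(long offset, ByteBuffer data) {
		long blockEnd = (offset / blockSize + 1) * blockSize;
		if (offset + data.capacity() > blockEnd) {
			int readable = (int) (blockEnd - offset);
			ByteBuffer first = ByteBuffer.allocate(readable);
			read(offset, first);
			ByteBuffer second = ByteBuffer.allocate(data.capacity() - readable);
			read(offset + readable, second);
			data.put(first);
			data.put(second);
			data.rewind();
			return;
		}
		// request fits inside one block: copy directly
		data.put(source, (int) offset, data.capacity());
		data.rewind();
	}
}
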
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4GenBlock.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4GenBlock.java
index ff3bbd9..2a7d776 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4GenBlock.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4GenBlock.java
@@ -24,8 +24,7 @@
  *
  * @author Tobias Leemann
  */
-public class MDF4GenBlock extends MDFGenBlock
-implements Comparable<MDF4GenBlock> {
+public class MDF4GenBlock extends MDFGenBlock implements Comparable<MDF4GenBlock> {
 
 	// Number of links
 	// UINT64
@@ -171,8 +170,7 @@
 	 */
 	@Override
 	public String toString() {
-		return "BLOCK [pos=" + pos + ", id=" + id + ", length=" + length
-				+ ", linkCount=" + linkCount + "]";
+		return "BLOCK [pos=" + pos + ", id=" + id + ", length=" + length + ", linkCount=" + linkCount + "]";
 	}
 
 	/**
@@ -242,8 +240,7 @@
 	 *             If an I/O error occurs.
 	 */
 	public void parse(byte[] content) throws IOException {
-		throw new UnsupportedOperationException(
-				"parse not valid on unspecified block.");
+		throw new UnsupportedOperationException("parse not valid on unspecified block.");
 	}
 
 	/**
@@ -340,14 +337,12 @@
 			// is sublist improvable? If yes, we have a problem here.
 			if (((DLBLOCK) dlblk).isImproveable(args)) {
 				startRecursiveTouch();
-				addProblem(new MDFCompatibilityProblem(
-						MDFProblemType.LINKED_DATALIST_PROBLEM, this));
+				addProblem(new MDFCompatibilityProblem(MDFProblemType.LINKED_DATALIST_PROBLEM, this));
 			} else {
 				if (args.unzip) {
 					// we have a problem anyway, if data should be unzipped.
 					startRecursiveTouch();
-					addProblem(new MDFCompatibilityProblem(
-							MDFProblemType.LINKED_DATALIST_PROBLEM, this));
+					addProblem(new MDFCompatibilityProblem(MDFProblemType.LINKED_DATALIST_PROBLEM, this));
 				}
 			}
 			break;
@@ -357,8 +352,7 @@
 			}
 			if (((DLBLOCK) this).isImproveable(args)) {
 				startRecursiveTouch();
-				addProblem(new MDFCompatibilityProblem(
-						MDFProblemType.LINKED_DATALIST_PROBLEM, this));
+				addProblem(new MDFCompatibilityProblem(MDFProblemType.LINKED_DATALIST_PROBLEM, this));
 			}
 			break;
 		case "##DG":
@@ -370,8 +364,7 @@
 			CGBLOCK blk = (CGBLOCK) dgblk.getLnkCgFirst();
 			if (blk.getLnkCgNext() != null) {
 				// more than one channel group per datagroup! Unsorted.
-				addProblem(new MDFCompatibilityProblem(
-						MDFProblemType.UNSORTED_DATA_PROBLEM, this));
+				addProblem(new MDFCompatibilityProblem(MDFProblemType.UNSORTED_DATA_PROBLEM, this));
 				// which block will be touched?
 				// all channel groups!
 				do {
@@ -382,16 +375,14 @@
 			break;
 		case "##DZ":
 			if (args.unzip) {
-				addProblem(new MDFCompatibilityProblem(
-						MDFProblemType.ZIPPED_DATA_PROBLEM, this));
+				addProblem(new MDFCompatibilityProblem(MDFProblemType.ZIPPED_DATA_PROBLEM, this));
 			}
 			break;
 		case "##DT":
 		case "##SD":
 		case "##RD":
 			if (!args.unzip) {
-				addProblem(new MDFCompatibilityProblem(
-						MDFProblemType.UNZIPPED_DATA_PROBLEM, this));
+				addProblem(new MDFCompatibilityProblem(MDFProblemType.UNZIPPED_DATA_PROBLEM, this));
 			}
 			break;
 		}
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Parser.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Parser.java
index 34bec3a..5db877e 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Parser.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Parser.java
@@ -44,13 +44,11 @@
 	 * @throws IOException
 	 *             If an input error occurs.
 	 */
-	private static byte[] readBytes(int bytes, FileChannel in)
-			throws IOException {
+	private static byte[] readBytes(int bytes, FileChannel in) throws IOException {
 		ByteBuffer chunk = ByteBuffer.allocate(bytes);
 		int bytesread = 0;
 		if ((bytesread = in.read(chunk)) != bytes) {
-			System.err.println(
-					"Read only " + bytesread + " Bytes instead of " + bytes);
+			System.err.println("Read only " + bytesread + " Bytes instead of " + bytes);
 		}
 		return chunk.array();
 	}
@@ -104,8 +102,8 @@
 
 		MDFSorter.log.log(Level.INFO, "Needed " + fileruns + " runs.");
 		MDFSorter.log.log(Level.INFO, "Found " + blocklist.size() + " blocks.");
-		MDFSorter.log.log(Level.FINE,
-				"ValidatorListSize: " + (foundblocks + 1)); // Expected number
+		MDFSorter.log.log(Level.FINE, "ValidatorListSize: " + (foundblocks + 1)); // Expected
+																					// number
 		// of nodes in Vector
 		// MDFValidators
 		// node list for
@@ -136,8 +134,7 @@
 		// Read links and create new blocks
 		head = readBytes((int) (blklinkcount * 8), in);
 		for (int i = 0; i < blklinkcount; i++) {
-			long nextlink = MDF4Util
-					.readLink(getDataBuffer(head, i * 8, (i + 1) * 8));
+			long nextlink = MDF4Util.readLink(getDataBuffer(head, i * 8, (i + 1) * 8));
 			if (nextlink != 0) {
 				if (blocklist.containsKey(nextlink)) {
 					start.addLink(i, blocklist.get(nextlink));
@@ -169,13 +166,11 @@
 	 */
 	public static ByteBuffer getDataBuffer(byte[] data, int start, int end) {
 		if (start >= 0 && end <= data.length) {
-			return java.nio.ByteBuffer
-					.wrap(Arrays.copyOfRange(data, start, end));
+			return java.nio.ByteBuffer.wrap(Arrays.copyOfRange(data, start, end));
 		} else {
 			// just for testing
 			throw new ArrayIndexOutOfBoundsException(
-					"Tried to access bytes " + start + " to " + end
-					+ "with array length " + data.length);
+					+ " with array length " + data.length);
 		}
 	}
 
@@ -192,18 +187,17 @@
 		byte[] content = null;
 		// parse special blocktypes more precisely.
 
-
 		MDF4GenBlock sp = null;
 		switch (blk.getId()) {
 		case "##AT":
-			//sp = new ATBLOCK(blk);
+			// sp = new ATBLOCK(blk);
 			break;
 		case "##CA":
-			//sp = new CABLOCK(blk);
+			// sp = new CABLOCK(blk);
 			break;
-			//throw new UnsupportedOperationException("CA Block found!");
+		// throw new UnsupportedOperationException("CA Block found!");
 		case "##CC":
-			//sp = new CCBLOCK(blk);
+			// sp = new CCBLOCK(blk);
 			break;
 		case "##CG":
 			sp = new CGBLOCK(blk);
@@ -212,7 +206,7 @@
 			sp = new CNBLOCK(blk);
 			break;
 		case "##CH":
-			//sp = new CHBLOCK(blk);
+			// sp = new CHBLOCK(blk);
 			break;
 		case "##DG":
 			sp = new DGBLOCK(blk);
@@ -229,7 +223,7 @@
 			sp = new DZBLOCK(blk);
 			break;
 		case "##EV":
-			//sp = new EVBLOCK(blk);
+			// sp = new EVBLOCK(blk);
 			break;
 		case "##FH":
 			sp = new FHBLOCK(blk);
@@ -244,21 +238,21 @@
 			sp = new MDBLOCK(blk);
 			break;
 		case "##SI":
-			//sp = new SIBLOCK(blk);
+			// sp = new SIBLOCK(blk);
 			break;
 		case "##SR":
-			//sp = new SRBLOCK(blk);
+			// sp = new SRBLOCK(blk);
 			break;
 		case "##TX":
 			sp = new TXBLOCK(blk);
 			break;
 		default:
-			System.err.println("Unknown block of type "+ blk.getId() + " found.");
+			System.err.println("Unknown block of type " + blk.getId() + " found.");
 		}
 
-		if (blk.getId().equals("##DZ")){
+		if (blk.getId().equals("##DZ")) {
 			content = readBytes(24, in);
-		} else if (sp!=null) {
+		} else if (sp != null) {
 			content = readBytes((int) sectionsize, in);
 		}
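
readBytes above only warns when FileChannel.read returns fewer bytes than requested; a common way to guarantee a full read is to loop until the buffer is filled. This is a hedged sketch of that pattern, not what the parser currently does.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

// Sketch: FileChannel.read may return fewer bytes than requested, so a robust full read
// loops until the buffer is filled or the end of the file is reached.
public class FullReadSketch {
	static byte[] readFully(FileChannel in, int bytes) throws IOException {
		ByteBuffer chunk = ByteBuffer.allocate(bytes);
		while (chunk.hasRemaining()) {
			if (in.read(chunk) < 0) {
				throw new IOException("Unexpected end of file after " + chunk.position() + " bytes.");
			}
		}
		return chunk.array();
	}
}
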
 
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4ProcessWriter.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4ProcessWriter.java
index b492dc7..da7a680 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4ProcessWriter.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4ProcessWriter.java
@@ -49,8 +49,7 @@
 	 * @param args
 	 *            The arguments of this program call.
 	 */
-	public MDF4ProcessWriter(MDFFileContent<MDF4GenBlock> filestructure,
-			ArgumentStruct args) {
+	public MDF4ProcessWriter(MDFFileContent<MDF4GenBlock> filestructure, ArgumentStruct args) {
 		this.filestructure = filestructure;
 		this.args = args;
 		writtenblocks = new LinkedList<MDF4GenBlock>();
@@ -75,11 +74,10 @@
 
 		// Start writer Thread
 
+		Thread t;
+		long start; // Variables used inside try.
 
-
-		Thread t; long start; //Variables used inside try.
-
-		try(DataBlockBuffer buf = new DataBlockBuffer()){
+		try (DataBlockBuffer buf = new DataBlockBuffer()) {
 			start = System.currentTimeMillis();
 			t = new Thread(new WriteWorker(out, buf));
 			t.start();
@@ -108,8 +106,7 @@
 				} else {
 					if (blk.getProblems() != null) {
 						for (MDFCompatibilityProblem p : blk.getProblems()) {
-							MDFSorter.log.log(Level.FINE,
-									"Problem of Type: " + p.getType());
+							MDFSorter.log.log(Level.FINE, "Problem of Type: " + p.getType());
 						}
 						solveProblem(blk.getProblems());
 					} else {
@@ -127,7 +124,8 @@
 			// Flush Cache
 			myCache.flush();
 		}
-		// signal buffer that all data is send (try), and wait for completion of the
+		// signal buffer that all data is sent (try), and wait for completion of
+		// the
 		// write operation.
 		try {
 			t.join();
@@ -137,8 +135,7 @@
 
 		out.close();
 		MDFSorter.log.log(Level.INFO, "Wrote " + writeptr / 1000 + " kB.");
-		MDFSorter.log.log(Level.INFO,
-				"Writing took " + (System.currentTimeMillis() - start) + " ms");
+		MDFSorter.log.log(Level.INFO, "Writing took " + (System.currentTimeMillis() - start) + " ms");
 
 		// Update links with RandomAccessFile
 		RandomAccessFile r = new RandomAccessFile(args.outputname, "rw");
@@ -167,10 +164,8 @@
 		boolean ret = false;
 		for (MDF4GenBlock blk : filestructure.getList()) {
 			for (int i = 0; i < blk.getLinkCount(); i++) {
-				if (blk.getLink(i) != null
-						&& blk.getLink(i).getProblems() != null) {
-					for (MDFCompatibilityProblem p : blk.getLink(i)
-							.getProblems()) {
+				if (blk.getLink(i) != null && blk.getLink(i).getProblems() != null) {
+					for (MDFCompatibilityProblem p : blk.getLink(i).getProblems()) {
 						p.setParentnode(blk);
 					}
 					ret = true;
@@ -191,8 +186,7 @@
 	 *             If an I/O error occurs.
 	 */
 	@Override
-	public void copyBlock(MDF4GenBlock blk, FileChannel reader)
-			throws IOException {
+	public void copyBlock(MDF4GenBlock blk, FileChannel reader) throws IOException {
 		reader.position(blk.getPos());
 		blk.setOutputpos(writeptr);
 
@@ -204,8 +198,7 @@
 		do {
 			int bytesread;
 			if (written + MAX_OUTPUTBLOCKSIZE > length) {
-				ByteBuffer custombuffer = ByteBuffer
-						.allocate((int) (length - written));
+				ByteBuffer custombuffer = ByteBuffer.allocate((int) (length - written));
 				bytesread = reader.read(custombuffer);
 				performPut(custombuffer, bytesread, false);
 			} else {
@@ -216,8 +209,7 @@
 			written += bytesread;
 		} while (written < length);
 		if (length != written) {
-			throw new IOException("written length not equal to blocklength: "
-					+ length + "/" + written);
+			throw new IOException("written length not equal to blocklength: " + length + "/" + written);
 		}
 		// insert space if length%8!=0
 		if (length % 8 != 0) {
@@ -252,8 +244,7 @@
 	 * @throws DataFormatException
 	 *             If zipped data is in an invalid format.
 	 */
-	public void solveProblem(List<MDFCompatibilityProblem> l)
-			throws IOException, DataFormatException {
+	public void solveProblem(List<MDFCompatibilityProblem> l) throws IOException, DataFormatException {
 		if (l.size() != 1) {
 			System.out.println("To many Problems.");
 			// This may be supported in later versions.
@@ -264,8 +255,7 @@
 			MDF4GenBlock node = (MDF4GenBlock) prob.getStartnode();
 			MDF4GenBlock parentnode = (MDF4GenBlock) prob.getParentnode();
 
-			if (probtype == MDFProblemType.LINKED_DATALIST_PROBLEM
-					|| probtype == MDFProblemType.UNZIPPED_DATA_PROBLEM
+			if (probtype == MDFProblemType.LINKED_DATALIST_PROBLEM || probtype == MDFProblemType.UNZIPPED_DATA_PROBLEM
 					|| prob.getType() == MDFProblemType.ZIPPED_DATA_PROBLEM) {
 
 				// What types of Elements are stored in the list? Possible ##DT,
@@ -286,11 +276,12 @@
 
 				// Skip DL block to first child
 				if (typechecknode instanceof DLBLOCK) {
-					if(typechecknode.getLinkCount()>1){
-						//Data list with children
+					if (typechecknode.getLinkCount() > 1) {
+						// Data list with children
 						typechecknode = typechecknode.getLink(1);
 					} else {
-						//Data list with no children, can just be omitted, remove link
+						// Data list with no children, can just be omitted,
+						// remove link
 						parentnode.replaceLink(typechecknode, null);
 						return;
 					}
@@ -298,38 +289,39 @@
 				}
 
 				if (typechecknode instanceof DZBLOCK) {
-					blocktype = "##"
-							+ ((DZBLOCK) typechecknode).getBlock_type();
+					blocktype = "##" + ((DZBLOCK) typechecknode).getBlock_type();
 				} else {
 					blocktype = typechecknode.getId();
 				}
 
-				// calculate realnew blocksize in order that records are not split up.
+				// calculate real new blocksize in order that records are not
+				// split up.
 				long realmaxblksize = args.maxblocksize;
-				long recordlength=-1;
-				if(parentnode instanceof DGBLOCK){
-					MDF4GenBlock cgBlock = ((DGBLOCK)parentnode).getLnkCgFirst();
-					int recIDsize = ((DGBLOCK)parentnode).getRecIdSize();
-					if(cgBlock instanceof CGBLOCK){
-						recordlength = ((CGBLOCK)cgBlock).getDataBytes()+ recIDsize + ((CGBLOCK)cgBlock).getInvalBytes();
+				long recordlength = -1;
+				if (parentnode instanceof DGBLOCK) {
+					MDF4GenBlock cgBlock = ((DGBLOCK) parentnode).getLnkCgFirst();
+					int recIDsize = ((DGBLOCK) parentnode).getRecIdSize();
+					if (cgBlock instanceof CGBLOCK) {
+						recordlength = ((CGBLOCK) cgBlock).getDataBytes() + recIDsize
+								+ ((CGBLOCK) cgBlock).getInvalBytes();
 					}
 				}
 
-				if(recordlength!=-1){
-					//at least one record has to be included.
-					realmaxblksize = recordlength > args.maxblocksize ? recordlength : recordlength*(args.maxblocksize/recordlength);
+				if (recordlength != -1) {
+					// at least one record has to be included.
+					realmaxblksize = recordlength > args.maxblocksize ? recordlength
+							: recordlength * (args.maxblocksize / recordlength);
 				}
 
 				// Create new SplitMerger for this section.
-				MDF4BlocksSplittMerger bsm = new MDF4BlocksSplittMerger(this, blocktype, parentnode, node, realmaxblksize);
+				MDF4BlocksSplittMerger bsm = new MDF4BlocksSplittMerger(this, blocktype, parentnode, node,
+						realmaxblksize);
 
 				// Now attach data sections
 				if (probtype == MDFProblemType.LINKED_DATALIST_PROBLEM) {
 					if (!(firstlistnode instanceof DLBLOCK)) {
-						MDFSorter.log
-						.severe("List header is no DL Node. Aborting.");
-						throw new RuntimeException(
-								"List header is no DL Node.");
+						MDFSorter.log.severe("List header is no DL Node. Aborting.");
+						throw new RuntimeException("List header is no DL Node.");
 					}
 					DLBLOCK dlnode = (DLBLOCK) firstlistnode;
 					do {
@@ -351,10 +343,8 @@
 					// We have more than one channel group in a single
 					// DataGroup. We have to create new DataGroups for each
 					// Channel Group.
-					LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup(
-							(DGBLOCK) node);
-					MDFSorter.log.log(Level.INFO, "Found " + groups.size()
-					+ " Channel Groups in DG.");
+					LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup((DGBLOCK) node);
+					MDFSorter.log.log(Level.INFO, "Found " + groups.size() + " Channel Groups in DG.");
 					MDF4GenBlock datasection = ((DGBLOCK) node).getLnkData();
 					SortDataGroup(prob, groups, datasection);
 
@@ -365,8 +355,7 @@
 		}
 	}
 
-	public LinkedList<CGBLOCK> getChannelGroupsfromDataGroup(
-			DGBLOCK startDataGroup) {
+	public LinkedList<CGBLOCK> getChannelGroupsfromDataGroup(DGBLOCK startDataGroup) {
 		LinkedList<CGBLOCK> ret = new LinkedList<CGBLOCK>();
 		CGBLOCK next = (CGBLOCK) startDataGroup.getLnkCgFirst();
 		while (next != null) {
@@ -411,29 +400,33 @@
 			}
 			pre.addLink(0, fhblk);
 		} else {
-			MDFSorter.log.warning(
-					"Cannot attach file history. No suitable Block found.");
+			MDFSorter.log.warning("Cannot attach file history. No suitable Block found.");
 		}
 
 		// After link update, all connections will be set correctly
 	}
 
 	/**
-	 * Main sorting function. Sorts a datagroup consisting of more than on channel group.
-	 * @param prob The MDFCompatibilityProblem that describes the situation.
-	 * @param groups List of ChannelGroups contained in that block.
-	 * @param datasection Data section of the DGBLOCK.
-	 * @throws IOException If an I/O-Error occurs.
-	 * @throws DataFormatException If zipped data is in an invalid format.
+	 * Main sorting function. Sorts a datagroup consisting of more than one
+	 * channel group.
+	 * 
+	 * @param prob
+	 *            The MDFCompatibilityProblem that describes the situation.
+	 * @param groups
+	 *            List of ChannelGroups contained in that block.
+	 * @param datasection
+	 *            Data section of the DGBLOCK.
+	 * @throws IOException
+	 *             If an I/O-Error occurs.
+	 * @throws DataFormatException
+	 *             If zipped data is in an invalid format.
 	 */
-	public void SortDataGroup(MDFCompatibilityProblem prob,
-			LinkedList<CGBLOCK> groups, MDF4GenBlock datasection)
-					throws IOException, DataFormatException {
+	public void SortDataGroup(MDFCompatibilityProblem prob, LinkedList<CGBLOCK> groups, MDF4GenBlock datasection)
+			throws IOException, DataFormatException {
 
 		DGBLOCK datagroup = (DGBLOCK) prob.getStartnode();
 		// sort records.
-		MDF4DataProvider prov = new MDF4DataProvider(datasection,
-				filestructure.getInput());
+		MDF4DataProvider prov = new MDF4DataProvider(datasection, filestructure.getInput());
 
 		int[] recCounters = new int[groups.size()];
 
@@ -452,7 +445,7 @@
 			if (cgroup.isVLSDChannel()) {
 				recNumtoSize.put(recID, -1L);
 			} else {
-				recNumtoSize.put(recID, cgroup.getDataBytes()+cgroup.getInvalBytes());
+				recNumtoSize.put(recID, cgroup.getDataBytes() + cgroup.getInvalBytes());
 			}
 		}
 
@@ -469,14 +462,14 @@
 				// only normal channels.
 				last = copyChannelInfrastructure(last, cgroup);
 				newlength = cgroup.getCycleCount() * cgroup.getDataBytes();
-				long reclen = cgroup.getDataBytes() +cgroup.getInvalBytes();
+				long reclen = cgroup.getDataBytes() + cgroup.getInvalBytes();
 				newlength = cgroup.getCycleCount() * reclen;
 
-				//at least one record has to be included.
-				long realmaxblksize = reclen > args.maxblocksize ? reclen : reclen*(args.maxblocksize/reclen);
+				// at least one record has to be included.
+				long realmaxblksize = reclen > args.maxblocksize ? reclen : reclen * (args.maxblocksize / reclen);
 
-				MDF4BlocksSplittMerger splitmerger = new MDF4BlocksSplittMerger(
-						this, "##DT", last, newlength, prov, realmaxblksize);
+				MDF4BlocksSplittMerger splitmerger = new MDF4BlocksSplittMerger(this, "##DT", last, newlength, prov,
+						realmaxblksize);
 
 				// write data sections.
 				for (long l : startaddresses[arridx]) {
@@ -490,28 +483,23 @@
 					for (CNBLOCK vlsdchan : vlsdchanlist) {
 						MDFGenBlock signaldata = vlsdchan.getLnkData();
 						if (signaldata == null) {
-							MDFSorter.log.severe(
-									"VLSD-Block without attached Data found!");
+							MDFSorter.log.severe("VLSD-Block without attached Data found!");
 							continue;
 						}
 						if (signaldata instanceof CGBLOCK) {
 							// we need to write a sdblock...
 							CGBLOCK vlsdcg = (CGBLOCK) signaldata;
-							int parsingidx = recNumtoArrIdx
-									.get(vlsdcg.getRecordId());
-							long expectedlength = vlsdcg.getVLSDlength()
-									+ vlsdcg.getCycleCount() * 4L;
-							MDF4BlocksSplittMerger signalsplitmerger = new MDF4BlocksSplittMerger(
-									this, "##SD", vlsdchan, expectedlength,
-									prov, args.maxblocksize);
+							int parsingidx = recNumtoArrIdx.get(vlsdcg.getRecordId());
+							long expectedlength = vlsdcg.getVLSDlength() + vlsdcg.getCycleCount() * 4L;
+							MDF4BlocksSplittMerger signalsplitmerger = new MDF4BlocksSplittMerger(this, "##SD",
+									vlsdchan, expectedlength, prov, args.maxblocksize);
 							ByteBuffer databuf;
 							// write data sections.
 							for (long l : startaddresses[parsingidx]) {
 								databuf = ByteBuffer.allocate(4);
 								prov.read(l + idSize, databuf);
 								long vllen = MDF4Util.readUInt32(databuf);
-								signalsplitmerger.splitmerge(l + idSize,
-										vllen + 4L);
+								signalsplitmerger.splitmerge(l + idSize, vllen + 4L);
 							}
 							signalsplitmerger.setLinks();
 						}
@@ -521,24 +509,25 @@
 		}
 	}
 
-	public long[][] fillRecordArray(int[] recordCounters, Map<Long, Integer> recNumtoArrIdx, Map<Long, Long> recNumtoSize,
-			AbstractDataProvider prov, int idSize) throws IOException, DataFormatException{
+	public long[][] fillRecordArray(int[] recordCounters, Map<Long, Integer> recNumtoArrIdx,
+			Map<Long, Long> recNumtoSize, AbstractDataProvider prov, int idSize)
+			throws IOException, DataFormatException {
 
 		MDFSorter.log.info("Searching Records.");
 		long[][] startaddresses = new long[recordCounters.length][];
 
-		//initilize array.
-		int counter =0;
-		long totalRecords = 0; //total number of records
-		for(long i : recordCounters){
-			totalRecords+=i;
+		// initialize array.
+		int counter = 0;
+		long totalRecords = 0; // total number of records
+		for (long i : recordCounters) {
+			totalRecords += i;
 			startaddresses[counter++] = new long[(int) i];
 		}
 
 		int[] foundrecCounters = new int[recordCounters.length];
 
-		long sectionoffset = 0; //our position in the data section
-		long foundrecords = 0; //number of records we found
+		long sectionoffset = 0; // our position in the data section
+		long foundrecords = 0; // number of records we found
 
 		ByteBuffer databuf;
 		while (foundrecords < totalRecords) {
@@ -580,8 +569,7 @@
 		}
 	}
 
-	public DGBLOCK copyChannelInfrastructure(MDF4GenBlock last, CGBLOCK towrite)
-			throws IOException {
+	public DGBLOCK copyChannelInfrastructure(MDF4GenBlock last, CGBLOCK towrite) throws IOException {
 		// Create new Data Group with default values, and write to file.
 		DGBLOCK newdg = new DGBLOCK();
 		writeBlock(newdg, null);
@@ -637,8 +625,7 @@
 	 *             If an I/O error occurs.
 	 */
 	@Override
-	public void writeBlock(MDF4GenBlock blk, byte[] appendData)
-			throws IOException {
+	public void writeBlock(MDF4GenBlock blk, byte[] appendData) throws IOException {
 		blk.setOutputpos(writeptr);
 
 		performPut(blk.getHeaderBytes());
@@ -665,9 +652,8 @@
 		byte[] spacer = new byte[spcsize];
 		performPut(spacer);
 		if (writeptr % 8L != 0) {
-			System.err.println("Wrote spacer of size " + spcsize
-					+ " but writeptr is still wrong. Len:" + length + " PTR:"
-					+ writeptr);
+			System.err.println("Wrote spacer of size " + spcsize + " but writeptr is still wrong. Len:" + length
+					+ " PTR:" + writeptr);
 		}
 	}
 }
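
The realmaxblksize expressions reformatted above keep records from being split across output blocks by rounding the configured maximum down to a whole number of records, or up to a single record if one record already exceeds the limit. A small sketch of that rule; the helper name is invented for illustration.

// Sketch of the record-aligned block-size rule used above: never split a record across
// output blocks, and never drop below the size of a single record.
public class RecordAlignedBlockSize {
	static long alignedMaxBlockSize(long recordLength, long maxBlockSize) {
		if (recordLength <= 0) {
			return maxBlockSize; // record length unknown: keep the configured limit
		}
		return recordLength > maxBlockSize
				? recordLength                                   // one record must always fit
				: recordLength * (maxBlockSize / recordLength);  // round down to whole records
	}

	public static void main(String[] args) {
		System.out.println(alignedMaxBlockSize(24, 100));  // 96  (four whole records per block)
		System.out.println(alignedMaxBlockSize(300, 100)); // 300 (a single record exceeds the limit)
	}
}
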
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Util.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Util.java
index cb4bffa..a03cacc 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Util.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/MDF4Util.java
@@ -152,13 +152,9 @@
 	public static long readUInt64(ByteBuffer bb) {
 		byte[] data = new byte[8];
 		bb.get(data);
-		long l1 = ((long) data[0] & 0xff) << 0
-				| ((long) data[1] & 0xff) << 8
-				| ((long) data[2] & 0xff) << 16
+		long l1 = ((long) data[0] & 0xff) << 0 | ((long) data[1] & 0xff) << 8 | ((long) data[2] & 0xff) << 16
 				| ((long) data[3] & 0xff) << 24;
-		long l2 = ((long) data[4] & 0xff) << 0
-				| ((long) data[5] & 0xff) << 8
-				| ((long) data[6] & 0xff) << 16
+		long l2 = ((long) data[4] & 0xff) << 0 | ((long) data[5] & 0xff) << 8 | ((long) data[6] & 0xff) << 16
 				| ((long) data[7] & 0xff) << 24;
 		return l1 << 0 | l2 << 32;
 	}
@@ -220,13 +216,9 @@
 	public static long readLink(ByteBuffer bb) {
 		byte[] data = new byte[8];
 		bb.get(data);
-		long l1 = ((long) data[0] & 0xff) << 0
-				| ((long) data[1] & 0xff) << 8
-				| ((long) data[2] & 0xff) << 16
+		long l1 = ((long) data[0] & 0xff) << 0 | ((long) data[1] & 0xff) << 8 | ((long) data[2] & 0xff) << 16
 				| ((long) data[3] & 0xff) << 24;
-		long l2 = ((long) data[4] & 0xff) << 0
-				| ((long) data[5] & 0xff) << 8
-				| ((long) data[6] & 0xff) << 16
+		long l2 = ((long) data[4] & 0xff) << 0 | ((long) data[5] & 0xff) << 8 | ((long) data[6] & 0xff) << 16
 				| ((long) data[7] & 0xff) << 24;
 		return l1 << 0 | l2 << 32;
 	}
@@ -257,8 +249,7 @@
 	 * @throws IOException
 	 *             If a reading error occurs.
 	 */
-	public static String readCharsISO8859(ByteBuffer bb, int length)
-			throws IOException {
+	public static String readCharsISO8859(ByteBuffer bb, int length) throws IOException {
 		byte[] b = new byte[length];
 		bb.get(b);
 		return new String(b, 0, length, CHARSET_ISO8859);
@@ -275,8 +266,7 @@
 	 * @throws IOException
 	 *             If a reading error occurs.
 	 */
-	public static String readCharsUTF8(ByteBuffer bb, int length)
-			throws IOException {
+	public static String readCharsUTF8(ByteBuffer bb, int length) throws IOException {
 		byte[] b = new byte[length];
 		bb.get(b);
 		return new String(b, 0, length, CHARSET_UTF8);
@@ -286,8 +276,7 @@
 		return s.getBytes(CHARSET_UTF8);
 	}
 
-	public static byte[] getBytesCharsUTF8WithTerminator(String s)
-			throws IOException {
+	public static byte[] getBytesCharsUTF8WithTerminator(String s) throws IOException {
 		String term = "\0";
 		return (s + term).getBytes(CHARSET_UTF8);
 	}
@@ -305,8 +294,7 @@
 	 *            Array.
 	 * @return An array containing the transposed Data.
 	 */
-	public static byte[] transposeArray(byte[] data, int columnsize,
-			boolean forward) {
+	public static byte[] transposeArray(byte[] data, int columnsize, boolean forward) {
 		byte[] out = new byte[data.length];
 		int len = data.length;
 		int rows = len / columnsize;
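
readUInt64 and readLink above assemble a little-endian 64-bit value from two 32-bit halves. The loop below is an equivalent standalone formulation, shown only to make the byte order explicit; it is a sketch, not part of MDF4Util.

import java.nio.ByteBuffer;

// Sketch: assemble a little-endian 64-bit value byte by byte; byte 0 is least significant,
// which matches the l1/l2 composition in the readUInt64 and readLink hunks above.
public class LittleEndianSketch {
	static long readUInt64LE(ByteBuffer bb) {
		long value = 0;
		for (int i = 0; i < 8; i++) {
			value |= ((long) bb.get() & 0xff) << (8 * i);
		}
		return value;
	}

	public static void main(String[] args) {
		ByteBuffer bb = ByteBuffer.wrap(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 });
		System.out.println(readUInt64LE(bb)); // 1
	}
}
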
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SIBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SIBLOCK.java
index 02fcb31..d909000 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SIBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SIBLOCK.java
@@ -15,8 +15,9 @@
  * <p>
  * THE SOURCE INFORMATION BLOCK <code>SIBLOCK</code>
  * </p>
- * The SIBLOCK describes the source of an acquisition mode or of a signal. The source information is also used to ensure
- * a unique identification of a channel.
+ * The SIBLOCK describes the source of an acquisition mode or of a signal. The
+ * source information is also used to ensure a unique identification of a
+ * channel.
  *
  * @author Christian Rechner
  */
@@ -29,13 +30,15 @@
 	// 1 = ECU source is an ECU
 	// 2 = BUS source is a bus (e.g. for bus monitoring)
 	// 3 = I/O source is an I/O device (e.g. analog I/O)
-	// 4 = TOOL source is a software tool (e.g. for tool generated signals/events)
+	// 4 = TOOL source is a software tool (e.g. for tool generated
+	// signals/events)
 	// 5 = USER source is a user interaction/input
 	// (e.g. for user generated events)
 	// UINT8
 	private byte sourceType;
 
-	// Bus type: additional classification of used bus (should be 0 for si_type ≥ 3):
+	// Bus type: additional classification of used bus (should be 0 for si_type
+	// ≥ 3):
 	// 0 = NONE no bus
 	// 1 = OTHER bus type does not fit into given categories or is unknown
 	// 2 = CAN
@@ -116,8 +119,10 @@
 	/**
 	 * Reads a SIBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
@@ -125,7 +130,8 @@
 		// UINT8: Source type: additional classification of source:
 		setSourceType(MDF4Util.readUInt8(bb));
 
-		// UINT8: Bus type: additional classification of used bus (should be 0 for si_type ≥ 3):
+		// UINT8: Bus type: additional classification of used bus (should be 0
+		// for si_type ≥ 3):
 		setBusType(MDF4Util.readUInt8(bb));
 
 		// UINT8: Flags
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SRBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SRBLOCK.java
index 197dc28..5a5ff73 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SRBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/SRBLOCK.java
@@ -14,8 +14,8 @@
  * <p>
  * THE Sample Reduction block <code>SRBLOCK</code>
  * </p>
- * The TXBLOCK is very similar to the MDBLOCK but only contains a plain string encoded in UTF-8. The text length results
- * from the block size.
+ * The SRBLOCK describes a sample reduction of a channel group, i.e. a reduced
+ * set of samples of the channel group's data used for preview purposes.
  *
  * @author Tobias Leemann
  */
@@ -23,7 +23,6 @@
 
 	/** Data section */
 
-
 	/**
 	 * Parse a SRBLOCK from an existing MDFGenBlock
 	 *
@@ -39,7 +38,6 @@
 		parent.setPrec(this);
 	}
 
-
 	@Override
 	public String toString() {
 		return "SRBLOCK";
@@ -48,8 +46,10 @@
 	/**
 	 * Reads a SRBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/TXBLOCK.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/TXBLOCK.java
index 1639e57..122db7e 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/TXBLOCK.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/TXBLOCK.java
@@ -15,8 +15,8 @@
  * <p>
  * THE TEXT BLOCK <code>TXBLOCK</code>
  * </p>
- * The TXBLOCK is very similar to the MDBLOCK but only contains a plain string encoded in UTF-8. The text length results
- * from the block size.
+ * The TXBLOCK is very similar to the MDBLOCK but only contains a plain string
+ * encoded in UTF-8. The text length results from the block size.
  *
  * @author Christian Rechner, Tobias Leemann
  */
@@ -29,7 +29,6 @@
 	// CHAR
 	private String txData;
 
-
 	/**
 	 * Parse a TXBLOCK from an existing MDFGenBlock
 	 *
@@ -53,7 +52,6 @@
 		this.txData = txData;
 	}
 
-
 	@Override
 	public String toString() {
 		return "TXBLOCK [txData=" + txData + "]";
@@ -62,14 +60,16 @@
 	/**
 	 * Reads a TXBLOCK from its content.
 	 *
-	 * @param content The data section of this block
-	 * @throws IOException If an I/O error occurs.
+	 * @param content
+	 *            The data section of this block
+	 * @throws IOException
+	 *             If an I/O error occurs.
 	 */
 	@Override
 	public void parse(byte[] content) throws IOException {
 		// Read text String
 		setTxData(MDF4Util.readCharsUTF8(ByteBuffer.wrap(content), content.length));
-		//TODO: Bytes after zero termination?
+		// TODO: Bytes after zero termination?
 	}
 
 }
diff --git a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ZippedDataCache.java b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ZippedDataCache.java
index 2953b35..9c7d113 100644
--- a/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ZippedDataCache.java
+++ b/src/main/java/org/eclipse/mdm/mdfsorter/mdf4/ZippedDataCache.java
@@ -57,33 +57,26 @@
 		byte[] uncompressedData;
 		Inflater decompresser = new Inflater();
 		uncompressedData = new byte[(int) dzblk.getOrg_data_length()];
-		ByteBuffer compressedData = ByteBuffer
-				.allocate((int) dzblk.getData_length());
+		ByteBuffer compressedData = ByteBuffer.allocate((int) dzblk.getData_length());
 		// Skip header section of DZ Block
 		reader.position(dzblk.getPos() + 48L);
 		reader.read(compressedData);
-		decompresser.setInput(compressedData.array(), 0,
-				(int) dzblk.getData_length());
+		decompresser.setInput(compressedData.array(), 0, (int) dzblk.getData_length());
 
 		int resultLength = decompresser.inflate(uncompressedData);
 		decompresser.end();
 
 		if (dzblk.transposeNeeded()) {
 			int columnsize = (int) dzblk.getZip_parameters();
-			uncompressedData = MDF4Util.transposeArray(uncompressedData,
-					columnsize, false);
-			MDFSorter.log.log(Level.FINER,
-					"Transposing data with columnsize " + columnsize + ".");
+			uncompressedData = MDF4Util.transposeArray(uncompressedData, columnsize, false);
+			MDFSorter.log.log(Level.FINER, "Transposing data with columnsize " + columnsize + ".");
 		}
 
 		if (resultLength != dzblk.getOrg_data_length()) {
-			throw new RuntimeException(
-					"Data gain or loss detected while unziping. Expected "
-							+ dzblk.getOrg_data_length() + " bytes, got "
-							+ resultLength);
+			throw new RuntimeException("Data gain or loss detected while unzipping. Expected "
+					+ dzblk.getOrg_data_length() + " bytes, got " + resultLength);
 		}
-		MDFSorter.log.log(Level.FINER,
-				"Unzipped block of size " + resultLength + ".");
+		MDFSorter.log.log(Level.FINER, "Unzipped block of size " + resultLength + ".");
 
 		// Store Data in Cache
 		if (cacheblocks.size() == MAXENTRIES) {
@@ -110,8 +103,7 @@
 	 * @throws IOException
 	 *             If an input error occurs.
 	 */
-	public void read(DZBLOCK dzblk, long offset, ByteBuffer buf)
-			throws DataFormatException, IOException {
+	public void read(DZBLOCK dzblk, long offset, ByteBuffer buf) throws DataFormatException, IOException {
 		// Load block if not available
 		if (!isAvailable(dzblk)) {
 			load(dzblk);
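
The load path reformatted above inflates a DZ block's payload into a buffer sized to the original data length and rejects any length mismatch. Below is a self-contained sketch of that Inflater round trip, with a Deflater used only to produce test input; class and method names are invented for illustration.

import java.util.Arrays;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

// Sketch of the Inflater usage above: decompress a zlib-deflated payload into a buffer of
// the expected original length and verify that no data was gained or lost.
public class InflateSketch {
	static byte[] inflate(byte[] compressed, int originalLength) throws DataFormatException {
		Inflater inflater = new Inflater();
		inflater.setInput(compressed, 0, compressed.length);
		byte[] out = new byte[originalLength];
		int resultLength = inflater.inflate(out);
		inflater.end();
		if (resultLength != originalLength) {
			throw new DataFormatException("expected " + originalLength + " bytes, got " + resultLength);
		}
		return out;
	}

	public static void main(String[] args) throws DataFormatException {
		byte[] original = "some record data".getBytes();
		Deflater deflater = new Deflater();
		deflater.setInput(original);
		deflater.finish();
		byte[] buf = new byte[64];
		int len = deflater.deflate(buf);
		deflater.end();
		byte[] compressed = Arrays.copyOf(buf, len);
		System.out.println(new String(inflate(compressed, original.length)));
	}
}
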
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/ArgumentParserTest.java b/src/test/java/org/eclipse/mdm/mdfsorter/ArgumentParserTest.java
index 3e2386a..5a3cf95 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/ArgumentParserTest.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/ArgumentParserTest.java
@@ -25,31 +25,27 @@
 	// Unknown Flag test
 	@Test(expected = IllegalArgumentException.class)
 	public void testUnknownFlag() {
-		String[] test1 = { "process", "file1", "file2", "-somerandomflag=0",
-		"-unzip" };
+		String[] test1 = { "process", "file1", "file2", "-somerandomflag=0", "-unzip" };
 		ArgumentStruct.parseArgs(test1);
 	}
 
 	// Two different zipflags
 	@Test(expected = IllegalArgumentException.class)
 	public void testZipFlags() {
-		String[] test1 = { "process", "file1", "file2", "-zip",
-				"-maxblocksize=4g", "-unzip" };
+		String[] test1 = { "process", "file1", "file2", "-zip", "-maxblocksize=4g", "-unzip" };
 		ArgumentStruct.parseArgs(test1);
 	}
 
 	// Check maxblocksize without value
 	@Test(expected = IllegalArgumentException.class)
 	public void testNoValue() {
-		String[] test1 = { "process", "file1", "file2", "-zip",
-		"-maxblocksize=" };
+		String[] test1 = { "process", "file1", "file2", "-zip", "-maxblocksize=" };
 		ArgumentStruct.parseArgs(test1);
 	}
 
 	@Test // Check values of ArgumentStruct
 	public void testParsing1() {
-		String[] test1 = { "process", "file1", "file2", "-zip",
-		"-maxblocksize=300" };
+		String[] test1 = { "process", "file1", "file2", "-zip", "-maxblocksize=300" };
 		ArgumentStruct ar = ArgumentStruct.parseArgs(test1);
 		assertEquals(ar.inputname, "file1");
 		assertEquals(ar.outputname, "file2");
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UnsortProcessor.java b/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UnsortProcessor.java
index 2e46eb8..65697cb 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UnsortProcessor.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UnsortProcessor.java
@@ -30,8 +30,7 @@
 
 public class MDF3UnsortProcessor extends MDF3ProcessWriter {
 
-	public MDF3UnsortProcessor(MDFFileContent<MDF3GenBlock> filestructure,
-			ArgumentStruct args) {
+	public MDF3UnsortProcessor(MDFFileContent<MDF3GenBlock> filestructure, ArgumentStruct args) {
 		super(filestructure, args);
 	}
 
@@ -72,13 +71,11 @@
 
 					// write file out in 64k blocks
 					long length = blk.getLength();
-					ByteBuffer custombuffer = ByteBuffer
-							.allocate((int) length);
+					ByteBuffer custombuffer = ByteBuffer.allocate((int) length);
 					int bytesread = reader.read(custombuffer);
 					// alter head entry.
 					custombuffer.position(16);
-					custombuffer.put(MDF3Util.getBytesUInt16(1,
-							filestructure.isBigEndian()));
+					custombuffer.put(MDF3Util.getBytesUInt16(1, filestructure.isBigEndian()));
 					performPut(custombuffer, bytesread, false);
 					writtenblocks.addLast(blk);
 				} else {
@@ -87,8 +84,7 @@
 			} else {
 				if (blk.getProblems() != null) {
 					for (MDFCompatibilityProblem p : blk.getProblems()) {
-						MDFSorter.log.log(Level.FINE,
-								"Problem of Type: " + p.getType());
+						MDFSorter.log.log(Level.FINE, "Problem of Type: " + p.getType());
 					}
 					solveProblem(blk.getProblems(), buf, dgroups);
 				} else {
@@ -112,8 +108,7 @@
 
 		out.close();
 		MDFSorter.log.log(Level.INFO, "Wrote " + writeptr / 1000 + " kB.");
-		MDFSorter.log.log(Level.INFO,
-				"Writing took " + (System.currentTimeMillis() - start) + " ms");
+		MDFSorter.log.log(Level.INFO, "Writing took " + (System.currentTimeMillis() - start) + " ms");
 
 		// Update links with RandomAccessFile
 		RandomAccessFile r = new RandomAccessFile(args.outputname, "rw");
@@ -133,9 +128,8 @@
 	 * @throws IOException
 	 * @throws DataFormatException
 	 */
-	public void solveProblem(List<MDFCompatibilityProblem> l,
-			DataBlockBuffer buf, LinkedList<DGBLOCK> dgroups)
-					throws IOException, DataFormatException {
+	public void solveProblem(List<MDFCompatibilityProblem> l, DataBlockBuffer buf, LinkedList<DGBLOCK> dgroups)
+			throws IOException, DataFormatException {
 		if (l.size() != 1) {
 			System.out.println("To many Problems.");
 			// This may be supported in later versions.
@@ -149,10 +143,8 @@
 				// We have more than one channel group in a single
 				// DataGroup. We have to create new DataGroups for each
 				// Channel Group.
-				LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup(
-						(DGBLOCK) node);
-				MDFSorter.log.log(Level.INFO,
-						"Found " + groups.size() + " Channel Groups in DG.");
+				LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup((DGBLOCK) node);
+				MDFSorter.log.log(Level.INFO, "Found " + groups.size() + " Channel Groups in DG.");
 				MDF3GenBlock datasection = ((DGBLOCK) node).getLnkData();
 				UnSortDataGroup(prob, dgroups, datasection);
 
@@ -162,9 +154,8 @@
 		}
 	}
 
-	public void UnSortDataGroup(MDFCompatibilityProblem prob,
-			LinkedList<DGBLOCK> dgroups, MDF3GenBlock datasection)
-					throws IOException, DataFormatException {
+	public void UnSortDataGroup(MDFCompatibilityProblem prob, LinkedList<DGBLOCK> dgroups, MDF3GenBlock datasection)
+			throws IOException, DataFormatException {
 
 		boolean redundantIDs = true;
 		// create master DG
@@ -194,8 +185,7 @@
 			} else {
 				last.setLink(0, dgblk.getLnkCgFirst());
 			}
-			providers[count++] = new MDF3DataProvider(dgblk.getLnkData(),
-					filestructure.getInput());
+			providers[count++] = new MDF3DataProvider(dgblk.getLnkData(), filestructure.getInput());
 			last = (CGBLOCK) dgblk.getLnkCgFirst();
 			last.setRecordId(count);
 			newsize += last.getCycleCount() * last.getDataBytes();
@@ -207,8 +197,7 @@
 
 		// unsort records.
 		newsize += totreccount * (redundantIDs ? 2 : 1);
-		MDF3BlocksSplittMerger bsm = new MDF3BlocksSplittMerger(this, master,
-				newsize, providers[0]);
+		MDF3BlocksSplittMerger bsm = new MDF3BlocksSplittMerger(this, master, newsize, providers[0]);
 
 		MDF3DataProvider idprovider = new MDF3DataProvider(new byte[] { 0 });
 
@@ -226,8 +215,7 @@
 
 			// write record
 			bsm.setProv(providers[rectowrite]);
-			bsm.splitmerge(written[rectowrite] * sizes[rectowrite],
-					sizes[rectowrite]);
+			bsm.splitmerge(written[rectowrite] * sizes[rectowrite], sizes[rectowrite]);
 			written[rectowrite]++;
 			writtenrecords++;
 
@@ -236,8 +224,7 @@
 				bsm.splitmerge(0, 1);
 			}
 			if (writtenrecords % 10000 == 0) {
-				System.out.println("Wrote " + writtenrecords + " of "
-						+ totreccount + " Records.");
+				System.out.println("Wrote " + writtenrecords + " of " + totreccount + " Records.");
 			}
 		}
 		bsm.setLinks();
@@ -247,8 +234,8 @@
 		LinkedList<DGBLOCK> ret = new LinkedList<DGBLOCK>();
 		MDF3GenBlock hdroot = filestructure.getRoot();
 		// Set Problem to first datagroup
-		MDFCompatibilityProblem prob = new MDFCompatibilityProblem(
-				MDFProblemType.UNSORTED_DATA_PROBLEM, hdroot.getLink(0));
+		MDFCompatibilityProblem prob = new MDFCompatibilityProblem(MDFProblemType.UNSORTED_DATA_PROBLEM,
+				hdroot.getLink(0));
 		prob.setParentnode(hdroot);
 		hdroot.getLink(0).addProblem(prob);
 		for (MDF3GenBlock blk : filestructure.getList()) {
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UtilTest.java b/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UtilTest.java
index 7adb2f6..b48659c 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UtilTest.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/MDF3UtilTest.java
@@ -33,12 +33,10 @@
 	@Test
 	public void testGetBytesUInt32() {
 		long l = Integer.MAX_VALUE * 2L + 1L;
-		assertArrayEquals(MDF3Util.getBytesUInt32(l, false),
-				new byte[] { -1, -1, -1, -1 });
+		assertArrayEquals(MDF3Util.getBytesUInt32(l, false), new byte[] { -1, -1, -1, -1 });
 
 		l = Integer.MAX_VALUE * 2L + 1L;
-		assertArrayEquals(MDF3Util.getBytesUInt32(l, true),
-				new byte[] { -1, -1, -1, -1 });
+		assertArrayEquals(MDF3Util.getBytesUInt32(l, true), new byte[] { -1, -1, -1, -1 });
 	}
 
 	@Test
@@ -50,25 +48,25 @@
 	@Test
 	public void ReadUINT32() {
 		long val = Integer.MAX_VALUE + 1L;
-		ByteBuffer buf1 = ByteBuffer.wrap(new byte[] {-128, 0, 0, 0});
+		ByteBuffer buf1 = ByteBuffer.wrap(new byte[] { -128, 0, 0, 0 });
 		assertEquals(val, MDF3Util.readUInt32(buf1, true));
 
-		ByteBuffer buf2 = ByteBuffer.wrap(new byte[] {0, 0, 0, -128});
+		ByteBuffer buf2 = ByteBuffer.wrap(new byte[] { 0, 0, 0, -128 });
 		assertEquals(val, MDF3Util.readUInt32(buf2, false));
 	}
 
 	@Test
 	public void testGetBool() {
-		assertArrayEquals(new byte[] {1, 0}, MDF3Util.getBytesBool(true, false));
+		assertArrayEquals(new byte[] { 1, 0 }, MDF3Util.getBytesBool(true, false));
 	}
 
 	@Test
 	public void testParseBool() {
-		ByteBuffer buf1 = ByteBuffer.wrap(new byte[] {0, -1});
+		ByteBuffer buf1 = ByteBuffer.wrap(new byte[] { 0, -1 });
 		assertTrue(MDF3Util.readBool(buf1, false));
 		buf1.rewind();
 		assertTrue(MDF3Util.readBool(buf1, true));
-		ByteBuffer buf2 = ByteBuffer.wrap(new byte[] {0, 0});
+		ByteBuffer buf2 = ByteBuffer.wrap(new byte[] { 0, 0 });
 		assertFalse(MDF3Util.readBool(buf2, false));
 	}
 
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/MDF4BlocksSplittMergerTest.java b/src/test/java/org/eclipse/mdm/mdfsorter/MDF4BlocksSplittMergerTest.java
index 12daf04..3886cf2 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/MDF4BlocksSplittMergerTest.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/MDF4BlocksSplittMergerTest.java
@@ -30,20 +30,20 @@
 
 	@Test
 	public void testLength0() {
-		//test if a splitmerger with length 0 works, and links are correctly set to null.
+		// test if a splitmerger with length 0 works, and links are correctly
+		// set to null.
 		DGBLOCK blk = new DGBLOCK();
 		blk.setId("##DG");
 
-
 		MDFFileContent<MDF4GenBlock> con = new MDFFileContent<MDF4GenBlock>(null, null, null, false);
 		MDF4ProcessWriter ps = new MDF4ProcessWriter(con, new ArgumentStruct());
 
 		MDF4BlocksSplittMerger splitmerger = new MDF4BlocksSplittMerger(ps, "##DT", blk, 0, null, 1024);
 
-		//test setLinks.
-		//set links to a random block.
+		// test setLinks.
+		// set links to a random block.
 		MDF4GenBlock blablablock = new MDF4GenBlock(0xbad);
-		for(int i = 0; i < blk.getLinkCount(); i++){
+		for (int i = 0; i < blk.getLinkCount(); i++) {
 			blk.setLink(i, blablablock);
 		}
 		splitmerger.setLinks();
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/MDF4UtilTest.java b/src/test/java/org/eclipse/mdm/mdfsorter/MDF4UtilTest.java
index 2d41d35..ea541c5 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/MDF4UtilTest.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/MDF4UtilTest.java
@@ -28,8 +28,7 @@
 	@Test
 	public void testGetBytesUInt32() {
 		long l = Integer.MAX_VALUE * 2L + 1L;
-		assertArrayEquals(MDF4Util.getBytesUInt32(l),
-				new byte[] { -1, -1, -1, -1 });
+		assertArrayEquals(MDF4Util.getBytesUInt32(l), new byte[] { -1, -1, -1, -1 });
 	}
 
 	@Test
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsortProcessor.java b/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsortProcessor.java
index 1ddcb25..e8c893c 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsortProcessor.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsortProcessor.java
@@ -36,8 +36,7 @@
  */
 public class MDFUnsortProcessor extends MDF4ProcessWriter {
 
-	public MDFUnsortProcessor(MDFFileContent<MDF4GenBlock> filestructure,
-			ArgumentStruct args) {
+	public MDFUnsortProcessor(MDFFileContent<MDF4GenBlock> filestructure, ArgumentStruct args) {
 		super(filestructure, args);
 	}
 
@@ -91,8 +90,7 @@
 			} else {
 				if (blk.getProblems() != null) {
 					for (MDFCompatibilityProblem p : blk.getProblems()) {
-						MDFSorter.log.log(Level.FINE,
-								"Problem of Type: " + p.getType());
+						MDFSorter.log.log(Level.FINE, "Problem of Type: " + p.getType());
 					}
 					solveProblem(blk.getProblems(), buf, dgroups);
 				} else {
@@ -119,8 +117,7 @@
 
 		out.close();
 		MDFSorter.log.log(Level.INFO, "Wrote " + writeptr / 1000 + " kB.");
-		MDFSorter.log.log(Level.INFO,
-				"Writing took " + (System.currentTimeMillis() - start) + " ms");
+		MDFSorter.log.log(Level.INFO, "Writing took " + (System.currentTimeMillis() - start) + " ms");
 
 		// Update links with RandomAccessFile
 		RandomAccessFile r = new RandomAccessFile(args.outputname, "rw");
@@ -149,9 +146,8 @@
 	 * @throws IOException
 	 * @throws DataFormatException
 	 */
-	public void solveProblem(List<MDFCompatibilityProblem> l,
-			DataBlockBuffer buf, LinkedList<DGBLOCK> dgroups)
-					throws IOException, DataFormatException {
+	public void solveProblem(List<MDFCompatibilityProblem> l, DataBlockBuffer buf, LinkedList<DGBLOCK> dgroups)
+			throws IOException, DataFormatException {
 		if (l.size() != 1) {
 			System.out.println("To many Problems.");
 			// This may be supported in later versions.
@@ -165,10 +161,8 @@
 				// We have more than one channel group in a single
 				// DataGroup. We have to create new DataGroups for each
 				// Channel Group.
-				LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup(
-						(DGBLOCK) node);
-				MDFSorter.log.log(Level.INFO,
-						"Found " + groups.size() + " Channel Groups in DG.");
+				LinkedList<CGBLOCK> groups = getChannelGroupsfromDataGroup((DGBLOCK) node);
+				MDFSorter.log.log(Level.INFO, "Found " + groups.size() + " Channel Groups in DG.");
 				MDF4GenBlock datasection = ((DGBLOCK) node).getLnkData();
 				UnSortDataGroup(prob, dgroups, datasection, buf);
 
@@ -179,8 +173,7 @@
 	}
 
 	@Override
-	public LinkedList<CGBLOCK> getChannelGroupsfromDataGroup(
-			DGBLOCK startDataGroup) {
+	public LinkedList<CGBLOCK> getChannelGroupsfromDataGroup(DGBLOCK startDataGroup) {
 		LinkedList<CGBLOCK> ret = new LinkedList<CGBLOCK>();
 		CGBLOCK next = (CGBLOCK) startDataGroup.getLnkCgFirst();
 		while (next != null) {
@@ -190,8 +183,7 @@
 		return ret;
 	}
 
-	public void UnSortDataGroup(MDFCompatibilityProblem prob,
-			LinkedList<DGBLOCK> dgroups, MDF4GenBlock datasection,
+	public void UnSortDataGroup(MDFCompatibilityProblem prob, LinkedList<DGBLOCK> dgroups, MDF4GenBlock datasection,
 			DataBlockBuffer buf) throws IOException, DataFormatException {
 		// create master DG
 		DGBLOCK master = new DGBLOCK();
@@ -217,8 +209,7 @@
 			} else {
 				last.setLink(0, dgblk.getLnkCgFirst());
 			}
-			providers[count++] = new MDF4DataProvider(dgblk.getLnkData(),
-					filestructure.getInput());
+			providers[count++] = new MDF4DataProvider(dgblk.getLnkData(), filestructure.getInput());
 			last = (CGBLOCK) dgblk.getLnkCgFirst();
 			last.setRecordId(count);
 			newsize += last.getCycleCount() * last.getDataBytes();
@@ -230,8 +221,8 @@
 
 		// unsort records.
 		newsize += totreccount;
-		MDF4BlocksSplittMerger bsm = new MDF4BlocksSplittMerger(this, "##DT",
-				master, newsize, providers[0], args.maxblocksize);
+		MDF4BlocksSplittMerger bsm = new MDF4BlocksSplittMerger(this, "##DT", master, newsize, providers[0],
+				args.maxblocksize);
 
 		MDF4DataProvider idprovider = new MDF4DataProvider(new byte[] { 0 });
 
@@ -249,13 +240,11 @@
 
 			// write record
 			bsm.setProv(providers[rectowrite]);
-			bsm.splitmerge(written[rectowrite] * sizes[rectowrite],
-					sizes[rectowrite]);
+			bsm.splitmerge(written[rectowrite] * sizes[rectowrite], sizes[rectowrite]);
 			written[rectowrite]++;
 			writtenrecords++;
 			if (rectowrite == 10) {
-				System.out.println("Wrote " + writtenrecords + " of "
-						+ totreccount + " Records.");
+				System.out.println("Wrote " + writtenrecords + " of " + totreccount + " Records.");
 			}
 		}
 		bsm.setLinks();
@@ -281,8 +270,8 @@
 		LinkedList<DGBLOCK> ret = new LinkedList<DGBLOCK>();
 		HDBLOCK hdroot = (HDBLOCK) filestructure.getRoot();
 		// Set Problem to first datagroup
-		MDFCompatibilityProblem prob = new MDFCompatibilityProblem(
-				MDFProblemType.UNSORTED_DATA_PROBLEM, hdroot.getLnkDgFirst());
+		MDFCompatibilityProblem prob = new MDFCompatibilityProblem(MDFProblemType.UNSORTED_DATA_PROBLEM,
+				hdroot.getLnkDgFirst());
 		prob.setParentnode(hdroot);
 		hdroot.getLnkDgFirst().addProblem(prob);
 		for (MDF4GenBlock blk : filestructure.getList()) {
@@ -314,8 +303,8 @@
 		return ret;
 	}
 
-	public DGBLOCK copyChannelInfrastructure(MDF4GenBlock last, CGBLOCK towrite,
-			DataBlockBuffer buf) throws IOException {
+	public DGBLOCK copyChannelInfrastructure(MDF4GenBlock last, CGBLOCK towrite, DataBlockBuffer buf)
+			throws IOException {
 		// Create new Data Group with default values, and write to file.
 		DGBLOCK newdg = new DGBLOCK();
 		writeBlock(newdg, null);
diff --git a/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsorter.java b/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsorter.java
index c42cf25..4d550e8 100644
--- a/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsorter.java
+++ b/src/test/java/org/eclipse/mdm/mdfsorter/MDFUnsorter.java
@@ -62,8 +62,7 @@
 	 *            output file.
 	 */
 	@SuppressWarnings("unchecked")
-	public static void unsortMDF(String inputfile, String outputfile,
-			long maxblocksize, boolean unzip) {
+	public static void unsortMDF(String inputfile, String outputfile, long maxblocksize, boolean unzip) {
 		ArgumentStruct struct = new ArgumentStruct();
 		struct.inputname = inputfile;
 		struct.outputname = outputfile;
@@ -75,18 +74,15 @@
 			bufstream = new FileInputStream(inputfile);
 			MDFSorter.log.log(Level.INFO, "File opened.");
 			// 1. Parse file and get Content-Struct
-			MDFFileContent<? extends MDFGenBlock> con = MDFParser
-					.serializeFile(bufstream.getChannel());
+			MDFFileContent<? extends MDFGenBlock> con = MDFParser.serializeFile(bufstream.getChannel());
 
 			MDFAbstractProcessWriter<?> processorwriter;
 
 			if (con.isMDF3()) {
-				processorwriter = new MDF3UnsortProcessor(
-						(MDFFileContent<MDF3GenBlock>) con, struct);
+				processorwriter = new MDF3UnsortProcessor((MDFFileContent<MDF3GenBlock>) con, struct);
 				processorwriter.processAndWriteOut();
 			} else {
-				processorwriter = new MDFUnsortProcessor(
-						(MDFFileContent<MDF4GenBlock>) con, struct);
+				processorwriter = new MDFUnsortProcessor((MDFFileContent<MDF4GenBlock>) con, struct);
 				processorwriter.processAndWriteOut();
 			}