author	 = {H{\"u}bbe, Nathanael and Kunkel, Julian},
	title	 = {Reducing the {HPC}-Datastorage Footprint with {MAFISC} -- Multidimensional Adaptive Filtering Improved Scientific Data Compression},
	year	 = {2013},
	month	 = may,
	publisher	 = {Springer},
	journal	 = {Computer Science - Research and Development},
	volume	 = {28},
	number	 = {2-3},
	pages	 = {231--239},
	abstract	 = {Large HPC installations today also include large data storage installations. Data compression can significantly reduce the amount of data, and it was one of our goals to find out, how much compression can do for climate data. The price of compression is, of course, the need for additional computational resources, so our second goal was to relate the savings of compression to the costs it necessitates. In this paper we present the results of our analysis of typical climate data. A lossless algorithm based on these insights is developed and its compression ratio is compared to that of standard compression tools. As it turns out, this algorithm is general enough to be useful for a large class of scientific data, which is the reason we speak of MAFISC as a method for scientific data compression. A numeric problem for lossless compression of scientific data is identified and a possible solution is given. Finally, we discuss the economics of data compression in HPC environments using the example of the German Climate Computing Center.},
	doi	 = {10.1007/s00450-012-0222-4},