
volume io and server fixes and tweaks

Alexander Rose, 6 years ago
parent commit ea8d409bdd

+ 38 - 31
src/mol-io/common/file-handle.ts

@@ -26,7 +26,7 @@ export interface FileHandle {
      *
      * @param position — The offset from the beginning of the file where this data should be written.
      * @param buffer - The buffer data to be written.
-     * @param length — The number of bytes to write. If not supplied, defaults to buffer.length - offset.
+     * @param length — The number of bytes to write. If not supplied, defaults to buffer.length.
      */
     writeBuffer(position: number, buffer: SimpleBuffer, length?: number): Promise<number>
 
@@ -35,7 +35,7 @@ export interface FileHandle {
      *
      * @param position — The offset from the beginning of the file where this data should be written.
      * @param buffer - The buffer data to be written.
-     * @param length — The number of bytes to write. If not supplied, defaults to buffer.length - offset.
+     * @param length — The number of bytes to write. If not supplied, defaults to buffer.length.
      */
     writeBufferSync(position: number, buffer: SimpleBuffer, length?: number): number
 
@@ -47,28 +47,35 @@ export namespace FileHandle {
     export function fromBuffer(buffer: SimpleBuffer): FileHandle {
         return {
             readBuffer: (position: number, sizeOrBuffer: SimpleBuffer | number, size?: number, byteOffset?: number) => {
+                let bytesRead: number
+                let outBuffer: SimpleBuffer
                 if (typeof sizeOrBuffer === 'number') {
+                    size = defaults(size, sizeOrBuffer)
                     const start = position
-                    const end = Math.min(buffer.length, start + (defaults(size, sizeOrBuffer)))
-                    return Promise.resolve({ bytesRead: end - start, buffer: SimpleBuffer.fromUint8Array(buffer.subarray(start, end)) })
+                    const end = Math.min(buffer.length, start + size)
+                    bytesRead = end - start
+                    outBuffer = SimpleBuffer.fromUint8Array(new Uint8Array(buffer.buffer, start, end - start))
                 } else {
-                    if (size === void 0) {
-                        return Promise.reject('readBuffer: Specify size.');
-                    }
+                    size = defaults(size, sizeOrBuffer.length)
                     const start = position
-                    const end = Math.min(buffer.length, start + defaults(size, sizeOrBuffer.length))
+                    const end = Math.min(buffer.length, start + size)
                     sizeOrBuffer.set(buffer.subarray(start, end), byteOffset)
-                    return Promise.resolve({ bytesRead: end - start, buffer: sizeOrBuffer })
+                    bytesRead = end - start
+                    outBuffer = sizeOrBuffer
+                }
+                if (size !== bytesRead) {
+                    console.warn(`byteCount ${size} and bytesRead ${bytesRead} differ`)
                 }
+                return Promise.resolve({ bytesRead, buffer: outBuffer })
             },
             writeBuffer: (position: number, buffer: SimpleBuffer, length?: number) => {
                 length = defaults(length, buffer.length)
-                console.warn('FileHandle.writeBuffer not implemented')
+                console.error('.writeBuffer not implemented for FileHandle.fromBuffer')
                 return Promise.resolve(0)
             },
             writeBufferSync: (position: number, buffer: SimpleBuffer, length?: number, ) => {
                 length = defaults(length, buffer.length)
-                console.warn('FileHandle.writeSync not implemented')
+                console.error('.writeSync not implemented for FileHandle.fromBuffer')
                 return 0
             },
             close: noop
@@ -80,39 +87,39 @@ export namespace FileHandle {
         return {
             readBuffer: (position: number, sizeOrBuffer: SimpleBuffer | number, length?: number, byteOffset?: number) => {
                 return new Promise((res, rej) => {
+                    let outBuffer: SimpleBuffer
                     if (typeof sizeOrBuffer === 'number') {
-                        let buff = new Buffer(new ArrayBuffer(sizeOrBuffer));
-                        fs.read(file, buff, 0, sizeOrBuffer, position, (err, bytesRead, buffer) => {
-                            if (err) {
-                                rej(err);
-                                return;
-                            }
-                            res({ bytesRead, buffer });
-                        });
+                        byteOffset = defaults(byteOffset, 0)
+                        length = defaults(length, sizeOrBuffer)
+                        outBuffer = SimpleBuffer.fromArrayBuffer(new ArrayBuffer(sizeOrBuffer));
                     } else {
-                        if (length === void 0) {
-                            rej('readBuffer: Specify size.');
+                        byteOffset = defaults(byteOffset, 0)
+                        length = defaults(length, sizeOrBuffer.length)
+                        outBuffer = sizeOrBuffer
+                    }
+                    fs.read(file, outBuffer, byteOffset, length, position, (err, bytesRead, buffer) => {
+                        if (err) {
+                            rej(err);
                             return;
                         }
-                        fs.read(file, sizeOrBuffer, byteOffset ? +byteOffset : 0, length, position, (err, bytesRead, buffer) => {
-                            if (err) {
-                                rej(err);
-                                return;
-                            }
-                            res({ bytesRead, buffer });
-                        });
-                    }
+                        if (length !== bytesRead) {
+                            console.warn(`byteCount ${length} and bytesRead ${bytesRead} differ`)
+                        }
+                        res({ bytesRead, buffer });
+                    });
                 })
             },
-            writeBuffer: (position: number, buffer: Buffer, length?: number) => {
+            writeBuffer: (position: number, buffer: SimpleBuffer, length?: number) => {
+                length = defaults(length, buffer.length)
                 return new Promise<number>((res, rej) => {
-                    fs.write(file, buffer, 0, length !== void 0 ? length : buffer.length, position, (err, written) => {
+                    fs.write(file, buffer, 0, length, position, (err, written) => {
                         if (err) rej(err);
                         else res(written);
                     })
                 })
             },
             writeBufferSync: (position: number, buffer: Uint8Array, length?: number) => {
+                length = defaults(length, buffer.length)
                 return fs.writeSync(file, buffer, 0, length, position);
             },
             close: () => {

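A minimal usage sketch of the reworked buffer-backed handle (the values and variable names below are illustrative, not part of the commit): both readBuffer variants now derive a default size instead of rejecting, a short read only produces a console warning, and the unimplemented writes log an error instead of a warning.

import { FileHandle } from 'mol-io/common/file-handle';
import { SimpleBuffer } from 'mol-io/common/simple-buffer';

async function readBufferExample() {
    const data = SimpleBuffer.fromUint8Array(new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]));
    const handle = FileHandle.fromBuffer(data);

    // size variant: read 4 bytes starting at position 2
    const a = await handle.readBuffer(2, 4);
    console.log(a.bytesRead);  // 4

    // target-buffer variant: size now defaults to the target's length instead of rejecting
    const target = SimpleBuffer.fromUint8Array(new Uint8Array(4));
    const b = await handle.readBuffer(0, target);
    console.log(b.bytesRead);  // 4

    // writes on a buffer-backed handle are still unimplemented and now log an error
    await handle.writeBuffer(0, target);  // resolves to 0, logs '.writeBuffer not implemented ...'
}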
+ 4 - 0
src/mol-io/common/simple-buffer.ts

@@ -92,6 +92,10 @@ export namespace SimpleBuffer {
         })
     }
 
+    export function fromArrayBuffer(arrayBuffer: ArrayBuffer): SimpleBuffer {
+        return fromUint8Array(new Uint8Array(arrayBuffer))
+    }
+
     export function fromBuffer(buffer: Buffer): SimpleBuffer {
         return buffer
     }

+ 5 - 8
src/mol-io/common/typed-array.ts

@@ -23,7 +23,7 @@ export type TypedArrayValueArray = Float32Array | Int8Array | Int16Array
 export interface TypedArrayBufferContext {
     type: TypedArrayValueType,
     elementByteSize: number,
-    readBuffer: Buffer,
+    readBuffer: SimpleBuffer,
     valuesBuffer: Uint8Array,
     values: TypedArrayValueArray
 }
@@ -52,7 +52,7 @@ export function createTypedArray(type: TypedArrayValueType, size: number) {
 export function createTypedArrayBufferContext(size: number, type: TypedArrayValueType): TypedArrayBufferContext {
     let elementByteSize = getElementByteSize(type);
     let arrayBuffer = new ArrayBuffer(elementByteSize * size);
-    let readBuffer = new Buffer(arrayBuffer);
+    let readBuffer = SimpleBuffer.fromArrayBuffer(arrayBuffer);
     let valuesBuffer = SimpleBuffer.IsNativeEndianLittle ? arrayBuffer : new ArrayBuffer(elementByteSize * size);
     return {
         type,
@@ -63,14 +63,11 @@ export function createTypedArrayBufferContext(size: number, type: TypedArrayValu
     };
 }
 
-export async function readTypedArray(ctx: TypedArrayBufferContext, file: FileHandle, position: number, count: number, valueOffset: number, littleEndian?: boolean) {
-    let byteCount = ctx.elementByteSize * count;
-    let byteOffset = ctx.elementByteSize * valueOffset;
-
-    await file.readBuffer(position, ctx.readBuffer, byteCount, byteOffset);
+export async function readTypedArray(ctx: TypedArrayBufferContext, file: FileHandle, position: number, byteCount: number, valueByteOffset: number, littleEndian?: boolean) {
+    await file.readBuffer(position, ctx.readBuffer, byteCount, valueByteOffset);
     if (ctx.elementByteSize > 1 && ((littleEndian !== void 0 && littleEndian !== SimpleBuffer.IsNativeEndianLittle) || !SimpleBuffer.IsNativeEndianLittle)) {
         // fix the endian
-        SimpleBuffer.flipByteOrder(ctx.readBuffer, ctx.valuesBuffer, byteCount, ctx.elementByteSize, byteOffset);
+        SimpleBuffer.flipByteOrder(ctx.readBuffer, ctx.valuesBuffer, byteCount, ctx.elementByteSize, valueByteOffset);
     }
     return ctx.values;
 }

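Since readTypedArray now takes a byte count and a byte offset rather than element counts, callers do the byte arithmetic themselves. A rough sketch of the new calling convention (the helper readFloats is made up for illustration):

import { FileHandle } from 'mol-io/common/file-handle';
import { createTypedArrayBufferContext, readTypedArray, getElementByteSize, TypedArrayValueType } from 'mol-io/common/typed-array';

async function readFloats(file: FileHandle, position: number, count: number) {
    const type = TypedArrayValueType.Float32;
    const ctx = createTypedArrayBufferContext(count, type);
    const byteCount = count * getElementByteSize(type);  // previously the element count was passed
    // read byteCount bytes starting at position, with no extra byte offset into the read buffer
    return readTypedArray(ctx, file, position, byteCount, 0);
}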
+ 42 - 35
src/mol-io/reader/ccp4/parser.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */
@@ -9,7 +9,7 @@ import { Ccp4File, Ccp4Header } from './schema'
 import { ReaderResult as Result } from '../result'
 import { FileHandle } from '../../common/file-handle';
 import { SimpleBuffer } from 'mol-io/common/simple-buffer';
-import { TypedArrayValueType, getElementByteSize, makeTypedArray, TypedArrayBufferContext, readTypedArray } from 'mol-io/common/typed-array';
+import { TypedArrayValueType, getElementByteSize, TypedArrayBufferContext, readTypedArray, createTypedArrayBufferContext } from 'mol-io/common/typed-array';
 
 export async function readCcp4Header(file: FileHandle): Promise<{ header: Ccp4Header, littleEndian: boolean }> {
     const headerSize = 1024;
@@ -96,12 +96,26 @@ export async function readCcp4Header(file: FileHandle): Promise<{ header: Ccp4He
     return { header, littleEndian }
 }
 
-export async function readCcp4Slices(buffer: TypedArrayBufferContext, file: FileHandle, byteOffset: number, length: number, littleEndian: boolean) {
-    // TODO support data from mapmode2to0, see below
-    await readTypedArray(buffer, file, byteOffset, length, 0, littleEndian);
+export async function readCcp4Slices(header: Ccp4Header, buffer: TypedArrayBufferContext, file: FileHandle, byteOffset: number, length: number, littleEndian: boolean) {
+    if (isMapmode2to0(header)) {
+        // data from mapmode2to0 is in MODE 0 (Int8) and needs to be scaled and written as float32
+        const valueByteOffset = 3 * length
+        // read int8 data to last quarter of the read buffer
+        await file.readBuffer(byteOffset, buffer.readBuffer, length, valueByteOffset);
+        // get int8 view of last quarter of the read buffer
+        const int8 = new Int8Array(buffer.valuesBuffer.buffer, valueByteOffset)
+        // scaling f(x)=b1*x+b0 such that f(-128)=min and f(127)=max
+        const b1 = (header.AMAX - header.AMIN) / 255.0
+        const b0 = 0.5 * (header.AMIN + header.AMAX + b1)
+        for (let j = 0, jl = length; j < jl; ++j) {
+            buffer.values[j] = b1 * int8[j] + b0
+        }
+    } else {
+        await readTypedArray(buffer, file, byteOffset, length, 0, littleEndian);
+    }
 }
 
-function getTypedArrayValueType(mode: number) {
+function getCcp4DataType(mode: number) {
     switch (mode) {
         case 2: return TypedArrayValueType.Float32
         case 1: return TypedArrayValueType.Int16
@@ -110,47 +124,40 @@ function getTypedArrayValueType(mode: number) {
     throw new Error(`ccp4 mode '${mode}' unsupported`);
 }
 
+/** check if the file was converted by mapmode2to0, see https://github.com/uglymol/uglymol */
+function isMapmode2to0(header: Ccp4Header) {
+    return header.userFlag1 === -128 && header.userFlag2 === 127
+}
+
+export function getCcp4ValueType(header: Ccp4Header) {
+    return isMapmode2to0(header) ? TypedArrayValueType.Float32 : getCcp4DataType(header.MODE)
+}
+
+export function getCcp4DataOffset(header: Ccp4Header) {
+    return 256 * 4 + header.NSYMBT
+}
+
 async function parseInternal(file: FileHandle, size: number, ctx: RuntimeContext): Promise<Ccp4File> {
-    await ctx.update({ message: 'Parsing CCP4/MRC file...' });
+    await ctx.update({ message: 'Parsing CCP4/MRC/MAP file...' });
 
     const { header, littleEndian } = await readCcp4Header(file)
-
-    const offset = 256 * 4 + header.NSYMBT
-    const valueType = getTypedArrayValueType(header.MODE)
-    const { buffer, bytesRead } = await file.readBuffer(offset, size - offset)
+    const offset = getCcp4DataOffset(header)
+    const dataType = getCcp4DataType(header.MODE)
+    const valueType = getCcp4ValueType(header)
 
     const count = header.NC * header.NR * header.NS
-    const elementByteSize = getElementByteSize(valueType)
+    const elementByteSize = getElementByteSize(dataType)
     const byteCount = count * elementByteSize
 
-    if (byteCount !== bytesRead) {
-        console.warn(`byteCount ${byteCount} and bytesRead ${bytesRead} differ`)
-    }
-
-    let values = makeTypedArray(valueType, buffer.buffer, offset, count)
-
-    if (!littleEndian && valueType !== TypedArrayValueType.Int8) {
-        SimpleBuffer.flipByteOrder(buffer, new Uint8Array(values.buffer), byteCount, elementByteSize, 0)
-    }
-
-    // if the file was converted by mapmode2to0 - scale the data
-    // based on uglymol (https://github.com/uglymol/uglymol) by Marcin Wojdyr (wojdyr)
-    if (header.userFlag1 === -128 && header.userFlag2 === 127) {
-        values = new Float32Array(values)
-        // scaling f(x)=b1*x+b0 such that f(-128)=min and f(127)=max
-        const b1 = (header.AMAX - header.AMIN) / 255.0
-        const b0 = 0.5 * (header.AMIN + header.AMAX + b1)
-        for (let j = 0, jl = values.length; j < jl; ++j) {
-            values[j] = b1 * values[j] + b0
-        }
-    }
+    const buffer = createTypedArrayBufferContext(count, valueType)
+    await readCcp4Slices(header, buffer, file, offset, byteCount, littleEndian)
 
-    const result: Ccp4File = { header, values };
+    const result: Ccp4File = { header, values: buffer.values };
     return result
 }
 
 export function parseFile(file: FileHandle, size: number) {
-    return Task.create<Result<Ccp4File>>('Parse CCP4/MRC', async ctx => {
+    return Task.create<Result<Ccp4File>>('Parse CCP4/MRC/MAP', async ctx => {
         try {
             return Result.success(await parseInternal(file, size, ctx));
         } catch (e) {

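A quick numeric check of the mapmode2to0 scaling used in readCcp4Slices above; the AMIN/AMAX values are invented, the point is that f(-128) recovers the minimum and f(127) the maximum:

const AMIN = -1.5, AMAX = 2.5;               // illustrative header values
const b1 = (AMAX - AMIN) / 255.0;            // 4 / 255 ≈ 0.01569
const b0 = 0.5 * (AMIN + AMAX + b1);         // ≈ 0.50784
console.log(b1 * -128 + b0);                 // -1.5 -> AMIN
console.log(b1 * 127 + b0);                  //  2.5 -> AMAX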
+ 25 - 26
src/mol-io/reader/dsn6/parser.ts

@@ -60,6 +60,14 @@ function parseDsn6Header(buffer: SimpleBuffer, littleEndian: boolean): Dsn6Heade
     }
 }
 
+function getBlocks(header: Dsn6Header) {
+    const { xExtent, yExtent, zExtent } = header
+    const xBlocks = Math.ceil(xExtent / 8)
+    const yBlocks = Math.ceil(yExtent / 8)
+    const zBlocks = Math.ceil(zExtent / 8)
+    return { xBlocks, yBlocks, zBlocks }
+}
+
 export async function readDsn6Header(file: FileHandle): Promise<{ header: Dsn6Header, littleEndian: boolean }> {
     const { buffer } = await file.readBuffer(0, dsn6HeaderSize)
     const brixStr = String.fromCharCode.apply(null, buffer) as string
@@ -69,12 +77,14 @@ export async function readDsn6Header(file: FileHandle): Promise<{ header: Dsn6He
     return { header, littleEndian }
 }
 
-export async function parseDsn6Values(header: Dsn6Header, source: Uint8Array, target: Float32Array) {
-    const { divisor, summand, xExtent, yExtent, zExtent } = header
+export async function parseDsn6Values(header: Dsn6Header, source: Uint8Array, target: Float32Array, littleEndian: boolean) {
+    if (!littleEndian) {
+        // even though the values are one byte they need to be swapped like they are 2
+        SimpleBuffer.flipByteOrderInPlace2(source.buffer)
+    }
 
-    const xBlocks = Math.ceil(xExtent / 8)
-    const yBlocks = Math.ceil(yExtent / 8)
-    const zBlocks = Math.ceil(zExtent / 8)
+    const { divisor, summand, xExtent, yExtent, zExtent } = header
+    const { xBlocks, yBlocks, zBlocks } = getBlocks(header)
 
     let offset = 0
     // loop over blocks
@@ -105,35 +115,24 @@ export async function parseDsn6Values(header: Dsn6Header, source: Uint8Array, ta
     }
 }
 
-async function parseInternal(file: FileHandle, size: number, ctx: RuntimeContext): Promise<Dsn6File> {
-    await ctx.update({ message: 'Parsing DSN6/BRIX file...' });
-
-    const { header, littleEndian } = await readDsn6Header(file)
+export function getDsn6Counts(header: Dsn6Header) {
     const { xExtent, yExtent, zExtent } = header
-
-    const { buffer, bytesRead } = await file.readBuffer(dsn6HeaderSize, size - dsn6HeaderSize)
-
-    const xBlocks = Math.ceil(xExtent / 8)
-    const yBlocks = Math.ceil(yExtent / 8)
-    const zBlocks = Math.ceil(zExtent / 8)
+    const { xBlocks, yBlocks, zBlocks } = getBlocks(header)
     const valueCount = xExtent * yExtent * zExtent
-
     const count = xBlocks * 8 * yBlocks * 8 * zBlocks * 8
     const elementByteSize = 1
     const byteCount = count * elementByteSize
+    return { count, byteCount, valueCount }
+}
 
-    if (byteCount !== bytesRead) {
-        console.warn(`byteCount ${byteCount} and bytesRead ${bytesRead} differ`)
-    }
+async function parseInternal(file: FileHandle, size: number, ctx: RuntimeContext): Promise<Dsn6File> {
+    await ctx.update({ message: 'Parsing DSN6/BRIX file...' });
+    const { header, littleEndian } = await readDsn6Header(file)
+    const { buffer } = await file.readBuffer(dsn6HeaderSize, size - dsn6HeaderSize)
+    const { valueCount } = getDsn6Counts(header)
 
     const values = new Float32Array(valueCount)
-
-    if (!littleEndian) {
-        // even though the values are one byte they need to be swapped like they are 2
-        SimpleBuffer.flipByteOrderInPlace2(buffer.buffer)
-    }
-
-    await parseDsn6Values(header, buffer, values)
+    await parseDsn6Values(header, buffer, values, littleEndian)
 
     const result: Dsn6File = { header, values };
     return result;

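A small sketch of what the extracted getDsn6Counts helper reports; the extents are invented. DSN6 stores data in 8x8x8 blocks, so the stored count is padded up to whole blocks while valueCount reflects the actual map extent:

import { getDsn6Counts } from 'mol-io/reader/dsn6/parser';
import { Dsn6Header } from 'mol-io/reader/dsn6/schema';

const header = { xExtent: 20, yExtent: 17, zExtent: 9 } as Dsn6Header;  // partial header, for illustration
const { count, byteCount, valueCount } = getDsn6Counts(header);
// valueCount = 20 * 17 * 9           = 3060 values in the output Float32Array
// count      = (3*8) * (3*8) * (2*8) = 9216 stored values, padded to whole blocks
// byteCount  = 9216, since the packed DSN6 data uses one byte per value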
+ 1 - 1
src/mol-io/reader/dsn6/schema.ts

@@ -40,5 +40,5 @@ export interface Dsn6Header {
  */
 export interface Dsn6File {
     header: Dsn6Header
-    values: Float32Array | Int8Array
+    values: Float32Array
 }

+ 29 - 14
src/mol-model-formats/volume/ccp4.ts

@@ -8,12 +8,36 @@ import { VolumeData } from 'mol-model/volume/data'
 import { Task } from 'mol-task';
 import { SpacegroupCell, Box3D } from 'mol-math/geometry';
 import { Tensor, Vec3 } from 'mol-math/linear-algebra';
-import { Ccp4File } from 'mol-io/reader/ccp4/schema';
+import { Ccp4File, Ccp4Header } from 'mol-io/reader/ccp4/schema';
 import { degToRad } from 'mol-math/misc';
+import { getCcp4ValueType } from 'mol-io/reader/ccp4/parser';
+import { TypedArrayValueType } from 'mol-io/common/typed-array';
 
-function volumeFromCcp4(source: Ccp4File, params?: { voxelSize?: Vec3 }): Task<VolumeData> {
+/** When available (e.g. in MRC files) use ORIGIN records instead of N[CRS]START */
+export function getCcp4Origin(header: Ccp4Header) {
+    let gridOrigin: number[]
+    if (header.originX === 0.0 && header.originY === 0.0 && header.originZ === 0.0) {
+        gridOrigin = [header.NCSTART, header.NRSTART, header.NSSTART];
+    } else {
+        gridOrigin = [header.originX, header.originY, header.originZ];
+    }
+    return gridOrigin
+}
+
+function getTypedArrayCtor(header: Ccp4Header) {
+    const valueType = getCcp4ValueType(header)
+    switch (valueType) {
+        case TypedArrayValueType.Float32: return Float32Array;
+        case TypedArrayValueType.Int8: return Int8Array;
+        case TypedArrayValueType.Int16: return Int16Array;
+    }
+    throw Error(`${valueType} is not a supported value format.`);
+}
+
+export function volumeFromCcp4(source: Ccp4File, params?: { voxelSize?: Vec3 }): Task<VolumeData> {
     return Task.create<VolumeData>('Create Volume Data', async ctx => {
         const { header, values } = source;
+        console.log({ header, values })
         const size = Vec3.create(header.xLength, header.yLength, header.zLength)
         if (params && params.voxelSize) Vec3.mul(size, size, params.voxelSize)
         const angles = Vec3.create(degToRad(header.alpha), degToRad(header.beta), degToRad(header.gamma))
@@ -24,19 +48,12 @@ function volumeFromCcp4(source: Ccp4File, params?: { voxelSize?: Vec3 }): Task<V
 
         const grid = [header.NX, header.NY, header.NZ];
         const extent = normalizeOrder([header.NC, header.NR, header.NS]);
-
-        let gridOrigin: number[]
-        if (header.originX === 0.0 && header.originY === 0.0 && header.originZ === 0.0) {
-            gridOrigin = normalizeOrder([header.NCSTART, header.NRSTART, header.NSSTART]);
-        } else {
-            // When available (e.g. in MRC files) use ORIGIN records instead of N[CRS]START
-            gridOrigin = [header.originX, header.originY, header.originZ];
-        }
+        const gridOrigin = normalizeOrder(getCcp4Origin(header));
 
         const origin_frac = Vec3.create(gridOrigin[0] / grid[0], gridOrigin[1] / grid[1], gridOrigin[2] / grid[2]);
         const dimensions_frac = Vec3.create(extent[0] / grid[0], extent[1] / grid[1], extent[2] / grid[2]);
 
-        const space = Tensor.Space(extent, Tensor.invertAxisOrder(axis_order_fast_to_slow), header.MODE === 0 ? Int8Array : Float32Array);
+        const space = Tensor.Space(extent, Tensor.invertAxisOrder(axis_order_fast_to_slow), getTypedArrayCtor(header));
         const data = Tensor.create(space, Tensor.Data1(values));
 
         // TODO Calculate stats? When to trust header data?
@@ -55,6 +72,4 @@ function volumeFromCcp4(source: Ccp4File, params?: { voxelSize?: Vec3 }): Task<V
             }
         };
     });
-}
-
-export { volumeFromCcp4 }
+}

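A sketch of the ORIGIN-vs-N[CRS]START fallback that getCcp4Origin now encapsulates; the header values below are invented and only the fields the function reads are set:

import { getCcp4Origin } from 'mol-model-formats/volume/ccp4';
import { Ccp4Header } from 'mol-io/reader/ccp4/schema';

const mrcLike = { originX: 10.5, originY: 0, originZ: -3, NCSTART: 0, NRSTART: 0, NSSTART: 0 } as Ccp4Header;
console.log(getCcp4Origin(mrcLike));   // [10.5, 0, -3] — any non-zero ORIGIN record wins

const ccp4Like = { originX: 0, originY: 0, originZ: 0, NCSTART: -32, NRSTART: -16, NSSTART: 0 } as Ccp4Header;
console.log(getCcp4Origin(ccp4Like));  // [-32, -16, 0] — all-zero ORIGIN falls back to N[CRS]START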
+ 4 - 4
src/servers/volume/pack.ts

@@ -14,7 +14,7 @@ interface Config {
     format: 'ccp4' | 'dsn6',
     isPeriodic: boolean,
     outputFilename: string,
-    blockSize: number
+    blockSizeInMB: number
 }
 
 let config: Config = {
@@ -22,7 +22,7 @@ let config: Config = {
     format: 'ccp4',
     isPeriodic: false,
     outputFilename: '',
-    blockSize: 96
+    blockSizeInMB: 96
 };
 
 function getFormat(format: string): Config['format'] {
@@ -67,7 +67,7 @@ function parseInput() {
     for (let i = 2; i < process.argv.length; i++) {
         switch (process.argv[i].toLowerCase()) {
             case '-blocksize':
-                config.blockSize = +process.argv[++i];
+                config.blockSizeInMB = +process.argv[++i];
                 break;
             case '-format':
                 config.format = getFormat(process.argv[++i]);
@@ -102,5 +102,5 @@ function parseInput() {
 }
 
 if (parseInput()) {
-    pack(config.input, config.blockSize, config.isPeriodic, config.outputFilename, config.format);
+    pack(config.input, config.blockSizeInMB, config.isPeriodic, config.outputFilename, config.format);
 }

+ 5 - 2
src/servers/volume/pack/format.ts

@@ -29,6 +29,7 @@ export interface Header {
 /** Represents a circular buffer for 2 * blockSize layers */
 export interface SliceBuffer {
     buffer: TypedArrayBufferContext,
+    maxBlockBytes: number
     sliceCapacity: number,
     slicesRead: number,
 
@@ -55,13 +56,15 @@ export interface Context {
     provider: Provider
 }
 
-export function assignSliceBuffer(data: Data, blockSize: number) {
+export function assignSliceBuffer(data: Data, blockSizeInMB: number) {
     const { extent, valueType } = data.header;
+    const maxBlockBytes = blockSizeInMB * 1024 * 1024
     const sliceSize = extent[0] * extent[1] * getElementByteSize(valueType);
-    const sliceCapacity = Math.max(1, Math.floor(Math.min(1 * 1024 * 1024, sliceSize * extent[2]) / sliceSize));
+    const sliceCapacity = Math.max(1, Math.floor(Math.min(maxBlockBytes, sliceSize * extent[2]) / sliceSize));
     const buffer = createTypedArrayBufferContext(sliceCapacity * extent[0] * extent[1], valueType);
     data.slices = {
         buffer,
+        maxBlockBytes,
         sliceCapacity,
         slicesRead: 0,
         values: buffer.values,

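A rough feel for what the new maxBlockBytes cap changes (extents and value type below are invented): with the previous hard-coded 1 MiB cap only a handful of slices fit per read, while a 96 MB block lets this example map through in one go.

const blockSizeInMB = 96;
const maxBlockBytes = blockSizeInMB * 1024 * 1024;          // 100663296
const extent = [200, 200, 200];                             // illustrative
const elementByteSize = 4;                                  // Float32 values
const sliceSize = extent[0] * extent[1] * elementByteSize;  // 160000 bytes per XY slice
const sliceCapacity = Math.max(1, Math.floor(Math.min(maxBlockBytes, sliceSize * extent[2]) / sliceSize));
console.log(sliceCapacity);                                 // 200 — the full map fits in one block
// with the old 1 MiB cap: floor(1048576 / 160000) = 6 slices per read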
+ 9 - 19
src/servers/volume/pack/format/ccp4.ts

@@ -6,36 +6,26 @@
  */
 
 import { FileHandle } from 'mol-io/common/file-handle';
-import { readCcp4Header, readCcp4Slices } from 'mol-io/reader/ccp4/parser';
+import { readCcp4Header, readCcp4Slices, getCcp4DataOffset, getCcp4ValueType } from 'mol-io/reader/ccp4/parser';
 import { Header, Provider, Data } from '../format';
-import { TypedArrayValueType } from 'mol-io/common/typed-array';
-
-function getTypedArrayValueType(mode: number) {
-    switch (mode) {
-        case 2: return TypedArrayValueType.Float32
-        case 1: return TypedArrayValueType.Int16
-        case 0: return TypedArrayValueType.Int8
-    }
-    throw new Error(`ccp4 mode '${mode}' unsupported`);
-}
+import { getCcp4Origin } from 'mol-model-formats/volume/ccp4';
+import { Ccp4Header } from 'mol-io/reader/ccp4/schema';
 
 async function readHeader(name: string, file: FileHandle) {
     const { header: ccp4Header, littleEndian } = await readCcp4Header(file)
 
-    const origin2k = [ccp4Header.originX, ccp4Header.originY, ccp4Header.originZ];
-    const nxyzStart = [ccp4Header.NCSTART, ccp4Header.NRSTART, ccp4Header.NSSTART];
     const header: Header = {
         name,
-        valueType: getTypedArrayValueType(ccp4Header.MODE),
+        valueType: getCcp4ValueType(ccp4Header),
         grid: [ccp4Header.NX, ccp4Header.NY, ccp4Header.NZ],
         axisOrder: [ccp4Header.MAPC, ccp4Header.MAPR, ccp4Header.MAPS].map(i => i - 1),
         extent: [ccp4Header.NC, ccp4Header.NR, ccp4Header.NS],
-        origin: origin2k[0] === 0.0 && origin2k[1] === 0.0 && origin2k[2] === 0.0 ? nxyzStart : origin2k,
+        origin: getCcp4Origin(ccp4Header),
         spacegroupNumber: ccp4Header.ISPG,
         cellSize: [ccp4Header.xLength, ccp4Header.yLength, ccp4Header.zLength],
         cellAngles: [ccp4Header.alpha, ccp4Header.beta, ccp4Header.gamma],
         littleEndian,
-        dataOffset: 256 * 4 + ccp4Header.NSYMBT, /* symBytes */
+        dataOffset: getCcp4DataOffset(ccp4Header),
         originalHeader: ccp4Header
     };
     // "normalize" the grid axis order
@@ -49,13 +39,13 @@ export async function readSlices(data: Data) {
         return;
     }
 
-    const { extent } = header;
+    const { extent, originalHeader } = header;
     const sliceSize = extent[0] * extent[1];
     const sliceByteOffset = slices.buffer.elementByteSize * sliceSize * slices.slicesRead;
     const sliceCount = Math.min(slices.sliceCapacity, extent[2] - slices.slicesRead);
-    const sliceByteCount = sliceCount * sliceSize;
+    const sliceByteCount = slices.buffer.elementByteSize * sliceCount * sliceSize;
 
-    await readCcp4Slices(slices.buffer, data.file, header.dataOffset + sliceByteOffset, sliceByteCount, header.littleEndian);
+    await readCcp4Slices(originalHeader as Ccp4Header, slices.buffer, data.file, header.dataOffset + sliceByteOffset, sliceByteCount, header.littleEndian);
     slices.slicesRead += sliceCount;
     slices.sliceCount = sliceCount;
 

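A worked example of why sliceByteCount now multiplies by elementByteSize (the numbers are invented): for anything other than MODE 0 data the old value under-counted the bytes per read.

const elementByteSize = 4;                               // Float32 (MODE 2) map
const extent = [100, 100, 50];                           // illustrative
const sliceSize = extent[0] * extent[1];                 // 10000 values per slice
const sliceCount = 10;                                   // slices read in this pass
const before = sliceCount * sliceSize;                   // 100000 — bytes requested, 4x too few
const after = elementByteSize * sliceCount * sliceSize;  // 400000 — matches the actual byte span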
+ 0 - 31
src/servers/volume/pack/format/common.ts

@@ -1,31 +0,0 @@
-/**
- * Copyright (c) 2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
- *
- * @author David Sehnal <david.sehnal@gmail.com>
- * @author Alexander Rose <alexander.rose@weirdbyte.de>
- */
-
-import { Data } from '../format';
-import { readTypedArray } from 'mol-io/common/typed-array';
-
-export async function readSlices(data: Data) {
-    const { slices, header } = data;
-    if (slices.isFinished) {
-        return;
-    }
-
-    const { extent } = header;
-    const sliceSize = extent[0] * extent[1];
-    const sliceByteOffset = slices.buffer.elementByteSize * sliceSize * slices.slicesRead;
-    const sliceCount = Math.min(slices.sliceCapacity, extent[2] - slices.slicesRead);
-    const sliceByteCount = sliceCount * sliceSize;
-    console.log('sliceByteOffset', sliceByteOffset, 'sliceSize', sliceSize, 'sliceCount', sliceCount)
-
-    await readTypedArray(slices.buffer, data.file, header.dataOffset + sliceByteOffset, sliceByteCount, 0, header.littleEndian);
-    slices.slicesRead += sliceCount;
-    slices.sliceCount = sliceCount;
-
-    if (slices.slicesRead >= extent[2]) {
-        slices.isFinished = true;
-    }
-}

+ 21 - 18
src/servers/volume/pack/format/dsn6.ts

@@ -6,7 +6,7 @@
 
 import { FileHandle } from 'mol-io/common/file-handle';
 import { Header, Provider, Data } from '../format';
-import { readDsn6Header, dsn6HeaderSize, parseDsn6Values } from 'mol-io/reader/dsn6/parser';
+import { readDsn6Header, dsn6HeaderSize, parseDsn6Values, getDsn6Counts } from 'mol-io/reader/dsn6/parser';
 import { TypedArrayValueType } from 'mol-io/common/typed-array';
 import { Dsn6Header } from 'mol-io/reader/dsn6/schema';
 
@@ -15,12 +15,12 @@ async function readHeader(name: string, file: FileHandle) {
 
     const header: Header = {
         name,
-        valueType: TypedArrayValueType.Int16,
-        grid: [dsn6Header.xRate, dsn6Header.yRate, dsn6Header.zRate],
-        axisOrder: [0, 1, 2],
-        extent: [dsn6Header.xExtent, dsn6Header.yExtent, dsn6Header.zExtent],
-        origin: [dsn6Header.xStart, dsn6Header.yStart, dsn6Header.zStart],
-        spacegroupNumber: 1, // P 1
+        valueType: TypedArrayValueType.Float32,
+        grid: [dsn6Header.xRate, dsn6Header.yRate, dsn6Header.zRate].reverse(),
+        axisOrder: [0, 1, 2].reverse(),
+        extent: [dsn6Header.xExtent, dsn6Header.yExtent, dsn6Header.zExtent].reverse(),
+        origin: [dsn6Header.xStart, dsn6Header.yStart, dsn6Header.zStart].reverse(),
+        spacegroupNumber: 1, // set as P 1, since it is not available in DSN6 files
         cellSize: [dsn6Header.xlen, dsn6Header.ylen, dsn6Header.zlen],
         cellAngles: [dsn6Header.alpha, dsn6Header.beta, dsn6Header.gamma],
         littleEndian,
@@ -31,26 +31,29 @@ async function readHeader(name: string, file: FileHandle) {
 }
 
 export async function readSlices(data: Data) {
-    // TODO due to the dsn6 data layout, the file must be read a a whole, need check if the file is too big for that
+    // TODO due to the dsn6 data layout we read the file into one big buffer
+    //      to avoid this, either change the sampling algorithm to work with this layout or
+    //      read the data into a collection of buffers that can be accessed like one big buffer
+    //      => for now not worth putting time in, for big files better use another file format
 
     const { slices, header, file } = data;
     if (slices.isFinished) {
         return;
     }
 
-    const { extent, originalHeader } = header;
+    const { extent, dataOffset, originalHeader } = header;
     const sliceCount = extent[2]
 
-    const { xExtent, yExtent, zExtent } = originalHeader as Dsn6Header
-    const xBlocks = Math.ceil(xExtent / 8)
-    const yBlocks = Math.ceil(yExtent / 8)
-    const zBlocks = Math.ceil(zExtent / 8)
-    const count = xBlocks * 8 * yBlocks * 8 * zBlocks * 8
-    const elementByteSize = 1
-    const byteCount = count * elementByteSize
+    const { byteCount } = getDsn6Counts(originalHeader as Dsn6Header)
+    if (byteCount > slices.maxBlockBytes) {
+        throw new Error(`dsn6 file too large, can't read ${byteCount} bytes at once, increase block size or use another file format`)
+    }
 
-    const { buffer } = await file.readBuffer(dsn6HeaderSize, byteCount)
-    await parseDsn6Values(originalHeader as Dsn6Header, buffer, slices.values as Float32Array) // TODO fix cast
+    const { buffer } = await file.readBuffer(dataOffset, byteCount)
+    if (!(slices.values instanceof Float32Array)) {
+        throw new Error(`dsn6 reader only supports Float32Array for output values`)
+    }
+    await parseDsn6Values(originalHeader as Dsn6Header, buffer, slices.values, header.littleEndian)
 
     slices.slicesRead += sliceCount;
     slices.sliceCount = sliceCount;

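Rough numbers for the new whole-file size guard (extents invented): the DSN6 data is read in a single buffer, so it has to fit within maxBlockBytes.

const maxBlockBytes = 96 * 1024 * 1024;                 // 100663296 with the default block size
const byteCount400 = Math.ceil(400 / 8) ** 3 * 8 ** 3;  //  64000000 — a 400^3 map still fits
const byteCount600 = Math.ceil(600 / 8) ** 3 * 8 ** 3;  // 216000000 — a 600^3 map throws; raise -blocksize or use another format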
+ 13 - 13
src/servers/volume/pack/main.ts

@@ -13,9 +13,9 @@ import * as Sampling from './sampling'
 import * as DataFormat from '../common/data-format'
 import { FileHandle } from 'mol-io/common/file-handle';
 
-export default async function pack(input: { name: string, filename: string }[], blockSize: number, isPeriodic: boolean, outputFilename: string, format: Format.Type) {
+export default async function pack(input: { name: string, filename: string }[], blockSizeInMB: number, isPeriodic: boolean, outputFilename: string, format: Format.Type) {
     try {
-        await create(outputFilename, input, blockSize, isPeriodic, format);
+        await create(outputFilename, input, blockSizeInMB, isPeriodic, format);
     } catch (e) {
         console.error('[Error] ' + e);
     }
@@ -49,15 +49,15 @@ async function allocateFile(ctx: Data.Context) {
     }
 }
 
-function determineBlockSize(data: Format.Data, blockSize: number) {
+function determineBlockSize(data: Format.Data, blockSizeInMB: number) {
     const { extent } = data.header;
     const maxLayerSize = 1024 * 1024 * 1024;
     const valueCount = extent[0] * extent[1];
-    if (valueCount * blockSize <= maxLayerSize) return blockSize;
+    if (valueCount * blockSizeInMB <= maxLayerSize) return blockSizeInMB;
 
-    while (blockSize > 0) {
-        blockSize -= 4;
-        if (valueCount * blockSize <= maxLayerSize) return blockSize;
+    while (blockSizeInMB > 0) {
+        blockSizeInMB -= 4;
+        if (valueCount * blockSizeInMB <= maxLayerSize) return blockSizeInMB;
     }
 
     throw new Error('Could not determine a valid block size.');
@@ -69,10 +69,10 @@ async function writeHeader(ctx: Data.Context) {
     await ctx.file.writeBuffer(4, header);
 }
 
-async function create(filename: string, sourceDensities: { name: string, filename: string }[], sourceBlockSize: number, isPeriodic: boolean, format: Format.Type) {
+async function create(filename: string, sourceDensities: { name: string, filename: string }[], sourceBlockSizeInMB: number, isPeriodic: boolean, format: Format.Type) {
     const startedTime = getTime();
 
-    if (sourceBlockSize % 4 !== 0 || sourceBlockSize < 4) {
+    if (sourceBlockSizeInMB % 4 !== 0 || sourceBlockSizeInMB < 4) {
         throw Error('Block size must be a positive number divisible by 4.');
     }
 
@@ -93,16 +93,16 @@ async function create(filename: string, sourceDensities: { name: string, filenam
         if (!isOk) {
             throw new Error('Input file headers are not compatible (different grid, etc.).');
         }
-        const blockSize = determineBlockSize(channels[0].data, sourceBlockSize);
-        for (const ch of channels) Format.assignSliceBuffer(ch.data, blockSize);
+        const blockSizeInMB = determineBlockSize(channels[0].data, sourceBlockSizeInMB);
+        for (const ch of channels) Format.assignSliceBuffer(ch.data, blockSizeInMB);
 
         // Step 1c: Create data context.
-        const context = await Sampling.createContext(filename, channels, blockSize, isPeriodic);
+        const context = await Sampling.createContext(filename, channels, blockSizeInMB, isPeriodic);
         for (const s of channels) files.push(s.data.file);
         files.push(context.file);
         process.stdout.write('   done.\n');
 
-        console.log(`Block size: ${blockSize}`);
+        console.log(`Block size: ${blockSizeInMB}`);
 
         // Step 2: Allocate disk space.
         process.stdout.write('Allocating...      0%');

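A sketch of how the renamed block-size clamp behaves (the layer extent is invented); determineBlockSize keeps stepping down in multiples of 4 until a layer of valueCount * blockSizeInMB fits under the 1 GiB ceiling:

const maxLayerSize = 1024 * 1024 * 1024;   // 1 GiB, as in determineBlockSize
const valueCount = 4000 * 4000;            // extent[0] * extent[1] = 16,000,000
let blockSizeInMB = 96;
while (blockSizeInMB > 0 && valueCount * blockSizeInMB > maxLayerSize) blockSizeInMB -= 4;
console.log(blockSizeInMB);                // 64 — the first step where 16e6 * size fits under 1 GiB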
+ 5 - 3
src/servers/volume/server/query/compose.ts

@@ -18,19 +18,21 @@ export default async function compose(query: Data.QueryContext.Data) {
 }
 
 async function readBlock(query: Data.QueryContext.Data, coord: Coords.Grid<'Block'>, blockBox: Box.Fractional): Promise<Data.BlockData> {
+    const { valueType, blockSize } = query.data.header;
+    const elementByteSize = getElementByteSize(valueType)
     const numChannels = query.data.header.channels.length;
     const blockSampleCount = Box.dimensions(Box.fractionalToGrid(blockBox, query.samplingInfo.sampling.dataDomain));
     const size = numChannels * blockSampleCount[0] * blockSampleCount[1] * blockSampleCount[2];
-    const { valueType, blockSize } = query.data.header;
+    const byteSize = elementByteSize * size
     const dataSampleCount = query.data.header.sampling[query.samplingInfo.sampling.index].sampleCount;
     const buffer = createTypedArrayBufferContext(size, valueType);
     const byteOffset = query.samplingInfo.sampling.byteOffset
-        + getElementByteSize(valueType) * numChannels * blockSize
+        + elementByteSize * numChannels * blockSize
         * (blockSampleCount[1] * blockSampleCount[2] * coord[0]
             + dataSampleCount[0] * blockSampleCount[2] * coord[1]
             + dataSampleCount[0] * dataSampleCount[1] * coord[2]);
 
-    const values = await readTypedArray(buffer, query.data.file, byteOffset, size, 0);
+    const values = await readTypedArray(buffer, query.data.file, byteOffset, byteSize, 0);
     return {
         sampleCount: blockSampleCount,
         values
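A worked example of the size-to-byteSize fix (numbers invented): readTypedArray now expects a byte count, so passing the element count would read only a fraction of the block.

const elementByteSize = 4;                 // getElementByteSize(Float32)
const numChannels = 1;
const blockSampleCount = [96, 96, 96];     // illustrative
const size = numChannels * blockSampleCount[0] * blockSampleCount[1] * blockSampleCount[2];  // 884736 values
const byteSize = elementByteSize * size;   // 3538944 bytes — what readTypedArray is now asked to read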