Browse Source

Added VolumeServer

David Sehnal 6 years ago
parent
commit
88c9248c3f
45 changed files with 3751 additions and 20 deletions
  1. 129 0
      docs/volume-server/README.md
  2. 9 0
      docs/volume-server/examples.md
  3. 82 0
      docs/volume-server/how-it-works.md
  4. BIN
      docs/volume-server/img/1tqn_downsampled.png
  5. BIN
      docs/volume-server/img/zika_downsampled.png
  6. 126 0
      docs/volume-server/response-data-format.md
  7. 37 2
      package-lock.json
  8. 2 0
      package.json
  9. 2 2
      src/apps/domain-annotation-server/mapping.ts
  10. 2 2
      src/mol-app/ui/transform/spacefill.tsx
  11. 5 5
      src/mol-io/writer/cif.ts
  12. 1 1
      src/mol-io/writer/cif/encoder.ts
  13. 2 2
      src/mol-io/writer/cif/encoder/binary.ts
  14. 2 2
      src/mol-io/writer/cif/encoder/text.ts
  15. 2 2
      src/mol-io/writer/encoder.ts
  16. 3 2
      src/mol-io/writer/writer.ts
  17. 35 0
      src/servers/volume/CHANGELOG.md
  18. 154 0
      src/servers/volume/common/binary-schema.ts
  19. 134 0
      src/servers/volume/common/data-format.ts
  20. 171 0
      src/servers/volume/common/file.ts
  21. 75 0
      src/servers/volume/local.ts
  22. 86 0
      src/servers/volume/pack.ts
  23. 163 0
      src/servers/volume/pack/ccp4.ts
  24. 129 0
      src/servers/volume/pack/data-model.ts
  25. 175 0
      src/servers/volume/pack/downsampling.ts
  26. 133 0
      src/servers/volume/pack/main.ts
  27. 213 0
      src/servers/volume/pack/sampling.ts
  28. 1 0
      src/servers/volume/pack/version.ts
  29. 66 0
      src/servers/volume/pack/writer.ts
  30. 77 0
      src/servers/volume/server-config.ts
  31. 61 0
      src/servers/volume/server.ts
  32. 131 0
      src/servers/volume/server/algebra/box.ts
  33. 166 0
      src/servers/volume/server/algebra/coordinate.ts
  34. 72 0
      src/servers/volume/server/api.ts
  35. 174 0
      src/servers/volume/server/documentation.ts
  36. 143 0
      src/servers/volume/server/local-api.ts
  37. 94 0
      src/servers/volume/server/query/compose.ts
  38. 78 0
      src/servers/volume/server/query/data-model.ts
  39. 217 0
      src/servers/volume/server/query/encode.ts
  40. 234 0
      src/servers/volume/server/query/execute.ts
  41. 123 0
      src/servers/volume/server/query/identify.ts
  42. 13 0
      src/servers/volume/server/state.ts
  43. 42 0
      src/servers/volume/server/utils/logger.ts
  44. 1 0
      src/servers/volume/server/version.ts
  45. 186 0
      src/servers/volume/server/web-api.ts

+ 129 - 0
docs/volume-server/README.md

@@ -0,0 +1,129 @@
+What is VolumeServer
+=====================
+
+VolumeServer is a service for accessing subsets of volumetric density data. It automatically downsamples the data depending on the volume of the requested region to reduce the bandwidth requirements and provide near-instant access to even the largest data sets.
+
+It uses the text-based CIF and BinaryCIF formats to deliver the data to the client.
+
+For quick info about the benefits of using the server, check out the [examples](examples.md).
+
+Installing the Server 
+=====================
+
+- Install [Node.js](https://nodejs.org/en/) (tested on Node 6.* and 7.*; x64 version is strongly preferred).
+- Get the code.
+- Prepare the data.
+- Run the server.
+
+Preparing the Data
+------------------
+
+For the server to work, CCP4/MAP input data (modes 0, 1, and 2 are supported) need to be converted into a custom block format.
+To achieve this, use the ``pack`` application.
+
+- To prepare data from x-ray-based methods, use: 
+
+    ```
+    node pack -xray main.ccp4 diff.ccp4 out.mdb
+    ```
+
+- For EM data, use:
+
+    ```
+    node pack -em em.map out.mdb
+    ```
+
+Running the Server
+------------------
+
+- Install production dependencies:
+
+   ```
+   npm install --only=production
+   ```
+
+- Update ``server-config.js`` to link to your data and optionally tweak the other parameters.
+
+- Run it:
+
+    ```
+    node server
+    ```
+
+    In production it is a good idea to use a service that will keep the server running, such as [forever.js](https://github.com/foreverjs/forever).
+
+### Local Mode
+
+The program ``local`` in the build folder can be used to query the data without running an HTTP server.
+
+- ``node local`` prints the program usage.
+- ``node local jobs.json`` takes a list of jobs to execute in JSON format. A job entry is defined by this interface:
+
+    ```TypeScript
+    interface JobEntry {
+        source: {
+            filename: string,    
+            name: string,
+            id: string
+        },
+        query: {
+            kind: 'box' | 'cell',
+            space?: 'fractional' | 'cartesian',
+            bottomLeft?: number[],
+            topRight?: number[],
+        }
+        params: {
+            /** Determines the detail level as specified in server-config */
+            detail?: number,
+            /** 
+             * Determines the sampling level:
+             * 1: Original data
+             * 2: Downsampled by factor 1/2
+             * ...
+             * N: downsampled 1/2^(N-1)
+             */
+            forcedSamplingLevel?: number,
+            asBinary: boolean,
+        },
+        outputFolder: string
+    }
+    ```
+
+    Example ``jobs.json`` file content:
+
+    ```JSON
+    [{
+        "source": {
+            "filename": "g:/test/mdb/emd-8116.mdb",
+            "name": "em",
+            "id": "8116"
+        },
+        "query": {
+            "kind": "cell"
+        },
+        "params": {
+            "detail": 4,
+            "asBinary": true
+        },
+        "outputFolder": "g:/test/local-test"
+    }]
+    ```
+
+## Navigating the Source Code
+
+The source code is split into 2 main parts: ``pack`` and ``server``:
+
+- The ``pack`` part provides the means of converting CCP4 files into the internal block format.
+- The ``server`` includes
+  - ``query``: the main part of the server that handles a query. ``execute.ts`` is the "entry point".
+  - ``algebra``: linear, "coordinate", and "box" algebra provides the means for the calculations necessary to convert a user query into a meaningful response.
+  - API wrapper that handles the requests.
+
+Consuming the Data 
+==================
+
+The data can be consumed in any (modern) browser using the [CIFTools.js library](https://github.com/dsehnal/CIFTools.js) (or any other piece of code that can read text or binary CIF).
+
+The [Data Format](response-data-format.md) document gives a detailed description of the server response format.
+
+As a reference/example of the server usage, please see the implementation in [LiteMol](https://github.com/dsehnal/LiteMol) ([CIF.ts + Data.ts](https://github.com/dsehnal/LiteMol/tree/master/src/lib/Core/Formats/Density), [UI](https://github.com/dsehnal/LiteMol/tree/master/src/Viewer/Extensions/DensityStreaming)) or in Mol*.

+ 9 - 0
docs/volume-server/examples.md

@@ -0,0 +1,9 @@
+Zika Virus
+==========
+
+![Zika Virus](img/zika_downsampled.png)
+
+1TQN
+====
+
+![1tqn](img/1tqn_downsampled.png)

+ 82 - 0
docs/volume-server/how-it-works.md

@@ -0,0 +1,82 @@
+How it works
+============
+
+This document provides a high-level overview of how the VolumeServer works.
+
+## Overview
+
+- Data is stored using a block layout to reduce the number of disk seeks/reads each query requires.
+- Data is downsampled by ``1/2``, ``1/4``, ``1/8``, ... depending on the size of the input.
+- To keep the server response time/size small, each query is satisfied using the appropriate downsampling level.
+- The server response is encoded using the [BinaryCIF](https://github.com/dsehnal/BinaryCIF) format.
+- The contour level is preserved using relative instead of absolute values.
+
+## Data Layout
+
+To enable efficient access to the 3D data, the density values are stored in a "block level" format.
+This means that the data is split into ``NxNxN`` blocks (by default ``N=96``, which corresponds to a ``96^3 * 4 bytes = 3.375MB`` disk read
+per block access and provides a good size/performance ratio). This data layout
+makes it possible to access the data from a hard drive using a bounded number of disk seeks/reads, which
+greatly reduces the server latency.
+
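+As an illustration, here is a minimal sketch of which blocks a query region touches (the helper name and signature are assumptions for this sketch, not the server's actual code):
+
+```TypeScript
+// Hypothetical sketch: which N x N x N blocks does a voxel region [lower, upper] touch?
+// The touched-block count, and thus the number of disk seeks/reads, is bounded by the query size.
+function touchedBlocks(lower: number[], upper: number[], N: number = 96): number[][] {
+    const blocks: number[][] = [];
+    const lo = lower.map(v => Math.floor(v / N));
+    const hi = upper.map(v => Math.floor(v / N));
+    for (let k = lo[2]; k <= hi[2]; k++) {
+        for (let j = lo[1]; j <= hi[1]; j++) {
+            for (let i = lo[0]; i <= hi[0]; i++) blocks.push([i, j, k]);
+        }
+    }
+    return blocks;
+}
+```
+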
+## Downsampling 
+
+- The input is density data with ``[H,K,L]`` samples along each axis (i.e. the ``extent`` field in the CCP4 header).
+- To downsample, the kernel ``C = [1,4,6,4,1]`` (customizable at the source code level) is applied along each axis separately; this is possible because the kernel is "separable":
+
+    ```
+    downsampled[i] = C[0] * source[2 * i - 2] + ... + C[4] * source[2 * i + 2]
+    ```
+
+    The downsampling is applied along one axis at a time, in 3 steps:
+
+    ```
+    [H,K,L] => [H/2, K, L] => [H/2, K/2, L] => [H/2, K/2, L/2]
+    ```
+
+    (if a dimension ``D`` is odd, the downsampled size is ``(D+1)/2`` instead).
+
+- The downsampling step is applied iteratively until the number of samples along the largest dimension is smaller than the block size (and only while the smallest dimension still has more than 2 samples). A minimal 1D sketch of one downsampling pass is shown below.
+
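+As a minimal 1D sketch (an illustration, not the server's actual implementation), one downsampling pass with this kernel looks like:
+
+```TypeScript
+// Hypothetical sketch: one 1D downsampling pass with kernel C = [1, 4, 6, 4, 1].
+// Indices outside the array are clamped to its bounds, which reproduces the edge handling above.
+function downsample1D(src: number[]): number[] {
+    const C = [1, 4, 6, 4, 1];
+    const w = 16; // sum of the kernel coefficients
+    const out: number[] = [];
+    for (let i = 0, n = Math.floor((src.length + 1) / 2); i < n; i++) {
+        let sum = 0;
+        for (let k = -2; k <= 2; k++) {
+            const j = Math.min(src.length - 1, Math.max(0, 2 * i + k));
+            sum += C[k + 2] * src[j];
+        }
+        out.push(sum / w);
+    }
+    return out;
+}
+```
+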
+## Satisfying the query
+
+When the server receives a query for a 3D region, it chooses the appropriate downsampling level based on the required detail so that
+the number of voxels in the response is small enough. This enables sub-second response times even for the largest entries.
+
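+A minimal sketch of this selection (the voxel budget and helper names are assumptions, not the actual server code):
+
+```TypeScript
+// Hypothetical: choose the least downsampled level whose region still fits a voxel budget.
+// Levels are ordered from the original data (rate 1) to the most downsampled.
+interface SamplingLevel { rate: number, regionVoxelCount: number }
+function chooseSampling(levels: SamplingLevel[], maxVoxels: number): SamplingLevel {
+    for (const level of levels) {
+        if (level.regionVoxelCount <= maxVoxels) return level;
+    }
+    return levels[levels.length - 1]; // fall back to the coarsest level
+}
+```
+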
+### Encoding the response
+
+The [BinaryCIF](https://github.com/dsehnal/BinaryCIF) format is used to encode the response. Floating point data are quantized into 1-byte values (256 levels) before being
+sent back to the client. This quantization is performed by splitting the data interval into uniform pieces.
+
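+A minimal sketch of such uniform quantization (a simplification; the actual encoding lives in the BinaryCIF implementation):
+
+```TypeScript
+// Hypothetical: uniformly quantize floats from [min, max] into 256 levels (1 byte each).
+function quantize(values: Float32Array, min: number, max: number): Uint8Array {
+    const delta = (max - min) / 255;
+    const out = new Uint8Array(values.length);
+    for (let i = 0; i < values.length; i++) {
+        out[i] = delta > 0 ? Math.round((values[i] - min) / delta) : 0;
+    }
+    return out;
+}
+```
+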
+## Preserving the contour level
+
+Downsampling the data changes the absolute contour levels. To mitigate this effect, relative values are always used when displaying the data.
+
+- Imagine the input data points are ``A = [-0.3, 2, 0.1, 6, 3, -0.4]``.
+- Downsampling using every other value results in ``B = [-0.3, 0.1, 3]``.
+- The "range" of the data went from (-0.4, 6) to (-0.3, 3).
+- Attempting to use the same absolute contour level on both "data sets" will likely yield very different results.
+- The effect is similar, though not as severe, if the values are averaged (or weighted-averaged, as with the ``[1 4 6 4 1]`` kernel) instead of skipped.
+- As a result, the "absolute range" of the data changes, some outlier values are lost, but the mean and relative proportions (i.e. deviation ``X`` from mean in ``Y = mean + sigma * X``) are preserved. 
+
+----------------------
+
+## Compression Analysis
+
+- Downsampling: the ``i-th`` level (starting from zero) reduces the size by an approximate factor of ``1/(2^i)^3`` (i.e. the cube of the downsampling rate).
+- BinaryCIF: CCP4 mode 2 (32-bit floats) is reduced by a factor of 4, CCP4 mode 1 (16-bit integers) by a factor of 2, and CCP4 mode 0 (single bytes) is not reduced. This is achieved by single-byte quantization that is smarter than CCP4 mode 0.
+- Gzip, from observation:
+  - Gzipping BinaryCIF reduces the size by a factor of ~2 to ~7 (2 for "dense" data such as x-ray density, 7 for sparse data such as the envelope of a virus).
+  - Gzipping CCP4 reduces the size by 10-25% (whether mode 2 or 0).
+- Applying the downsampling kernel helps with the compression ratios because it smooths out the values.
+
+### Toy example:
+
+```
+Start with 3.5GB compressed density data in the CCP4 mode 2 format (32-bit float for each value)
+    => ~4GB uncompressed CCP4
+    => Downsample by 1/4 => 4GB * (1/4)^3 = 62MB
+    => Convert to BinaryCIF => 62MB / 4 = ~16MB
+    => Gzip: 2 - 8 MB depending on the "density" of the data 
+        (e.g. a viral shell data will be smaller because it is "empty" inside)
+```

BIN
docs/volume-server/img/1tqn_downsampled.png


BIN
docs/volume-server/img/zika_downsampled.png


+ 126 - 0
docs/volume-server/response-data-format.md

@@ -0,0 +1,126 @@
+Data Format
+===========
+
+This document describes the CIF categories and fields generated by the server.
+
+Query info
+----------
+
+The response always contains a data block called ``SERVER`` with this format:
+
+```
+data_SERVER
+#
+_density_server_result.server_version     0.9.0 
+_density_server_result.datetime_utc       '2017-03-09 20:35:45' 
+_density_server_result.guid               f69581b4-6b48-4fa4-861f-c879b4323688 
+_density_server_result.is_empty           no 
+_density_server_result.has_error          no 
+_density_server_result.error              . 
+_density_server_result.query_source_id    xray/1cbs 
+_density_server_result.query_type         box 
+_density_server_result.query_box_type     cartesian 
+_density_server_result.query_box_a[0]     14.555 
+_density_server_result.query_box_a[1]     16.075001 
+_density_server_result.query_box_a[2]     9.848 
+_density_server_result.query_box_b[0]     29.302999 
+_density_server_result.query_box_b[1]     35.737 
+_density_server_result.query_box_b[2]     32.037001 
+```
+
+Query data
+----------
+
+If the query completed successfully with a non-empty result, the response will contain one or more data blocks that correspond to the
+"channels" present in the data (e.g. for x-ray data there will be ``2Fo-Fc`` and ``Fo-Fc`` channels).
+
+The format is this:
+
+```
+data_2FO-FC
+#
+_volume_data_3d_info.name                         2Fo-Fc 
+```
+
+### Axis order
+
+Axis order determines the order of the axes in the ``origin``, ``dimensions``, and ``sample_count`` fields. It also specifies
+the order of values in ``_volume_data_3d.values``. ``0`` = X axis, ``1`` = Y axis, ``2`` = Z axis.
+
+```
+_volume_data_3d_info.axis_order[0]                0 
+_volume_data_3d_info.axis_order[1]                1 
+_volume_data_3d_info.axis_order[2]                2  
+```
+
+### Origin and dimensions
+
+Specifies the fractional coordinates of the bounding box of the data present in the data block.
+
+```
+_volume_data_3d_info.origin[0]                    -0.5 
+_volume_data_3d_info.origin[1]                    -0.5 
+_volume_data_3d_info.origin[2]                    -0.5 
+_volume_data_3d_info.dimensions[0]                1 
+_volume_data_3d_info.dimensions[1]                1 
+_volume_data_3d_info.dimensions[2]                1 
+```
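+
+For an orthogonal unit cell (all angles 90°), fractional coordinates can be converted to Cartesian by scaling with the spacegroup cell size; a minimal sketch of that special case (the general case requires the full fractionalization matrix):
+
+```TypeScript
+// Hypothetical sketch: fractional -> Cartesian, valid for orthogonal cells only.
+function fractionalToCartesian(frac: number[], cellSize: number[]): number[] {
+    return [frac[0] * cellSize[0], frac[1] * cellSize[1], frac[2] * cellSize[2]];
+}
+```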
+
+### Sample rate
+
+Determines how many values of the original input data were collapsed into 1 value.
+
+```
+_volume_data_3d_info.sample_rate                  8 
+```
+
+### Sample count
+
+Determines how many values in ``_volume_data_3d.values`` are present along each axis (in ``axis_order``).
+
+```
+_volume_data_3d_info.sample_count[0]              96 
+_volume_data_3d_info.sample_count[1]              96 
+_volume_data_3d_info.sample_count[2]              96 
+```
+
+### Spacegroup information
+
+```
+_volume_data_3d_info.spacegroup_number            1 
+_volume_data_3d_info.spacegroup_cell_size[0]      798.72 
+_volume_data_3d_info.spacegroup_cell_size[1]      798.72 
+_volume_data_3d_info.spacegroup_cell_size[2]      798.72 
+_volume_data_3d_info.spacegroup_cell_angles[0]    90 
+_volume_data_3d_info.spacegroup_cell_angles[1]    90 
+_volume_data_3d_info.spacegroup_cell_angles[2]    90 
+```
+
+### Values info
+
+Contains the mean, standard deviation, min, and max values of the _entire_ original and downsampled data (i.e. not just the slice present in the response).
+Both sets of values are present
+so that relative iso-levels can be estimated when the sampling changes between queries.
+
+```
+_volume_data_3d_info.mean_source                  0.026747 
+_volume_data_3d_info.mean_sampled                 0.026748 
+_volume_data_3d_info.sigma_source                 1.129252 
+_volume_data_3d_info.sigma_sampled                0.344922 
+_volume_data_3d_info.min_source                   -19.308199 
+_volume_data_3d_info.min_sampled                  -2.692016 
+_volume_data_3d_info.max_source                   26.264214 
+_volume_data_3d_info.max_sampled                  3.533172 
+```
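+
+A minimal sketch of using these fields to keep an iso-level consistent across samplings (the helper name and parameter shape are assumptions):
+
+```TypeScript
+// Hypothetical: the relative level X in Y = mean + sigma * X is preserved,
+// so an absolute level can be remapped between source and sampled statistics.
+function sourceToSampledLevel(absSource: number, h: {
+    mean_source: number, sigma_source: number,
+    mean_sampled: number, sigma_sampled: number
+}): number {
+    const relative = (absSource - h.mean_source) / h.sigma_source;
+    return h.mean_sampled + h.sigma_sampled * relative;
+}
+```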
+
+### Values
+
+The values are stored in the ``_volume_data_3d.values`` loop containing ``sample_count[0] * sample_count[1] * sample_count[2]`` values. ``axis_order[0]`` is the axis that changes the fastest and ``axis_order[2]`` is the axis that changes the slowest, same as in the [CCP4 format](http://www.ccp4.ac.uk/html/maplib.html#description).
+
+```
+loop_
+_volume_data_3d.values
+-0.075391 
+-0.078252 
+-0.078161 
+...
+```
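+
+A minimal sketch of addressing a voxel in the flat ``values`` array (the helper name is an assumption):
+
+```TypeScript
+// Hypothetical: linear index of voxel (i, j, k), with coordinates given in axis_order,
+// where axis_order[0] changes fastest (as in the CCP4 format).
+function linearIndex(i: number, j: number, k: number, sampleCount: number[]): number {
+    return i + sampleCount[0] * (j + sampleCount[1] * k);
+}
+```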

+ 37 - 2
package-lock.json

@@ -62,6 +62,15 @@
         "@types/node": "10.1.1"
       }
     },
+    "@types/compression": {
+      "version": "0.0.36",
+      "resolved": "https://registry.npmjs.org/@types/compression/-/compression-0.0.36.tgz",
+      "integrity": "sha512-B66iZCIcD2eB2F8e8YDIVtCUKgfiseOR5YOIbmMN2tM57Wu55j1xSdxdSw78aVzsPmbZ6G+hINc+1xe1tt4NBg==",
+      "dev": true,
+      "requires": {
+        "@types/express": "4.11.1"
+      }
+    },
     "@types/events": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/@types/events/-/events-1.1.0.tgz",
@@ -2664,6 +2673,28 @@
       "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=",
       "dev": true
     },
+    "compressible": {
+      "version": "2.0.13",
+      "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.13.tgz",
+      "integrity": "sha1-DRAgq5JLL9tNYnmHXH1tq6a6p6k=",
+      "requires": {
+        "mime-db": "1.33.0"
+      }
+    },
+    "compression": {
+      "version": "1.7.2",
+      "resolved": "http://registry.npmjs.org/compression/-/compression-1.7.2.tgz",
+      "integrity": "sha1-qv+81qr4VLROuygDU9WtFlH1mmk=",
+      "requires": {
+        "accepts": "1.3.5",
+        "bytes": "3.0.0",
+        "compressible": "2.0.13",
+        "debug": "2.6.9",
+        "on-headers": "1.0.1",
+        "safe-buffer": "5.1.1",
+        "vary": "1.1.2"
+      }
+    },
     "concat-map": {
       "version": "0.0.1",
       "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -4891,8 +4922,7 @@
         "jsbn": {
           "version": "0.1.1",
           "bundled": true,
-          "dev": true,
-          "optional": true
+          "dev": true
         },
         "json-schema": {
           "version": "0.2.3",
@@ -9477,6 +9507,11 @@
         "ee-first": "1.1.1"
       }
     },
+    "on-headers": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.1.tgz",
+      "integrity": "sha1-ko9dD0cNSTQmUepnlLCFfBAGk/c="
+    },
     "once": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",

+ 2 - 0
package.json

@@ -58,6 +58,7 @@
   "devDependencies": {
     "@types/argparse": "^1.0.33",
     "@types/benchmark": "^1.0.31",
+    "@types/compression": "0.0.36",
     "@types/express": "^4.11.1",
     "@types/jest": "^22.2.3",
     "@types/node": "^10.1.1",
@@ -90,6 +91,7 @@
   },
   "dependencies": {
     "argparse": "^1.0.10",
+    "compression": "^1.7.2",
     "express": "^4.16.3",
     "gl": "^4.0.4",
     "immutable": "^4.0.0-rc.9",

+ 2 - 2
src/apps/domain-annotation-server/mapping.ts

@@ -5,7 +5,7 @@
  */
 
 import { Table } from 'mol-data/db'
-import { CIFEncoder, create as createEncoder } from 'mol-io/writer/cif'
+import { EncoderInstance, create as createEncoder } from 'mol-io/writer/cif'
 import * as S from './schemas'
 import { getCategoryInstanceProvider } from './utils'
 
@@ -36,7 +36,7 @@ interface DomainAnnotation {
 }
 type MappingRow = Table.Row<S.mapping>;
 
-function writeDomain(enc: CIFEncoder<any>, domain: DomainAnnotation | undefined) {
+function writeDomain(enc: EncoderInstance, domain: DomainAnnotation | undefined) {
     if (!domain) return;
     enc.writeCategory(getCategoryInstanceProvider(`pdbx_${domain.name}_domain_annotation`, domain.domains));
     enc.writeCategory(getCategoryInstanceProvider(`pdbx_${domain.name}_domain_mapping`, domain.mappings));

+ 2 - 2
src/mol-app/ui/transform/spacefill.tsx

@@ -17,8 +17,8 @@ import { SpacefillUpdate } from 'mol-view/state/transform'
 import { StateContext } from 'mol-view/state/context';
 import { ColorTheme } from 'mol-geo/theme';
 import { Color, ColorNames } from 'mol-util/color';
-import { Query, Queries as Q } from 'mol-model/structure';
-import { Slider } from '../controls/slider';
+// import { Query, Queries as Q } from 'mol-model/structure';
+// import { Slider } from '../controls/slider';
 
 export const ColorThemeInfo = {
     'atom-index': {},

+ 5 - 5
src/mol-io/writer/cif.ts

@@ -13,14 +13,14 @@ import { CategoryDefinition } from './cif/encoder'
 
 export * from './cif/encoder'
 
-export function create(params?: { binary?: boolean, encoderName?: string }) {
+export type EncoderInstance = BinaryCIFEncoder<{}> | TextCIFEncoder<{}>
+
+export function create(params?: { binary?: boolean, encoderName?: string }): EncoderInstance {
     const { binary = false, encoderName = 'mol*' } = params || {};
     return binary ? new BinaryCIFEncoder(encoderName) : new TextCIFEncoder();
 }
 
-type CIFEncoder = BinaryCIFEncoder<{}> | TextCIFEncoder<{}>
-
-export function writeDatabase(encoder: CIFEncoder, name: string, database: Database<Database.Schema>) {
+export function writeDatabase(encoder: EncoderInstance, name: string, database: Database<Database.Schema>) {
     encoder.startDataBlock(name);
     for (const table of database._tableNames) {
         encoder.writeCategory(
@@ -29,7 +29,7 @@ export function writeDatabase(encoder: CIFEncoder, name: string, database: Datab
     }
 }
 
-export function writeDatabaseCollection(encoder: CIFEncoder, collection: DatabaseCollection<Database.Schema>) {
+export function writeDatabaseCollection(encoder: EncoderInstance, collection: DatabaseCollection<Database.Schema>) {
     for (const name of Object.keys(collection)) {
         writeDatabase(encoder, name, collection[name])
     }

+ 1 - 1
src/mol-io/writer/cif/encoder.ts

@@ -70,7 +70,7 @@ export interface CategoryProvider {
     (ctx: any): CategoryInstance
 }
 
-export interface CIFEncoder<T = string | Uint8Array, Context = any> extends Encoder<T> {
+export interface CIFEncoder<T = string | Uint8Array, Context = any> extends Encoder {
     startDataBlock(header: string): void,
     writeCategory(category: CategoryProvider, contexts?: Context[]): void,
     getData(): T

+ 2 - 2
src/mol-io/writer/cif/encoder/binary.ts

@@ -59,8 +59,8 @@ export default class BinaryCIFWriter<Context> implements CIFEncoder<Uint8Array,
         this.dataBlocks = <any>null;
     }
 
-    writeTo(writer: Writer<Uint8Array>) {
-        writer.write(this.encodedData);
+    writeTo(writer: Writer) {
+        writer.writeBinary(this.encodedData);
     }
 
     getData() {

+ 2 - 2
src/mol-io/writer/cif/encoder/text.ts

@@ -48,10 +48,10 @@ export default class TextCIFEncoder<Context> implements Enc.CIFEncoder<string, C
         this.encoded = true;
     }
 
-    writeTo(stream: Writer<string>) {
+    writeTo(stream: Writer) {
         const chunks = StringBuilder.getChunks(this.builder);
         for (let i = 0, _i = chunks.length; i < _i; i++) {
-            stream.write(chunks[i]);
+            stream.writeString(chunks[i]);
         }
     }
 

+ 2 - 2
src/mol-io/writer/encoder.ts

@@ -6,9 +6,9 @@
 
 import Writer from './writer'
 
-interface Encoder<T> {
+interface Encoder {
     encode(): void,
-    writeTo(writer: Writer<T>): void
+    writeTo(writer: Writer): void
 }
 
 export default Encoder

+ 3 - 2
src/mol-io/writer/writer.ts

@@ -4,8 +4,9 @@
  * @author David Sehnal <david.sehnal@gmail.com>
  */
 
-interface Writer<T> {
-    write(data: T): boolean
+interface Writer {
+    writeString(data: string): boolean,
+    writeBinary(data: Uint8Array): boolean
 }
 
 namespace Writer {

+ 35 - 0
src/servers/volume/CHANGELOG.md

@@ -0,0 +1,35 @@
+# 0.9.5
+* Better query response box resolution.
+
+# 0.9.4
+* Support for CCP4 Mode 1.
+* Asking for summary for a non-existing entry will now correctly return 404.
+* Automatically determine max block size for large entries.
+
+# 0.9.3
+* Fixed a bug in CIFTools (BinaryCIF encoding).
+
+# 0.9.2
+* Changed "EM" naming access scheme.
+
+# 0.9.1
+* Removed max fractional dimension checking.
+* Fixed a bug in the MDB packer.
+
+# 0.9.0
+* Rewrote pretty much everything to add downsampling.
+
+# 0.8.4
+* Add support for converting CCP4 mode 0 maps.
+
+# 0.8.3
+* Allow periodic boundary for P 1 spacegroup.
+
+# 0.8.2
+* Fixed axis ordering issue.
+
+# 0.8.1
+* Updated value encoding.
+
+# 0.8.0
+* Let's call this an initial version.

+ 154 - 0
src/servers/volume/common/binary-schema.ts

@@ -0,0 +1,154 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as UTF8 from 'mol-io/common/utf8'
+
+export type Bool = { kind: 'bool' }
+export type Int = { kind: 'int' }
+export type Float = { kind: 'float' }
+export type String = { kind: 'string' }
+export type Array = { kind: 'array', element: Element }
+export type Prop = { element: Element, prop: string }
+export type Obj = { kind: 'object', props: Prop[] }
+// tslint:disable-next-line:array-type
+export type Element = Bool | Int | Float | String | Array | Obj
+
+export const bool: Bool = { kind: 'bool' };
+export const int: Int = { kind: 'int' };
+export const float: Float = { kind: 'float' };
+export const str: String = { kind: 'string' };
+// tslint:disable-next-line:array-type
+export function array(element: Element): Array { return { kind: 'array', element }; }
+export function obj<T>(schema: ((keyof T) | Element)[][]): Obj {
+    return {
+        kind: 'object',
+        props: schema.map(s => ({
+            element: s[1] as Element,
+            prop: s[0] as string
+        }))
+    };
+}
+
+function byteCount(e: Element, src: any) {
+    let size = 0;
+    switch (e.kind) {
+        case 'bool': size += 1; break;
+        case 'int': size += 4; break;
+        case 'float': size += 8; break;
+        case 'string': size += 4 + UTF8.utf8ByteCount(src); break;
+        case 'array': {
+            size += 4; // array length
+            for (const x of src) {
+                size += byteCount(e.element, x);
+            }
+            break;
+        }
+        case 'object': {
+            for (const p of e.props) {
+                size += byteCount(p.element, src[p.prop]);
+            }
+            break;
+        }
+    }
+    return size;
+}
+
+function writeElement(e: Element, buffer: Buffer, src: any, offset: number) {
+    switch (e.kind) {
+        case 'bool': buffer.writeInt8(src ? 1 : 0, offset); offset += 1; break;
+        case 'int': buffer.writeInt32LE(src | 0, offset); offset += 4; break;
+        case 'float': buffer.writeDoubleLE(+src, offset); offset += 8; break;
+        case 'string': {
+            const val = '' + src;
+            const size = UTF8.utf8ByteCount(val);
+            buffer.writeInt32LE(size, offset);
+            offset += 4; // str len
+            const str = new Uint8Array(size);
+            UTF8.utf8Write(str, 0, val);
+            for (const b of <number[]><any>str) {
+                buffer.writeUInt8(b, offset);
+                offset++;
+            }
+            break;
+        }
+        case 'array': {
+            buffer.writeInt32LE(src.length, offset);
+            offset += 4; // array length
+            for (const x of src) {
+                offset = writeElement(e.element, buffer, x, offset);
+            }
+            break;
+        }
+        case 'object': {
+            for (const p of e.props) {
+                offset = writeElement(p.element, buffer, src[p.prop], offset);
+            }
+            break;
+        }
+    }
+    return offset;
+}
+
+function write(element: Element, src: any) {
+    const size = byteCount(element, src);
+    const buffer = new Buffer(size);
+    writeElement(element, buffer, src, 0);
+    return buffer;
+}
+
+export function encode(element: Element, src: any): Buffer {
+    return write(element, src);
+}
+
+function decodeElement(e: Element, buffer: Buffer, offset: number, target: { value: any }) {
+    switch (e.kind) {
+        case 'bool': target.value = !!buffer.readInt8(offset); offset += 1; break;
+        case 'int': target.value = buffer.readInt32LE(offset); offset += 4; break;
+        case 'float': target.value = buffer.readDoubleLE(offset); offset += 8; break;
+        case 'string': {
+            const size = buffer.readInt32LE(offset);
+            offset += 4; // str len
+            const str = new Uint8Array(size);
+            for (let i = 0; i < size; i++) {
+                str[i] = buffer.readUInt8(offset);
+                offset++;
+            }
+            target.value = UTF8.utf8Read(str, 0, size);
+            break;
+        }
+        case 'array': {
+            const array: any[] = [];
+            const count = buffer.readInt32LE(offset);
+            const element = { value: void 0 };
+            offset += 4;
+            for (let i = 0; i < count; i++) {
+                offset = decodeElement(e.element, buffer, offset, element);
+                array.push(element.value);
+            }
+            target.value = array;
+            break;
+        }
+        case 'object': {
+            const t = Object.create(null);
+            const element = { value: void 0 };
+            for (const p of e.props) {
+                offset = decodeElement(p.element, buffer, offset, element);
+                t[p.prop] = element.value;
+            }
+            target.value = t;
+            break;
+        }
+    }
+    return offset;
+}
+
+export function decode<T>(element: Element, buffer: Buffer, offset?: number) {
+    const target = { value: void 0 as any };
+    decodeElement(element, buffer, offset! | 0, target);
+    return target.value as T;
+}

+ 134 - 0
src/servers/volume/common/data-format.ts

@@ -0,0 +1,134 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as File from './file'
+import * as Schema from './binary-schema'
+
+export type ValueType = 'float32' | 'int8' | 'int16'
+
+export namespace ValueType {
+    export const Float32: ValueType = 'float32';
+    export const Int8: ValueType = 'int8';
+    export const Int16: ValueType = 'int16';
+}
+
+export type ValueArray = Float32Array | Int8Array | Int16Array
+
+export interface Spacegroup {
+    number: number,
+    size: number[],
+    angles: number[],
+    /** Determines if the data should be treated as periodic (e.g. X-ray = periodic, EM = not periodic). */
+    isPeriodic: boolean,
+}
+
+export interface ValuesInfo {
+    mean: number,
+    sigma: number,
+    min: number,
+    max: number
+}
+
+export interface Sampling {
+    byteOffset: number,
+
+    /** How many values along each axis were collapsed into 1 */
+    rate: number,
+    valuesInfo: ValuesInfo[],
+
+    /** Number of samples along each axis, in axisOrder  */
+    sampleCount: number[]
+}
+
+export interface Header {
+    /** Format version number  */
+    formatVersion: string,
+
+    /** Axis order from the slowest to fastest moving, same as in CCP4 */
+    axisOrder: number[],
+
+    /** Origin in fractional coordinates, in axisOrder */
+    origin: number[],
+
+    /** Dimensions in fractional coordinates, in axisOrder */
+    dimensions: number[],
+
+    spacegroup: Spacegroup,
+    channels: string[],
+
+    /** Determines the data type of the values */
+    valueType: ValueType,
+
+    /** The values are stored in blockSize^3 cubes */
+    blockSize: number,
+    sampling: Sampling[]
+}
+
+namespace _schema {
+    const { array, obj, int, bool, float, str } = Schema
+
+    export const schema = obj<Header>([
+        ['formatVersion', str],
+        ['axisOrder', array(int)],
+        ['origin', array(float)],
+        ['dimensions', array(float)],
+        ['spacegroup', obj<Spacegroup>([
+            ['number', int],
+            ['size', array(float)],
+            ['angles', array(float)],
+            ['isPeriodic', bool],
+        ])],
+        ['channels', array(str)],
+        ['valueType', str],
+        ['blockSize', int],
+        ['sampling', array(obj<Sampling>([
+            ['byteOffset', float],
+            ['rate', int],
+            ['valuesInfo', array(obj<ValuesInfo>([
+                ['mean', float],
+                ['sigma', float],
+                ['min', float],
+                ['max', float]
+            ]))],
+            ['sampleCount', array(int)]
+        ]))]
+    ]);
+}
+
+const headerSchema = _schema.schema;
+
+export function getValueByteSize(type: ValueType) {
+    if (type === ValueType.Float32) return 4;
+    if (type === ValueType.Int16) return 2;
+    return 1;
+}
+
+export function createValueArray(type: ValueType, size: number) {
+    switch (type) {
+        case ValueType.Float32: return new Float32Array(new ArrayBuffer(4 * size));
+        case ValueType.Int8: return new Int8Array(new ArrayBuffer(1 * size));
+        case ValueType.Int16: return new Int16Array(new ArrayBuffer(2 * size));
+    }
+    throw Error(`${type} is not a supported value format.`);
+}
+
+export function encodeHeader(header: Header) {
+    return Schema.encode(headerSchema, header);
+}
+
+export async function readHeader(file: number): Promise<{ header: Header, dataOffset: number }> {
+    let { buffer } = await File.readBuffer(file, 0, 4 * 4096);
+    const headerSize = buffer.readInt32LE(0);
+
+    if (headerSize > buffer.byteLength - 4) {
+        buffer = (await File.readBuffer(file, 0, headerSize + 4)).buffer;
+    }
+
+    const header = Schema.decode<Header>(headerSchema, buffer, 4);
+    return { header, dataOffset: headerSize + 4 };
+}

+ 171 - 0
src/servers/volume/common/file.ts

@@ -0,0 +1,171 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as fs from 'fs'
+import * as path from 'path'
+import * as DataFormat from './data-format'
+
+export const IsNativeEndianLittle = new Uint16Array(new Uint8Array([0x12, 0x34]).buffer)[0] === 0x3412;
+
+export async function openRead(filename: string) {
+    return new Promise<number>((res, rej) => {
+        fs.open(filename, 'r', async (err, file) => {
+            if (err) {
+                rej(err);
+                return;
+            }
+
+            try {
+                res(file);
+            } catch (e) {
+                fs.closeSync(file);
+            }
+        })
+    });
+}
+
+export function readBuffer(file: number, position: number, sizeOrBuffer: Buffer | number, size?: number, byteOffset?: number): Promise<{ bytesRead: number, buffer: Buffer }> {
+    return new Promise((res, rej) => {
+        if (typeof sizeOrBuffer === 'number') {
+            let buff = new Buffer(new ArrayBuffer(sizeOrBuffer));
+            fs.read(file, buff, 0, sizeOrBuffer, position, (err, bytesRead, buffer) => {
+                if (err) {
+                    rej(err);
+                    return;
+                }
+                res({ bytesRead, buffer });
+            });
+        } else {
+            if (size === void 0) {
+                rej('readBuffer: Specify size.');
+                return;
+            }
+
+            fs.read(file, sizeOrBuffer, byteOffset ? +byteOffset : 0, size, position, (err, bytesRead, buffer) => {
+                if (err) {
+                    rej(err);
+                    return;
+                }
+                res({ bytesRead, buffer });
+            });
+        }
+    })
+}
+
+export function writeBuffer(file: number, position: number, buffer: Buffer, size?: number): Promise<number> {
+    return new Promise<number>((res, rej) => {
+        fs.write(file, buffer, 0, size !== void 0 ? size : buffer.length, position, (err, written) => {
+            if (err) rej(err);
+            else res(written);
+        })
+    })
+}
+
+function makeDir(path: string, root?: string): boolean {
+    let dirs = path.split(/\/|\\/g),
+        dir = dirs.shift();
+
+    root = (root || '') + dir + '/';
+
+    try { fs.mkdirSync(root); }
+    catch (e) {
+        if (!fs.statSync(root).isDirectory()) throw new Error(e);
+    }
+
+    return !dirs.length || makeDir(dirs.join('/'), root);
+}
+
+export function exists(filename: string) {
+    return fs.existsSync(filename);
+}
+
+export function createFile(filename: string) {
+    return new Promise<number>((res, rej) => {
+        if (fs.existsSync(filename)) fs.unlinkSync(filename);
+        makeDir(path.dirname(filename));
+        fs.open(filename, 'w', (err, file) => {
+            if (err) rej(err);
+            else res(file);
+        })
+    });
+}
+
+const __emptyFunc = function () { };
+export function close(file: number | undefined) {
+    try {
+        if (file !== void 0) fs.close(file, __emptyFunc);
+    } catch (e) {
+
+    }
+}
+
+const smallBuffer = new Buffer(8);
+export async function writeInt(file: number, value: number, position: number) {
+    smallBuffer.writeInt32LE(value, 0);
+    await writeBuffer(file, position, smallBuffer, 4);
+}
+
+export interface TypedArrayBufferContext {
+    type: DataFormat.ValueType,
+    elementByteSize: number,
+    readBuffer: Buffer,
+    valuesBuffer: Uint8Array,
+    values: DataFormat.ValueArray
+}
+
+function getElementByteSize(type: DataFormat.ValueType) {
+    if (type === DataFormat.ValueType.Float32) return 4;
+    if (type === DataFormat.ValueType.Int16) return 2;
+    return 1;
+}
+
+function makeTypedArray(type: DataFormat.ValueType, buffer: ArrayBuffer): DataFormat.ValueArray {
+    if (type === DataFormat.ValueType.Float32) return new Float32Array(buffer);
+    if (type === DataFormat.ValueType.Int16) return new Int16Array(buffer);
+    return new Int8Array(buffer);
+}
+
+export function createTypedArrayBufferContext(size: number, type: DataFormat.ValueType): TypedArrayBufferContext {
+    let elementByteSize = getElementByteSize(type);
+    let arrayBuffer = new ArrayBuffer(elementByteSize * size);
+    let readBuffer = new Buffer(arrayBuffer);
+    let valuesBuffer = IsNativeEndianLittle ? arrayBuffer : new ArrayBuffer(elementByteSize * size);
+    return {
+        type,
+        elementByteSize,
+        readBuffer,
+        valuesBuffer: new Uint8Array(valuesBuffer),
+        values: makeTypedArray(type, valuesBuffer)
+    };
+}
+
+function flipByteOrder(source: Buffer, target: Uint8Array, byteCount: number, elementByteSize: number, offset: number) {
+    for (let i = 0, n = byteCount; i < n; i += elementByteSize) {
+        for (let j = 0; j < elementByteSize; j++) {
+            target[offset + i + elementByteSize - j - 1] = source[offset + i + j];
+        }
+    }
+}
+
+export async function readTypedArray(ctx: TypedArrayBufferContext, file: number, position: number, count: number, valueOffset: number, littleEndian?: boolean) {
+    let byteCount = ctx.elementByteSize * count;
+    let byteOffset = ctx.elementByteSize * valueOffset;
+
+    await readBuffer(file, position, ctx.readBuffer, byteCount, byteOffset);
+    if (ctx.elementByteSize > 1 && ((littleEndian !== void 0 && littleEndian !== IsNativeEndianLittle) || !IsNativeEndianLittle)) {
+        // fix the endian 
+        flipByteOrder(ctx.readBuffer, ctx.valuesBuffer, byteCount, ctx.elementByteSize, byteOffset);
+    }
+    return ctx.values;
+}
+
+export function ensureLittleEndian(source: Buffer, target: Buffer, byteCount: number, elementByteSize: number, offset: number) {
+    if (IsNativeEndianLittle) return;
+    if (!byteCount || elementByteSize <= 1) return;
+    flipByteOrder(source, target, byteCount, elementByteSize, offset);
+}

+ 75 - 0
src/servers/volume/local.ts

@@ -0,0 +1,75 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as LocalApi from './server/local-api'
+import VERSION from './server/version'
+
+import * as fs from 'fs'
+
+console.log(`VolumeServer ${VERSION}, (c) 2016 - now, David Sehnal`);
+console.log();
+
+function help() {
+    const exampleJobs: LocalApi.JobEntry[] = [{
+        source: {
+            filename: `g:/test/mdb/xray-1tqn.mdb`,
+            name: 'xray',
+            id: '1tqn',
+        },
+        query: {
+            kind: 'box',
+            space: 'cartesian',
+            bottomLeft: [-42.996, -64.169, -45.335],
+            topRight: [8.768, 15.316, 21.599]
+        },
+        params: {
+            forcedSamplingLevel: 2,
+            asBinary: true
+        },
+        outputFolder: 'g:/test/local-test'
+    }, {
+        source: {
+            filename: `g:/test/mdb/emd-8116.mdb`,
+            name: 'em',
+            id: '8116',
+        },
+        query: {
+            kind: 'cell'
+        },
+        params: {
+            detail: 4,
+            asBinary: true
+        },
+        outputFolder: 'g:/test/local-test'
+    }];
+
+    console.log('Usage: node local jobs.json');
+    console.log();
+    console.log('Example jobs.json:');
+    console.log(JSON.stringify(exampleJobs, null, 2));
+}
+
+async function run() {
+    if (process.argv.length !== 3) {
+        help();
+        return;
+    }
+
+    let jobs: LocalApi.JobEntry[];
+    try {
+        jobs = JSON.parse(fs.readFileSync(process.argv[2], 'utf-8'));
+    } catch (e) {
+        console.log('Error:');
+        console.error(e);
+        return;
+    }
+
+    await LocalApi.run(jobs);
+}
+
+run();

+ 86 - 0
src/servers/volume/pack.ts

@@ -0,0 +1,86 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import pack from './pack/main'
+import VERSION from './pack/version'
+
+let config = {
+    input: <{ name: string, filename: string }[]>[],
+    isPeriodic: false,
+    outputFilename: '',
+    blockSize: 96
+};
+
+function printHelp() {
+    let help = [
+        `VolumeServer Packer ${VERSION}, (c) 2016 - now, David Sehnal`,
+        ``,
+        `The input data must be CCP4/MAP mode 2 (32-bit floats) files.`,
+        ``,
+        `Usage: `,
+        ``,
+        `  node pack -v`,
+        `    Print version.`,
+        ``,
+        `  node pack -xray main.ccp4 diff.ccp4 output.mdb [-blockSize 96]`,
+        `    Pack main and diff density into a single block file.`,
+        `    Optionally specify maximum block size.`,
+        ``,
+        `  node pack -em density.map output.mdb [-blockSize 96]`,
+        `    Pack single density into a block file.`, 
+        `    Optionally specify maximum block size.`
+    ];
+    console.log(help.join('\n'));
+}
+
+function parseInput() {
+    let input = false;
+
+    if (process.argv.length <= 2) {
+        printHelp();
+        process.exit();
+        return false;
+    }
+
+    for (let i = 2; i < process.argv.length; i++) {
+        switch (process.argv[i].toLowerCase()) {
+            case '-blocksize':
+                config.blockSize = +process.argv[++i];
+                break;
+            case '-xray':
+                input = true;
+                config.input = [
+                    { name: '2Fo-Fc', filename: process.argv[++i] },
+                    { name: 'Fo-Fc', filename: process.argv[++i] }
+                ];
+                config.isPeriodic = true;
+                config.outputFilename = process.argv[++i];
+                break;
+            case '-em':
+                input = true;
+                config.input = [
+                    { name: 'em', filename: process.argv[++i] }
+                ];
+                config.outputFilename = process.argv[++i];
+                break;
+            case '-v':
+                console.log(VERSION);
+                process.exit();
+                return false;
+            default:
+                printHelp();
+                process.exit();
+                return false;
+        }
+    }
+    return input;
+}
+
+if (parseInput()) {
+    pack(config.input, config.blockSize, config.isPeriodic, config.outputFilename);
+}

+ 163 - 0
src/servers/volume/pack/ccp4.ts

@@ -0,0 +1,163 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as File from '../common/file'
+import * as DataFormat from '../common/data-format'
+
+export const enum Mode { Int8 = 0, Int16 = 1, Float32 = 2 }
+
+export interface Header {
+    name: string,
+    mode: Mode,
+    grid: number[], // grid is converted to the axis order!!
+    axisOrder: number[],
+    extent: number[],
+    origin: number[],
+    spacegroupNumber: number,
+    cellSize: number[],
+    cellAngles: number[],
+    littleEndian: boolean,
+    dataOffset: number
+}
+
+/** Represents a circular buffer for 2 * blockSize layers */
+export interface SliceBuffer {
+    buffer: File.TypedArrayBufferContext,
+    sliceCapacity: number,
+    slicesRead: number,
+
+    values: DataFormat.ValueArray,
+    sliceCount: number,
+
+    /** Have all the input slices been read? */
+    isFinished: boolean
+}
+
+export interface Data {
+    header: Header,
+    file: number,
+    slices: SliceBuffer
+}
+
+export function getValueType(header: Header) {
+    if (header.mode === Mode.Float32) return DataFormat.ValueType.Float32;
+    if (header.mode === Mode.Int16) return DataFormat.ValueType.Int16;
+    return DataFormat.ValueType.Int8;
+}
+
+export function assignSliceBuffer(data: Data, blockSize: number) {
+    const { extent } = data.header;
+    const valueType = getValueType(data.header);
+    const sliceSize = extent[0] * extent[1] * DataFormat.getValueByteSize(valueType);
+    const sliceCapacity = Math.max(1, Math.floor(Math.min(64 * 1024 * 1024, sliceSize * extent[2]) / sliceSize));
+    const buffer = File.createTypedArrayBufferContext(sliceCapacity * extent[0] * extent[1], valueType);
+    data.slices = {
+        buffer,
+        sliceCapacity,
+        slicesRead: 0,
+        values: buffer.values,
+        sliceCount: 0,
+        isFinished: false
+    };
+}
+
+function compareProp(a: any, b: any) {
+    if (a instanceof Array && b instanceof Array) {
+        if (a.length !== b.length) return false;
+        for (let i = 0; i < a.length; i++) {
+            if (a[i] !== b[i]) return false;
+        }
+        return true;
+    }
+    return a === b;
+}
+
+export function compareHeaders(a: Header, b: Header) {
+    for (const p of ['grid', 'axisOrder', 'extent', 'origin', 'spacegroupNumber', 'cellSize', 'cellAngles', 'mode']) {
+        if (!compareProp((a as any)[p], (b as any)[p])) return false;
+    }
+    return true;
+}
+
+function getArray(r: (offset: number) => number, offset: number, count: number) {
+    const ret: number[] = [];
+    for (let i = 0; i < count; i++) {
+        ret[i] = r(offset + i);
+    }
+    return ret;
+}
+
+async function readHeader(name: string, file: number) {
+    const headerSize = 1024;
+    const { buffer: data } = await File.readBuffer(file, 0, headerSize);
+
+    let littleEndian = true;
+
+    let mode = data.readInt32LE(3 * 4);
+    if (mode < 0 || mode > 2) {
+        littleEndian = false;
+        mode = data.readInt32BE(3 * 4, true);
+        if (mode < 0 || mode > 2) {
+            throw Error('Only CCP4 modes 0, 1, and 2 are supported.');
+        }
+    }
+
+    const readInt = littleEndian ? (o: number) => data.readInt32LE(o * 4) : (o: number) => data.readInt32BE(o * 4);
+    const readFloat = littleEndian ? (o: number) => data.readFloatLE(o * 4) : (o: number) => data.readFloatBE(o * 4);
+
+    const origin2k = getArray(readFloat, 49, 3);
+    const nxyzStart = getArray(readInt, 4, 3);
+    const header: Header = {
+        name,
+        mode,
+        grid: getArray(readInt, 7, 3),
+        axisOrder: getArray(readInt, 16, 3).map(i => i - 1),
+        extent: getArray(readInt, 0, 3),
+        origin: origin2k[0] === 0.0 && origin2k[1] === 0.0 && origin2k[2] === 0.0 ? nxyzStart : origin2k,
+        spacegroupNumber: readInt(22),
+        cellSize: getArray(readFloat, 10, 3),
+        cellAngles: getArray(readFloat, 13, 3),
+        // mean: readFloat(21),
+        littleEndian,
+        dataOffset: headerSize + readInt(23) /* symBytes */
+    };
+    // "normalize" the grid axis order
+    header.grid = [header.grid[header.axisOrder[0]], header.grid[header.axisOrder[1]], header.grid[header.axisOrder[2]]];
+    return header;
+}
+
+export async function readSlices(data: Data) {
+    const { slices, header } = data;
+    if (slices.isFinished) {
+        return;
+    }
+
+    const { extent } = header;
+    const sliceSize = extent[0] * extent[1];
+    const sliceByteOffset = slices.buffer.elementByteSize * sliceSize * slices.slicesRead;
+    const sliceCount = Math.min(slices.sliceCapacity, extent[2] - slices.slicesRead);
+    const sliceByteCount = sliceCount * sliceSize;
+
+    await File.readTypedArray(slices.buffer, data.file, header.dataOffset + sliceByteOffset, sliceByteCount, 0, header.littleEndian);
+    slices.slicesRead += sliceCount;
+    slices.sliceCount = sliceCount;
+
+    if (slices.slicesRead >= extent[2]) {
+        slices.isFinished = true;
+    }
+}
+
+export async function open(name: string, filename: string): Promise<Data> {
+    const file = await File.openRead(filename);
+    const header = await readHeader(name, file);
+    return {
+        header,
+        file,
+        slices: void 0 as any
+    };
+}

+ 129 - 0
src/servers/volume/pack/data-model.ts

@@ -0,0 +1,129 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+import * as CCP4 from './ccp4'
+import * as DataFormat from '../common/data-format'
+
+const FORMAT_VERSION = '1.0.0';
+
+export interface Progress {
+    current: number,
+    max: number
+}
+
+export interface ValuesInfo {
+    sum: number,
+    sqSum: number,
+    min: number,
+    max: number
+}
+
+export interface BlockBuffer {
+    values: DataFormat.ValueArray[],
+    buffers: Buffer[],
+    slicesWritten: number
+}
+
+export interface DownsamplingBuffer {
+    /** dimensions (sampleCount[1], sampleCount[0] / 2, 1), axis order (K, H, L) */
+    downsampleH: DataFormat.ValueArray,
+    /** "Cyclic" (in the 1st dimensions) buffer with dimensions (5, sampleCount[0] / 2, sampleCount[1] / 2), axis order (L, H, K),  */
+    downsampleHK: DataFormat.ValueArray,
+
+    slicesWritten: number,
+    startSliceIndex: number
+}
+
+export interface Sampling {
+    /** How many values along each axis are collapsed into 1 */
+    rate: number,
+
+    sampleCount: number[],
+
+    /** One per channel, same indexing */
+    blocks: BlockBuffer,
+    valuesInfo: ValuesInfo[],
+    downsampling?: DownsamplingBuffer[],
+
+    /** Info about location in the output file, 0 offset is where the header ends */
+    byteOffset: number,
+    byteSize: number,
+    /** Where to write the next block, relative to byteOffset */
+    writeByteOffset: number
+}
+
+/** Kernel used for downsampling */
+export interface Kernel {
+    /** The kernel size is curently fixed at 5 */
+    size: 5,
+    /** Compute new sample as c[0] * data[i - 2] + ... + c[4] * data[i + 2] */
+    coefficients: number[],
+    /** Precomputed coefficients.sum() */
+    coefficientSum: number
+}
+
+export interface Context {
+    /** Output file handle  */
+    file: number,
+
+    /** Periodic data are x-ray density files that cover the entire grid and have a [0,0,0] origin */
+    isPeriodic: boolean,
+
+    channels: CCP4.Data[],
+    valueType: DataFormat.ValueType,
+    blockSize: number,
+    /** Able to store channels.length * blockSize^3 values. */
+    cubeBuffer: Buffer,
+    /** All values are stored in little-endian format, which might not be the native endianness of the system */
+    litteEndianCubeBuffer: Buffer,
+
+    kernel: Kernel,
+    sampling: Sampling[],
+    dataByteOffset: number,
+    totalByteSize: number,
+
+    progress: Progress
+}
+
+export function createHeader(ctx: Context): DataFormat.Header {
+    const header = ctx.channels[0].header;
+    const grid = header.grid;
+
+    function normalize(data: number[]) {
+        return [data[0] / grid[0], data[1] / grid[1], data[2] / grid[2]];
+    }
+
+    return {
+        formatVersion: FORMAT_VERSION,
+        valueType: CCP4.getValueType(header),
+        blockSize: ctx.blockSize,
+        axisOrder: header.axisOrder,
+        origin: normalize(header.origin),
+        dimensions: normalize(header.extent),
+        spacegroup: { number: header.spacegroupNumber, size: header.cellSize, angles: header.cellAngles, isPeriodic: ctx.isPeriodic },
+        channels: ctx.channels.map(c => c.header.name),
+        sampling: ctx.sampling.map(s => {
+            const N = s.sampleCount[0] * s.sampleCount[1] * s.sampleCount[2];
+            const valuesInfo = [];
+            for (const { sum, sqSum, min, max } of s.valuesInfo) {
+                const mean = sum / N;
+                const sigma = Math.sqrt(Math.max(0, sqSum / N - mean * mean));
+                valuesInfo.push({ mean, sigma, min, max });
+            }
+            return {
+                byteOffset: s.byteOffset,
+                rate: s.rate,
+                valuesInfo,
+                sampleCount: s.sampleCount,
+            }
+        })
+    };
+}
+
+export function samplingBlockCount(sampling: Sampling, blockSize: number) {
+    return sampling.sampleCount.map(c => Math.ceil(c / blockSize)).reduce((c, v) => c * v, 1);
+}

+ 175 - 0
src/servers/volume/pack/downsampling.ts

@@ -0,0 +1,175 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as Data from './data-model'
+import * as DataFormat from '../common/data-format'
+
+/** 
+ * Downsamples each slice of input data and checks if there is enough data to perform 
+ * higher rate downsampling.
+ */
+export function downsampleLayer(ctx: Data.Context) {
+    for (let i = 0, _ii = ctx.sampling.length - 1; i < _ii; i++) {
+        const s = ctx.sampling[i];
+        downsampleSlice(ctx, s);
+        if (canDownsampleBuffer(s, false)) {
+            downsampleBuffer(ctx.kernel, s, ctx.sampling[i + 1], ctx.blockSize);
+        } else {
+            break;
+        }
+    }
+}
+
+/** 
+ * When the "native" (rate = 1) sampling is finished, there might still 
+ * be some data left to be processed for the higher rate samplings.
+ */
+export function finalize(ctx: Data.Context) {
+    for (let i = 0, _ii = ctx.sampling.length - 1; i < _ii; i++) {
+        const s = ctx.sampling[i];
+        // skip downsampling the 1st slice because that is guaranteed to be done in "downsampleLayer"
+        if (i > 0) downsampleSlice(ctx, s);
+        // this differs from downsampleLayer in that it needs just 1 extra slice instead of 2.
+        if (canDownsampleBuffer(s, true)) {
+            downsampleBuffer(ctx.kernel, s, ctx.sampling[i + 1], ctx.blockSize);
+        } else {
+            break;
+        }
+    }
+}
+
+/**
+ * The functions downsampleH and downsampleHK both essentially do the
+ * same thing: downsample along H (1st axis in axis order) and K (2nd axis in axis order) axes respectively.
+ * 
+ * The reason there are two copies of almost the same code is performance:
+ * Both functions use a different memory layout to improve cache coherency
+ *  - downsampleH uses the H axis as the fastest moving one
+ *  - downsampleHK uses the K axis as the fastest moving one
+ */
+
+
+function conv(w: number, c: number[], src: DataFormat.ValueArray, b: number, i0: number, i1: number, i2: number, i3: number, i4: number) {
+    return w * (c[0] * src[b + i0] + c[1] * src[b + i1] + c[2] * src[b + i2] + c[3] * src[b + i3] + c[4] * src[b + i4]);
+}
+
+/**
+ * Map from L-th slice in src to an array of dimensions (srcDims[1], (srcDims[0] / 2), 1),
+ * flipping the 1st and 2nd axis in the process to optimize cache coherency for downsampleUV call
+ * (i.e. use (K, H, L) axis order).
+ */
+function downsampleH(kernel: Data.Kernel, srcDims: number[], src: DataFormat.ValueArray, srcLOffset: number, buffer: Data.DownsamplingBuffer) {
+    const target = buffer.downsampleH;
+    const sizeH = srcDims[0], sizeK = srcDims[1], srcBaseOffset = srcLOffset * sizeH * sizeK;
+    const targetH = Math.floor((sizeH + 1) / 2);
+    const isEven = sizeH % 2 === 0;
+    const w = 1.0 / kernel.coefficientSum;
+    const c = kernel.coefficients;
+
+    for (let k = 0; k < sizeK; k++) {
+        let srcOffset = srcBaseOffset + k * sizeH;
+        let targetOffset = k;
+        target[targetOffset] = conv(w, c, src, srcOffset, 0, 0, 0, 1, 2);
+        for (let h = 1; h < targetH - 1; h++) {
+            srcOffset += 2;
+            targetOffset += sizeK;
+            target[targetOffset] = conv(w, c, src, srcOffset, -2, -1, 0, 1, 2);
+        }
+        srcOffset += 2;
+        targetOffset += sizeK;
+        if (isEven) target[targetOffset] = conv(w, c, src, srcOffset, -2, -1, 0, 1, 1);
+        else target[targetOffset] = conv(w, c, src, srcOffset, -2, -1, 0, 0, 0);
+    }
+}
+
+/** 
+ * Downsamples the first axis of the slice stored in buffer.downsampleH.
+ * The result is written into the cyclic downsampleHK buffer
+ * in the (L, H, K) axis order.
+ */
+function downsampleHK(kernel: Data.Kernel, dimsX: number[], buffer: Data.DownsamplingBuffer) {
+    const { downsampleH: src, downsampleHK: target, slicesWritten } = buffer;
+
+    const kernelSize = kernel.size;
+    const sizeH = dimsX[0], sizeK = dimsX[1];
+    const targetH = Math.floor((sizeH + 1) / 2);
+    const isEven = sizeH % 2 === 0;
+    const targetSliceSize = kernelSize * sizeK;
+    const targetBaseOffset = slicesWritten % kernelSize;
+    const w = 1.0 / kernel.coefficientSum;
+    const c = kernel.coefficients;
+
+    for (let k = 0; k < sizeK; k++) {
+        let sourceOffset = k * sizeH;
+        let targetOffset = targetBaseOffset + k * kernelSize;
+        target[targetOffset] = conv(w, c, src, sourceOffset, 0, 0, 0, 1, 2);
+        for (let h = 1; h < targetH - 1; h++) {
+            sourceOffset += 2; targetOffset += targetSliceSize;
+            target[targetOffset] = conv(w, c, src, sourceOffset, -2, -1, 0, 1, 2);
+        }
+        sourceOffset += 2; targetOffset += targetSliceSize;
+        if (isEven) target[targetOffset] = conv(w, c, src, sourceOffset, -2, -1, 0, 1, 1);
+        else target[targetOffset] = conv(w, c, src, sourceOffset, -2, -1, 0, 0, 0);
+    }
+    buffer.slicesWritten++;
+}
+
+/** Calls downsampleH and downsampleHK for each input channel separately. */
+function downsampleSlice(ctx: Data.Context, sampling: Data.Sampling) {
+    const dimsU = [sampling.sampleCount[1], Math.floor((sampling.sampleCount[0] + 1) / 2)]
+    for (let i = 0, _ii = sampling.blocks.values.length; i < _ii; i++) {
+        downsampleH(ctx.kernel, sampling.sampleCount, sampling.blocks.values[i], sampling.blocks.slicesWritten - 1, sampling.downsampling![i]);
+        downsampleHK(ctx.kernel, dimsU, sampling.downsampling![i]);
+    }
+}
+
+/** Determine if a buffer has enough data to be downsampled */
+function canDownsampleBuffer(source: Data.Sampling, finishing: boolean): boolean {
+    const buffer = source.downsampling![0];
+    const delta = buffer.slicesWritten - buffer.startSliceIndex;
+    return (finishing && delta > 0) || (delta > 2 && (delta - 3) % 2 === 0);
+}
+
+/** Downsample data in the buffer */
+function downsampleBuffer(kernel: Data.Kernel, source: Data.Sampling, target: Data.Sampling, blockSize: number) {
+    const downsampling = source.downsampling!;
+    const { slicesWritten, startSliceIndex } = downsampling[0];
+    const sizeH = target.sampleCount[0], sizeK = target.sampleCount[1], sizeHK = sizeH * sizeK;
+
+    const kernelSize = kernel.size;
+    const w = 1.0 / kernel.coefficientSum;
+    const c = kernel.coefficients;
+
+    // Indices into the 1st dimension of the cyclic buffer.
+    const i0 = Math.max(0, startSliceIndex - 2) % kernelSize;
+    const i1 = Math.max(0, startSliceIndex - 1) % kernelSize;
+    const i2 = startSliceIndex % kernelSize;
+    const i3 = Math.min(slicesWritten, startSliceIndex + 1) % kernelSize;
+    const i4 = Math.min(slicesWritten, startSliceIndex + 2) % kernelSize;
+
+    const channelCount = downsampling.length;
+    const valuesBaseOffset = target.blocks.slicesWritten * sizeHK;
+
+    for (let channelIndex = 0; channelIndex < channelCount; channelIndex++) {
+        const src = downsampling[channelIndex].downsampleHK;
+        const values = target.blocks.values[channelIndex];
+
+        for (let k = 0; k < sizeK; k++) {
+            const valuesOffset = valuesBaseOffset + k * sizeH;
+            for (let h = 0; h < sizeH; h++) {
+                const sO = kernelSize * h + kernelSize * k * sizeH;
+                const s = conv(w, c, src, sO, i0, i1, i2, i3, i4);
+                values[valuesOffset + h] = s;
+            }
+        }
+        // we have "consume" two layers of the buffer.
+        downsampling[channelIndex].startSliceIndex += 2;
+    }
+
+    target.blocks.slicesWritten++;
+}
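To make the kernel concrete: each output sample is the [1, 4, 6, 4, 1]/16 weighted average of five input samples, taken at every other input position. A 1D sketch of the same five-tap scheme (boundary handling approximated by clamping, analogous to the repeated indices in `downsampleH`/`downsampleHK`):

```ts
// 1D sketch of rate-2 downsampling with the [1, 4, 6, 4, 1]/16 kernel.
function downsample1D(src: number[]): number[] {
    const c = [1, 4, 6, 4, 1], w = 1 / 16;
    const out: number[] = [];
    for (let i = 0; i < src.length; i += 2) {
        let s = 0;
        for (let k = -2; k <= 2; k++) {
            // clamp at the boundaries instead of reading out of bounds
            const j = Math.min(src.length - 1, Math.max(0, i + k));
            s += c[k + 2] * src[j];
        }
        out.push(w * s);
    }
    return out;
}

// 8 samples in, 4 samples out.
console.log(downsample1D([0, 1, 2, 3, 4, 5, 6, 7]));
```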

+ 133 - 0
src/servers/volume/pack/main.ts

@@ -0,0 +1,133 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as CCP4 from './ccp4'
+import * as File from '../common/file'
+import * as Data from './data-model'
+import * as Sampling from './sampling'
+import * as DataFormat from '../common/data-format'
+import * as fs from 'fs'
+
+export default async function pack(input: { name: string, filename: string }[], blockSize: number, isPeriodic: boolean, outputFilename: string) {
+    try {
+        await create(outputFilename, input, blockSize, isPeriodic);
+    } catch (e) {
+        console.error('[Error] ' + e);
+    }
+}
+
+function getTime() {
+    let t = process.hrtime();
+    return t[0] * 1000 + t[1] / 1000000;
+}
+
+function updateAllocationProgress(progress: Data.Progress, progressDone: number) {
+    let old = (100 * progress.current / progress.max).toFixed(0);
+    progress.current += progressDone;
+    let $new = (100 * progress.current / progress.max).toFixed(0);
+    if (old !== $new) {
+        process.stdout.write(`\rAllocating...      ${$new}%`);
+    }
+}
+
+/**
+ * Pre-allocates the disk space to be able to do "random" writes into the entire file.
+ */
+async function allocateFile(ctx: Data.Context) {
+    const { totalByteSize, file } = ctx;
+    const buffer = new Buffer(Math.min(totalByteSize, 8 * 1024 * 1024));
+    const progress: Data.Progress = { current: 0, max: Math.ceil(totalByteSize / buffer.byteLength) };
+    let written = 0;
+    while (written < totalByteSize) {
+        written += fs.writeSync(file, buffer, 0, Math.min(totalByteSize - written, buffer.byteLength));
+        updateAllocationProgress(progress, 1);
+    }
+}
+
+function determineBlockSize(data: CCP4.Data, blockSize: number) {
+    const { extent } = data.header;
+    const maxLayerSize = 1024 * 1024 * 1024;
+    const valueCount = extent[0] * extent[1];
+    if (valueCount * blockSize <= maxLayerSize) return blockSize;
+
+    while (blockSize > 0) {
+        blockSize -= 4;
+        if (valueCount * blockSize <= maxLayerSize) return blockSize;
+    }
+
+    throw new Error('Could not determine a valid block size.');
+}
+
+async function writeHeader(ctx: Data.Context) {
+    const header = DataFormat.encodeHeader(Data.createHeader(ctx));
+    await File.writeInt(ctx.file, header.byteLength, 0);
+    await File.writeBuffer(ctx.file, 4, header);
+}
+
+async function create(filename: string, sourceDensities: { name: string, filename: string }[], sourceBlockSize: number, isPeriodic: boolean) {
+    const startedTime = getTime();
+
+    if (sourceBlockSize % 4 !== 0 || sourceBlockSize < 4) {
+        throw Error('Block size must be a positive number divisible by 4.');
+    }
+
+    if (!sourceDensities.length) {
+        throw Error('Specify at least one source density.');
+    }
+
+    process.stdout.write('Initializing... ');
+    const files: number[] = [];
+    try {
+        // Step 1a: Read the CCP4 headers
+        const channels: CCP4.Data[] = [];
+        for (const s of sourceDensities) channels.push(await CCP4.open(s.name, s.filename));
+        // Step 1b: Check if the CCP4 headers are compatible.
+        const isOk = channels.reduce((ok, s) => ok && CCP4.compareHeaders(channels[0].header, s.header), true);
+        if (!isOk) {
+            throw new Error('Input file headers are not compatible (different grid, etc.).');
+        }
+        const blockSize = determineBlockSize(channels[0], sourceBlockSize);
+        for (const ch of channels) CCP4.assignSliceBuffer(ch, blockSize);
+
+        // Step 1c: Create data context.
+        const context = await Sampling.createContext(filename, channels, blockSize, isPeriodic);
+        for (const s of channels) files.push(s.file);
+        files.push(context.file);
+        process.stdout.write('   done.\n');
+
+        console.log(`Block size: ${blockSize}`);
+
+        // Step 2: Allocate disk space.        
+        process.stdout.write('Allocating...      0%');
+        await allocateFile(context);
+        process.stdout.write('\rAllocating...      done.\n');
+
+        // Step 3: Process and write the data 
+        process.stdout.write('Writing data...    0%');
+        await Sampling.processData(context);
+        process.stdout.write('\rWriting data...    done.\n');
+
+        // Step 4: Write the header at the start of the file.
+        // The header is written last because the sigma/min/max values are computed 
+        // during step 3.
+        process.stdout.write('Writing header...  ');
+        await writeHeader(context);
+        process.stdout.write('done.\n');
+
+        // Step 5: Report the elapsed time.
+        const time = getTime() - startedTime;
+        console.log(`[Done] ${time.toFixed(0)}ms.`);
+    } finally {
+        for (let f of files) File.close(f);
+
+        // const ff = await File.openRead(filename);
+        // const hh = await DataFormat.readHeader(ff);
+        // File.close(ff);
+        // console.log(hh.header);
+    }
+} 
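A sketch of invoking the packer programmatically via the default export above (the file paths and channel names are hypothetical):

```ts
import pack from './pack/main'

// Pack a 2Fo-Fc / Fo-Fc map pair into a single block file.
pack(
    [
        { name: '2Fo-Fc', filename: '/data/ccp4/1cbs.ccp4' },
        { name: 'Fo-Fc', filename: '/data/ccp4/1cbs_diff.ccp4' }
    ],
    96,    // block size; must be a positive multiple of 4
    true,  // isPeriodic: x-ray maps typically cover the whole unit cell
    '/data/mdb/xray-1cbs.mdb'
);
```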

+ 213 - 0
src/servers/volume/pack/sampling.ts

@@ -0,0 +1,213 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as CCP4 from './ccp4'
+import * as Data from './data-model'
+import * as File from '../common/file'
+import * as Downsampling from './downsampling'
+import * as Writer from './writer'
+import * as DataFormat from '../common/data-format'
+
+export async function createContext(filename: string, channels: CCP4.Data[], blockSize: number, isPeriodic: boolean): Promise<Data.Context> {
+    const header = channels[0].header;
+    const samplingCounts = getSamplingCounts(channels[0].header.extent, blockSize);
+    const valueType = CCP4.getValueType(header);
+    const cubeBuffer = new Buffer(new ArrayBuffer(channels.length * blockSize * blockSize * blockSize * DataFormat.getValueByteSize(valueType)));
+
+    const litteEndianCubeBuffer = File.IsNativeEndianLittle
+        ? cubeBuffer
+        : new Buffer(new ArrayBuffer(channels.length * blockSize * blockSize * blockSize * DataFormat.getValueByteSize(valueType)));
+
+    // The data can be periodic iff the extent is the same as the grid and origin is 0.
+    if (header.grid.some((v, i) => v !== header.extent[i]) || header.origin.some(v => v !== 0)) {
+        isPeriodic = false;
+    }
+
+    const ctx: Data.Context = {
+        file: await File.createFile(filename),
+        isPeriodic,
+        channels,
+        valueType,
+        blockSize,
+        cubeBuffer,
+        litteEndianCubeBuffer,
+        kernel: { size: 5, coefficients: [1, 4, 6, 4, 1], coefficientSum: 16 },
+        sampling: samplingCounts.map((__, i) => createSampling(i, valueType, channels.length, samplingCounts, blockSize)),
+        dataByteOffset: 0,
+        totalByteSize: 0,
+        progress: { current: 0, max: 0 }
+    };
+
+
+    let byteOffset = 0;
+    for (const s of ctx.sampling) {
+        // Max progress = total number of blocks that need to be written.
+        ctx.progress.max += Data.samplingBlockCount(s, blockSize);
+        s.byteOffset = byteOffset;
+        byteOffset += s.byteSize;
+    }
+
+    ctx.dataByteOffset = 4 + DataFormat.encodeHeader(Data.createHeader(ctx)).byteLength;
+    ctx.totalByteSize = ctx.dataByteOffset + byteOffset;
+
+    return ctx;
+}
+
+export async function processData(ctx: Data.Context) {
+    const channel = ctx.channels[0];
+    while (!channel.slices.isFinished) {
+        for (const src of ctx.channels) {
+            await CCP4.readSlices(src);
+        }
+        await processSlices(ctx);
+    }
+}
+
+/** Determine the suitable sampling rates for the input data */
+function getSamplingCounts(baseSampleCount: number[], blockSize: number) {
+    const ret = [baseSampleCount];
+    let prev = baseSampleCount;
+    let hasSingleBoxSampling = false;
+    while (true) {
+        let next = [0, 0, 0];
+        let max = 0;
+        for (let i = 0; i < 3; i++) {
+            const s = Math.floor((prev[i] + 1) / 2);
+            if (s < 2) return ret;
+            if (s > max) max = s;
+            next[i] = s;
+        }
+        // no point in downsampling below the block size.
+        if (max < blockSize) {
+            if (hasSingleBoxSampling) return ret;
+            hasSingleBoxSampling = true;
+        }
+        ret.push(next);
+        prev = next;
+    }
+}
+
+function createBlockBuffer(sampleCount: number[], blockSize: number, valueType: DataFormat.ValueType, numChannels: number): Data.BlockBuffer {
+    const values = [];
+    for (let i = 0; i < numChannels; i++) values[i] = DataFormat.createValueArray(valueType, sampleCount[0] * sampleCount[1] * blockSize);
+    return {
+        values,
+        buffers: values.map(xs => new Buffer(xs.buffer)),
+        slicesWritten: 0
+    };
+}
+
+function createDownsamplingBuffer(valueType: DataFormat.ValueType, sourceSampleCount: number[], targetSampleCount: number[], numChannels: number): Data.DownsamplingBuffer[] {
+    const ret = [];
+    for (let i = 0; i < numChannels; i++) {
+        ret[ret.length] = {
+            downsampleH: DataFormat.createValueArray(valueType, sourceSampleCount[1] * targetSampleCount[0]),
+            downsampleHK: DataFormat.createValueArray(valueType, 5 * targetSampleCount[0] * targetSampleCount[1]),
+            slicesWritten: 0,
+            startSliceIndex: 0
+        }
+    }
+    return ret;
+}
+
+function createSampling(index: number, valueType: DataFormat.ValueType, numChannels: number, sampleCounts: number[][], blockSize: number): Data.Sampling {
+    const sampleCount = sampleCounts[index];
+    const valuesInfo: Data.ValuesInfo[] = [];
+    for (let i = 0; i < numChannels; i++) {
+        valuesInfo[valuesInfo.length] = {
+            sum: 0.0,
+            sqSum: 0.0,
+            max: Number.NEGATIVE_INFINITY,
+            min: Number.POSITIVE_INFINITY
+        }
+    }
+    return {
+        rate: 1 << index,
+        sampleCount,
+        blocks: createBlockBuffer(sampleCount, blockSize, valueType, numChannels),
+        valuesInfo,
+        downsampling: index < sampleCounts.length - 1 ? createDownsamplingBuffer(valueType, sampleCount, sampleCounts[index + 1], numChannels) : void 0,
+
+        byteOffset: 0,
+        byteSize: numChannels * sampleCount[0] * sampleCount[1] * sampleCount[2] * DataFormat.getValueByteSize(valueType),
+        writeByteOffset: 0
+    }
+}
+
+function copyLayer(ctx: Data.Context, sliceIndex: number) {
+    const { channels } = ctx;
+    const { blocks, sampleCount } = ctx.sampling[0];
+
+    const size = sampleCount[0] * sampleCount[1];
+    const srcOffset = sliceIndex * size;
+    const targetOffset = blocks.slicesWritten * size;
+
+    for (let channelIndex = 0; channelIndex < channels.length; channelIndex++) {
+        const src = channels[channelIndex].slices.values;
+        const target = blocks.values[channelIndex];
+        for (let i = 0; i < size; i++) {
+            const v = src[srcOffset + i];
+            target[targetOffset + i] = v;
+        }
+    }
+
+    blocks.slicesWritten++;
+}
+
+function updateValuesInfo(sampling: Data.Sampling) {
+    const { blocks, sampleCount } = sampling;
+    const size = blocks.slicesWritten * sampleCount[0] * sampleCount[1];
+
+    for (let channelIndex = 0; channelIndex < blocks.values.length; channelIndex++) {
+        const values = blocks.values[channelIndex];
+        const valuesInfo = sampling.valuesInfo[channelIndex];
+        let { sum, sqSum, max, min } = valuesInfo;
+        for (let i = 0; i < size; i++) {
+            const v = values[i];
+            sum += v;
+            sqSum += v * v;
+            if (v > max) max = v;
+            else if (v < min) min = v;
+        }
+        valuesInfo.sum = sum;
+        valuesInfo.sqSum = sqSum;
+        valuesInfo.max = max;
+        valuesInfo.min = min;
+    }
+}
+
+function shouldSamplingBeWritten(sampling: Data.Sampling, blockSize: number, isDataFinished: boolean) {
+    if (isDataFinished) return sampling.blocks.slicesWritten > 0;
+    return sampling.blocks.slicesWritten >= blockSize;
+}
+
+async function writeBlocks(ctx: Data.Context, isDataFinished: boolean) {
+    for (const s of ctx.sampling) {
+        if (shouldSamplingBeWritten(s, ctx.blockSize, isDataFinished)) {
+            updateValuesInfo(s);
+            await Writer.writeBlockLayer(ctx, s);
+        }
+    }
+}
+
+async function processSlices(ctx: Data.Context) {
+    const channel = ctx.channels[0];
+    const sliceCount = channel.slices.sliceCount;
+    for (let i = 0; i < sliceCount; i++) {
+        copyLayer(ctx, i);
+        Downsampling.downsampleLayer(ctx);
+
+        await writeBlocks(ctx, false);
+
+        const isDataFinished = i === sliceCount - 1 && channel.slices.isFinished;
+        if (isDataFinished) {
+            Downsampling.finalize(ctx);
+            await writeBlocks(ctx, true);
+        }
+    }
+}
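To illustrate what `getSamplingCounts` produces, a standalone restatement of the halving rule with example numbers:

```ts
// Standalone sketch of the halving rule used by getSamplingCounts.
function samplingCounts(base: number[], blockSize: number): number[][] {
    const ret = [base];
    let prev = base, hasSingleBox = false;
    while (true) {
        const next = prev.map(c => Math.floor((c + 1) / 2));
        if (next.some(c => c < 2)) return ret;
        if (Math.max(...next) < blockSize) {
            if (hasSingleBox) return ret;
            hasSingleBox = true;
        }
        ret.push(next);
        prev = next;
    }
}

// [[256,256,256], [128,128,128], [64,64,64]] -- rates 1, 2 and 4.
// The 64^3 level already fits into a single 96^3 block, so one such
// level is kept and the 32^3 level is dropped.
console.log(samplingCounts([256, 256, 256], 96));
```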

+ 1 - 0
src/servers/volume/pack/version.ts

@@ -0,0 +1 @@
+export default '0.9.2'

+ 66 - 0
src/servers/volume/pack/writer.ts

@@ -0,0 +1,66 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as Data from './data-model'
+import * as File from '../common/file'
+import * as DataFormat from '../common/data-format'
+
+/** Converts a layer to blocks and writes them to the output file. */
+export async function writeBlockLayer(ctx: Data.Context, sampling: Data.Sampling) {
+    const nU = Math.ceil(sampling.sampleCount[0] / ctx.blockSize);
+    const nV = Math.ceil(sampling.sampleCount[1] / ctx.blockSize);
+    const startOffset = ctx.dataByteOffset + sampling.byteOffset;
+
+    for (let v = 0; v < nV; v++) {
+        for (let u = 0; u < nU; u++) {
+            const size = fillCubeBuffer(ctx, sampling, u, v);
+            await File.writeBuffer(ctx.file, startOffset + sampling.writeByteOffset, ctx.litteEndianCubeBuffer, size);
+            sampling.writeByteOffset += size;
+            updateProgress(ctx.progress, 1);
+        }
+    }
+    sampling.blocks.slicesWritten = 0;
+}
+
+/** Fills a cube at position (u,v) with values from each of the channels */
+function fillCubeBuffer(ctx: Data.Context, sampling: Data.Sampling, u: number, v: number): number {
+    const { blockSize, cubeBuffer } = ctx;
+    const { sampleCount } = sampling;
+    const { buffers, slicesWritten } = sampling.blocks;
+    const elementSize = DataFormat.getValueByteSize(ctx.valueType);
+    const sizeH = sampleCount[0], sizeHK = sampleCount[0] * sampleCount[1];
+    const offsetH = u * blockSize,
+        offsetK = v * blockSize;
+    const copyH = Math.min(blockSize, sampleCount[0] - offsetH) * elementSize,
+        maxK = offsetK + Math.min(blockSize, sampleCount[1] - offsetK),
+        maxL = slicesWritten;
+
+    let writeOffset = 0;
+    for (const src of buffers) {
+        for (let l = 0; l < maxL; l++) {
+            for (let k = offsetK; k < maxK; k++) {
+                // copying the bytes directly is faster than using the buffer.write* functions.
+                const start = (l * sizeHK + k * sizeH + offsetH) * elementSize;
+                src.copy(cubeBuffer, writeOffset, start, start + copyH);
+                writeOffset += copyH;
+            }
+        }
+    }
+    // flip the byte order if needed.
+    File.ensureLittleEndian(ctx.cubeBuffer, ctx.litteEndianCubeBuffer, writeOffset, elementSize, 0);
+    return writeOffset;
+}
+
+function updateProgress(progress: Data.Progress, progressDone: number) {
+    let old = (100 * progress.current / progress.max).toFixed(0);
+    progress.current += progressDone;
+    let $new = (100 * progress.current / progress.max).toFixed(0);
+    if (old !== $new) {
+        process.stdout.write(`\rWriting data...    ${$new}%`);
+    }
+}
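The block layout arithmetic in `writeBlockLayer` is straightforward; for example:

```ts
// A 250x200 slice plane with block size 96 splits into
// ceil(250/96) x ceil(200/96) = 3 x 3 = 9 cube writes per layer;
// edge blocks simply carry fewer samples along H and K.
const nU = Math.ceil(250 / 96); // 3
const nV = Math.ceil(200 / 96); // 3
console.log(nU * nV);           // 9 blocks written per layer, per sampling
```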

+ 77 - 0
src/servers/volume/server-config.ts

@@ -0,0 +1,77 @@
+
+const Config = {
+    limits: {
+        /**
+         * Maximum number of blocks that can be read in a single query.
+         * This is somewhat tied to the maxOutputSizeInVoxelCountByPrecisionLevel
+         * in that <maximum number of voxels> = maxRequestBlockCount * <block size>^3.
+         * The default block size is 96, which corresponds to 28,311,552 voxels with 32 blocks.
+         */
+        maxRequestBlockCount: 32,
+
+        /**
+         * The maximum fractional volume of the query box (to prevent queries that are too big).
+         */
+        maxFractionalBoxVolume: 1024,
+
+        /**
+         * The (approximate) maximum desired output size in voxels, per precision level.
+         * Rule of thumb: <response gzipped size> \in [<voxel count> / 8, <voxel count> / 4];
+         *
+         * The maximum number of voxels is tied to maxRequestBlockCount.
+         */
+        maxOutputSizeInVoxelCountByPrecisionLevel: [
+            0.5 * 1024 * 1024, // ~ 80*80*80
+            1 * 1024 * 1024,
+            2 * 1024 * 1024,
+            4 * 1024 * 1024,
+            8 * 1024 * 1024,
+            16 * 1024 * 1024, // ~ 256*256*256
+            24 * 1024 * 1024
+        ]
+    },
+
+    /**
+     * Specify the prefix of the API, i.e.
+     * <host>/<apiPrefix>/<API queries>
+     */
+    apiPrefix: '/VolumeServer',
+
+    /**
+     * If not specified otherwise by the 'port' environment variable, use this port.
+     */
+    defaultPort: 1337,
+
+    /**
+     * Node (V8) sometimes exhibits GC-related issues that significantly slow down execution
+     * (https://github.com/nodejs/node/issues/8670).
+     * 
+     * Therefore an option is provided that automatically shuts down the server.
+     * For this to work, the server must be run using a daemon (e.g. forever.js on Linux
+     * or IISnode on Windows) so that it is automatically restarted after the shutdown.
+     */
+    shutdownParams: {
+        // 0 for off, server will shut down after this amount of minutes.
+        timeoutMinutes: 24 * 60 /* a day */,
+        // modifies the shutdown timer by +/- timeoutVarianceMinutes (to avoid multiple instances shutting at the same time)
+        timeoutVarianceMinutes: 60
+    },
+
+    /**
+     * Maps a request identifier to a filename.
+     * 
+     * @param source 
+     *   Source of the data.
+     * @param id
+     *   Id provided in the request. For x-ray, the PDB ID; for emd, the EMDB ID number. 
+     */
+    mapFile(source: string, id: string) {
+        switch (source.toLowerCase()) {
+            case 'x-ray': return `g:/test/mdb/xray-${id.toLowerCase()}.mdb`;
+            case 'emd': return `g:/test/mdb/${id.toLowerCase()}.mdb`;
+            default: return void 0;
+        }
+    }
+}
+
+export default Config;
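When deploying, `mapFile` is the part that needs adapting to the local data layout; everything else can usually stay at the defaults. A sketch of a drop-in replacement for the method above, assuming a Linux-style layout (the `/srv/volume-data` paths are hypothetical):

```ts
// Hypothetical mapFile variant for data stored under /srv/volume-data.
mapFile(source: string, id: string) {
    switch (source.toLowerCase()) {
        case 'x-ray': return `/srv/volume-data/xray-${id.toLowerCase()}.mdb`;
        case 'emd': return `/srv/volume-data/${id.toLowerCase()}.mdb`;
        default: return void 0;
    }
}
```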

+ 61 - 0
src/servers/volume/server.ts

@@ -0,0 +1,61 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as express from 'express'
+import * as compression from 'compression'
+
+import init from './server/web-api'
+import VERSION from './server/version'
+import ServerConfig from './server-config'
+import * as Logger from './server/utils/logger'
+import { State } from './server/state'
+
+function setupShutdown() {
+    if (ServerConfig.shutdownParams.timeoutVarianceMinutes > ServerConfig.shutdownParams.timeoutMinutes) {
+        Logger.logPlain('Server', 'Shutdown timeout variance is greater than the timer itself, ignoring.');
+    } else {
+        let tVar = 0;
+        if (ServerConfig.shutdownParams.timeoutVarianceMinutes > 0) {
+            tVar = 2 * (Math.random() - 0.5) * ServerConfig.shutdownParams.timeoutVarianceMinutes;
+        }
+        let tMs = (ServerConfig.shutdownParams.timeoutMinutes + tVar) * 60 * 1000;
+
+        console.log(`----------------------------------------------------------------------------`);
+        console.log(`  The server will shut down in ${Logger.formatTime(tMs)} to prevent slow performance.`);
+        console.log(`  Please make sure a daemon is running that will automatically restart it.`);
+        console.log(`----------------------------------------------------------------------------`);
+        console.log();
+
+        setTimeout(() => {
+            if (State.pendingQueries > 0) {
+                State.shutdownOnZeroPending = true;
+            } else {
+                Logger.logPlain('Server', `Shut down due to timeout.`);
+                process.exit(0);
+            }
+        }, tMs);
+    }
+}
+
+
+let port = process.env.port || ServerConfig.defaultPort;
+
+let app = express();
+app.use(compression({ level: 6, memLevel: 9, chunkSize: 16 * 16384, filter: () => true }));
+init(app);
+
+app.listen(port);
+
+console.log(`VolumeServer ${VERSION}, (c) 2016 - now, David Sehnal`);
+console.log(``);
+console.log(`The server is running on port ${port}.`);
+console.log(``);
+
+if (ServerConfig.shutdownParams && ServerConfig.shutdownParams.timeoutMinutes > 0) {
+    setupShutdown();
+}

+ 131 - 0
src/servers/volume/server/algebra/box.ts

@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2016 - now, David Sehnal, licensed under Apache 2.0, See LICENSE file for more info.
+ */
+
+import * as Coords from './coordinate'
+import { SpacegroupCell } from 'mol-math/geometry';
+
+export interface Box<C extends Coords.Coord<Coords.Space>> { a: C, b: C }
+
+export interface Cartesian extends Box<Coords.Cartesian> { }
+export interface Fractional extends Box<Coords.Fractional> { }
+export interface Grid<K> extends Box<Coords.Grid<K>> { }
+
+///////////////////////////////////////////
+// CONVERSIONS
+///////////////////////////////////////////
+
+export function cartesianToFractional(box: Cartesian, spacegroup: SpacegroupCell): Fractional {
+    const { a: l, b: r } = box;
+    const corners = [
+        [l[0], l[1], l[2]],
+        [r[0], l[1], l[2]],
+        [l[0], r[1], l[2]],
+        [l[0], l[1], r[2]],
+        [r[0], r[1], l[2]],
+        [r[0], l[1], r[2]],
+        [l[0], r[1], r[2]],
+        [r[0], r[1], r[2]],
+    ].map(c => Coords.cartesianToFractional(Coords.cartesian(c[0], c[1], c[2]), spacegroup));
+    return bounding(corners);
+}
+
+export function fractionalToGrid<K>(box: Fractional, domain: Coords.GridDomain<K>): Grid<K> {
+    return { a: Coords.fractionalToGrid(box.a, domain, 'bottom'), b: Coords.fractionalToGrid(box.b, domain, 'top') }
+}
+
+export function gridToFractional<K>(box: Grid<K>): Fractional {
+    return { a: Coords.gridToFractional(box.a), b: Coords.gridToFractional(box.b) }
+}
+
+export function fractionalBoxReorderAxes(box: Fractional, axisOrder: number[]) {
+    const { a, b } = box;
+    return {
+        a: Coords.withCoord(a, a[axisOrder[0]], a[axisOrder[1]], a[axisOrder[2]]),
+        b: Coords.withCoord(b, b[axisOrder[0]], b[axisOrder[1]], b[axisOrder[2]])
+    }
+}
+
+export function expandGridBox<K>(box: Grid<K>, by: number) {
+    const { a, b } = box;
+    return {
+        a: Coords.withCoord(a, a[0] - by, a[1] - by, a[2] - by),
+        b: Coords.withCoord(b, b[0] + by, b[1] + by, b[2] + by)
+    }
+}
+
+///////////////////////////////////////////
+// MISC
+///////////////////////////////////////////
+
+export function shift<C extends Coords.Coord<S>, S extends Coords.Space>(box: Box<C>, offset: C): Box<C> {
+    return { a: Coords.add(box.a, offset), b: Coords.add(box.b, offset) } as Box<C>;
+}
+
+export function clampGridToSamples<C extends Coords.Grid<K>, K>(box: Box<C>): Box<C> {
+    return { a: Coords.clampGridToSamples(box.a), b: Coords.clampGridToSamples(box.b) } as Box<C>;
+}
+
+export function fractionalToDomain<K>(box: Fractional, kind: K, delta: Coords.Fractional): Coords.GridDomain<K> {
+    const ds = Coords.fractional(box.b[0] - box.a[0], box.b[1] - box.a[1], box.b[2] - box.a[2]);
+    return Coords.domain(kind, {
+        delta,
+        origin: box.a,
+        dimensions: ds,
+        sampleCount: Coords.sampleCounts(ds, delta)
+    });
+}
+
+export function fractionalFromBlock(block: Coords.Grid<'Block'>): Fractional {
+    const { domain } = block;
+    const a = Coords.gridToFractional(block);
+    const b = Coords.add(a, domain.delta);
+    for (let i = 0; i < 3; i++) {
+        b[i] = Math.min(b[i], domain.origin[i] + domain.dimensions[i]);
+    }
+    return { a, b }
+}
+
+export function bounding<C extends Coords.Coord<Coords.Space>>(xs: C[]): Box<C> {
+    const a = Coords.clone(xs[0]);
+    const b = Coords.clone(xs[0]);
+
+    for (const x of xs) {
+        for (let i = 0; i < 3; i++) {
+            a[i] = Math.min(a[i], x[i]);
+            b[i] = Math.max(b[i], x[i]);
+        }
+    }
+    return { a, b }
+}
+
+export function areIntersecting<C extends Coords.Coord<S>, S extends Coords.Space>(box1: Box<C>, box2: Box<C>) {
+    for (let i = 0; i < 3; i++) {
+        const x = box1.a[i], y = box1.b[i];
+        const u = box2.a[i], v = box2.b[i];
+        if (x > v || y < u) return false;
+    }
+    return true;
+}
+
+export function intersect<C extends Coords.Coord<S>, S extends Coords.Space>(box1: Box<C>, box2: Box<C>): Box<C> | undefined {
+    let a = Coords.clone(box1.a);
+    let b = Coords.clone(box1.a);
+
+    for (let i = 0; i < 3; i++) {
+        const x = box1.a[i], y = box1.b[i];
+        const u = box2.a[i], v = box2.b[i];
+        if (x > v || y < u) return void 0;
+        a[i] = Math.max(x, u);
+        b[i] = Math.min(y, v);
+    }
+    return { a, b };
+}
+
+export function dimensions<C extends Coords.Coord<S>, S extends Coords.Space>(box: Box<C>): number[] {
+    return [box.b[0] - box.a[0], box.b[1] - box.a[1], box.b[2] - box.a[2]];
+}
+
+export function volume<C extends Coords.Coord<S>, S extends Coords.Space>(box: Box<C>) {
+    return (box.b[0] - box.a[0]) * (box.b[1] - box.a[1]) * (box.b[2] - box.a[2]);
+}
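A small usage sketch of the box algebra (fractional coordinates, using the constructors from `./coordinate`):

```ts
import * as Box from './box'
import * as Coords from './coordinate'

// Two overlapping fractional boxes.
const b1 = { a: Coords.fractional(0, 0, 0), b: Coords.fractional(1, 1, 1) };
const b2 = { a: Coords.fractional(0.5, 0.5, 0.5), b: Coords.fractional(2, 2, 2) };

console.log(Box.areIntersecting(b1, b2)); // true
const i = Box.intersect(b1, b2)!;         // { a: [0.5,0.5,0.5], b: [1,1,1] }
console.log(Box.dimensions(i));           // [0.5, 0.5, 0.5]
console.log(Box.volume(i));               // 0.125
```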

+ 166 - 0
src/servers/volume/server/algebra/coordinate.ts

@@ -0,0 +1,166 @@
+/*
+ * Copyright (c) 2016 - now, David Sehnal, licensed under Apache 2.0, See LICENSE file for more info.
+ */
+
+import { Mat4, Vec3 } from 'mol-math/linear-algebra'
+import { SpacegroupCell } from 'mol-math/geometry'
+
+/** Information about a region sampled in fractional coordinates */
+export interface GridInfo {
+    /** Origin in fractional coords. */
+    origin: Fractional,
+    /** Box dimensions in fractional coords. */
+    dimensions: Fractional,
+    /** Grid delta in fractional coordinates along each axis (in axis order) */
+    delta: Fractional,
+    /** Sample count of the grid box */
+    sampleCount: number[]
+}
+
+/**
+ * Grid domain with the supplied info and "kind".
+ * The "kind" is used so that the TypeScript compiler
+ * can distinguish between different types of grids,
+ * e.g. GridDomain<'Data'>, GridDomain<'Query'>, GridDomain<'Block'>, etc.
+ */
+export interface GridDomain<K> extends GridInfo { kind: K, sampleVolume: number }
+
+export const enum Space { Cartesian, Fractional, Grid }
+export interface Coord<S extends Space> { kind: S, '0': number, '1': number, '2': number, [index: number]: number }
+export interface Cartesian extends Coord<Space.Cartesian> { }
+export interface Fractional extends Coord<Space.Fractional> { }
+export interface Grid<K> extends Coord<Space.Grid> { domain: GridDomain<K> }
+
+///////////////////////////////////////////
+// CONSTRUCTORS
+///////////////////////////////////////////
+
+export function domain<K>(kind: K, info: GridInfo): GridDomain<K> {
+    const sc = info.sampleCount;
+    return {
+        kind,
+        delta: info.delta,
+        dimensions: info.dimensions,
+        origin: info.origin,
+        sampleCount: info.sampleCount,
+        sampleVolume: sc[0] * sc[1] * sc[2]
+    };
+}
+
+export function cartesian(x: number, y: number, z: number): Cartesian {
+    return { 0: x, 1: y, 2: z, kind: Space.Cartesian };
+}
+
+export function fractional(x: number, y: number, z: number): Fractional {
+    return { 0: x, 1: y, 2: z, kind: Space.Fractional };
+}
+
+export function grid<K>(domain: GridDomain<K>, x: number, y: number, z: number): Grid<K> {
+    return { 0: x, 1: y, 2: z, kind: Space.Grid, domain };
+}
+
+export function withCoord<C extends (Coord<Space> | Grid<any>)>(a: C, x: number, y: number, z: number): C {
+    switch (a.kind) {
+        case Space.Cartesian: return cartesian(x, y, z) as C;
+        case Space.Fractional: return fractional(x, y, z) as C;
+        case Space.Grid: return grid((a as Grid<any>).domain, x, y, z) as C;
+    }
+}
+
+export function clone<C extends (Coord<Space> | Grid<any>)>(a: C): C {
+    return withCoord(a, a[0], a[1], a[2]);
+}
+
+///////////////////////////////////////////
+// CONVERSIONS
+///////////////////////////////////////////
+
+export function cartesianToFractional(a: Cartesian, spacegroup: SpacegroupCell): Fractional {
+    const coord = Helpers.transform(a, spacegroup.toFractional);
+    return fractional(coord[0], coord[1], coord[2]);
+}
+
+export function fractionalToGrid<K>(a: Fractional, domain: GridDomain<K>, snap: 'bottom' | 'top'): Grid<K> {
+    const { origin, delta } = domain;
+    const coord = grid(domain, 0.1, 0.1, 0.1);
+    for (let i = 0; i < 3; i++) {
+        coord[i] = Helpers.snap((a[i] - origin[i]) / delta[i], snap);
+    }
+    return coord;
+}
+
+export function gridToFractional<K>(a: Grid<K>): Fractional {
+    const { origin, delta } = a.domain;
+    const coord = fractional(0.1, 0.1, 0.1);
+    for (let i = 0; i < 3; i++) {
+        coord[i] = a[i] * delta[i] + origin[i];
+    }
+    return coord;
+}
+
+///////////////////////////////////////////
+// MISC
+///////////////////////////////////////////
+
+export function clampGridToSamples<K>(a: Grid<K>): Grid<K> {
+    const { sampleCount } = a.domain;
+    const coord = withCoord(a, 0, 0, 0);
+    for (let i = 0; i < 3; i++) {
+        if (a[i] < 0) coord[i] = 0;
+        else if (a[i] > sampleCount[i]) coord[i] = sampleCount[i];
+        else coord[i] = a[i];
+    }
+    return coord;
+}
+
+export function add<S extends Space>(a: Coord<S>, b: Coord<S>): Coord<S> {
+    return withCoord(a, a[0] + b[0], a[1] + b[1], a[2] + b[2]);
+}
+
+export function sub<S extends Space>(a: Coord<S>, b: Coord<S>): Coord<S> {
+    return withCoord(a, a[0] - b[0], a[1] - b[1], a[2] - b[2]);
+}
+
+export function invert<S extends Space>(a: Coord<S>): Coord<S> {
+    return withCoord(a, -a[0], -a[1], -a[2]);
+}
+
+/** Maps each grid point to a unique integer */
+export function linearGridIndex<K>(a: Grid<K>) {
+    const samples = a.domain.sampleCount;
+    return a[0] + samples[0] * (a[1] + a[2] * samples[1]);
+}
+
+export function gridMetrics(dimensions: { [i: number]: number }) {
+    return {
+        sizeX: dimensions[0],
+        sizeXY: dimensions[0] * dimensions[1],
+        sizeXYZ: dimensions[0] * dimensions[1] * dimensions[2]
+    };
+}
+
+export function sampleCounts(dimensions: Fractional, delta: Fractional) {
+    return [
+        Helpers.snap(dimensions[0] / delta[0], 'top'),
+        Helpers.snap(dimensions[1] / delta[1], 'top'),
+        Helpers.snap(dimensions[2] / delta[2], 'top')
+    ];
+}
+
+// to prevent floating point rounding errors
+export function round(v: number) {
+    return Math.round(10000000 * v) / 10000000;
+}
+
+namespace Helpers {
+    export function transform(x: { [index: number]: number }, matrix: Mat4) {
+        return Vec3.transformMat4(Vec3.zero(), x as Vec3, matrix);
+    }
+
+    export function snap(v: number, to: 'bottom' | 'top') {
+        switch (to) {
+            case 'bottom': return Math.floor(round(v)) | 0;
+            case 'top': return Math.ceil(round(v)) | 0;
+        }
+    }
+}
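A sketch of how the snapping in `fractionalToGrid` behaves (the domain values are illustrative):

```ts
import * as Coords from './coordinate'

// A 'Data' domain: the unit box sampled 100x along each axis.
const dataDomain = Coords.domain('Data', {
    origin: Coords.fractional(0, 0, 0),
    dimensions: Coords.fractional(1, 1, 1),
    delta: Coords.fractional(0.01, 0.01, 0.01),
    sampleCount: [100, 100, 100]
});

// 'bottom' snaps down and 'top' snaps up, so converting a box with
// (bottom, top) never shrinks the requested region:
const p = Coords.fractional(0.123, 0.5, 0.999);
Coords.fractionalToGrid(p, dataDomain, 'bottom'); // grid point [12, 50, 99]
Coords.fractionalToGrid(p, dataDomain, 'top');    // grid point [13, 50, 100]
```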

+ 72 - 0
src/servers/volume/server/api.ts

@@ -0,0 +1,72 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as File from '../common/file'
+import execute from './query/execute'
+import * as Data from './query/data-model'
+import * as Logger from './utils/logger'
+import * as DataFormat from '../common/data-format'
+import ServerConfig from '../server-config'
+
+export function getOutputFilename(source: string, id: string, { asBinary, box, detail, forcedSamplingLevel }: Data.QueryParams) {
+    function n(s: string) { return (s || '').replace(/[ \n\t]/g, '').toLowerCase() }
+    function r(v: number) { return Math.round(10 * v) / 10; }
+    const det = forcedSamplingLevel !== void 0
+        ? `l${forcedSamplingLevel}`
+        : `d${Math.min(Math.max(0, detail | 0), ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel.length - 1)}`;
+    const boxInfo = box.kind === 'Cell'
+        ? 'cell'
+        : `${box.kind === 'Cartesian' ? 'cartn' : 'frac'}_${r(box.a[0])}_${r(box.a[1])}_${r(box.a[2])}_${r(box.b[0])}_${r(box.b[1])}_${r(box.b[2])}`;
+    return `${n(source)}_${n(id)}-${boxInfo}_${det}.${asBinary ? 'bcif' : 'cif'}`;
+}
+
+/** Reads the header and includes information about available detail levels */
+export async function getHeaderJson(filename: string | undefined, sourceId: string) {
+    Logger.logPlain('Header', sourceId);
+    try {
+        if (!filename || !File.exists(filename)) {
+            Logger.errorPlain(`Header ${sourceId}`, 'File not found.');
+            return void 0;
+        }
+        const header = { ...await readHeader(filename, sourceId) } as DataFormat.Header;
+        const { sampleCount } = header!.sampling[0];
+        const maxVoxelCount = sampleCount[0] * sampleCount[1] * sampleCount[2];
+        const precisions = ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel
+            .map((maxVoxels, precision) => ({ precision, maxVoxels }));
+        const availablePrecisions = [];
+        for (const p of precisions) {
+            availablePrecisions.push(p);
+            if (p.maxVoxels > maxVoxelCount) break;
+        }
+        (header as any).availablePrecisions = availablePrecisions;
+        (header as any).isAvailable = true;
+        return JSON.stringify(header, null, 2);
+    } catch (e) {
+        Logger.errorPlain(`Header ${sourceId}`, e);
+        return void 0;
+    }
+}
+
+export async function queryBox(params: Data.QueryParams, outputProvider: () => Data.QueryOutputStream) {
+    return await execute(params, outputProvider);
+}
+
+async function readHeader(filename: string | undefined, sourceId: string) {
+    let file: number | undefined = void 0;
+    try {
+        if (!filename) return void 0;
+        file = await File.openRead(filename);
+        const header = await DataFormat.readHeader(file);
+        return header.header;
+    } catch (e) {
+        Logger.errorPlain(`Info ${sourceId}`, e);
+        return void 0;
+    } finally {
+        File.close(file);
+    }
+}
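For reference, `getOutputFilename` yields names like the following (fields per the `QueryParams` destructuring above; the source file path is hypothetical and the optional `forcedSamplingLevel` is assumed omittable):

```ts
import { getOutputFilename } from './api'
import * as Coords from './algebra/coordinate'

getOutputFilename('x-ray', '1cbs', {
    sourceFilename: '/data/mdb/xray-1cbs.mdb',
    sourceId: 'x-ray/1cbs',
    asBinary: true,
    box: {
        kind: 'Cartesian',
        a: Coords.cartesian(0, 0, 0),
        b: Coords.cartesian(10.5, 10.5, 10.5)
    },
    detail: 2
});
// => 'x-ray_1cbs-cartn_0_0_0_10.5_10.5_10.5_d2.bcif'
```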

+ 174 - 0
src/servers/volume/server/documentation.ts

@@ -0,0 +1,174 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import VERSION from './version'
+import ServerConfig from '../server-config'
+
+function detail(i: number) {
+    return `<span class='id'>${i}</span><small> (${Math.round(100 * ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel[i] / 1000 / 1000) / 100 }M voxels)</small>`;
+}
+const detailMax = ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel.length - 1;
+const dataSource = `Specifies the data source (determined by the experiment method). Currently, <span class='id'>x-ray</span> and <span class='id'>em</span> sources are supported.`;
+const entryId = `Id of the entry. For <span class='id'>x-ray</span>, use PDB ID (i.e. <span class='id'>1cbs</span>) and for <span class='id'>em</span> use EMDB id (i.e. <span class='id'>emd-8116</span>).`;
+
+export default `
+<!DOCTYPE html>
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta charset="utf-8" />
+<link rel='shortcut icon' href='data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAnUExURQAAAMIrHrspHr0oH7soILonHrwqH7onILsoHrsoH7soH7woILwpIKgVokoAAAAMdFJOUwAQHzNxWmBHS5XO6jdtAmoAAACZSURBVDjLxZNRCsQgDAVNXmwb9f7nXZEaLRgXloXOhwQdjMYYwpOLw55fBT46KhbOKhmRR2zLcFJQj8UR+HxFgArIF5BKJbEncC6NDEdI5SatBRSDJwGAoiFDONrEJXWYhGMIcRJGCrb1TOtDahfUuQXd10jkFYq0ViIrbUpNcVT6redeC1+b9tH2WLR93Sx2VCzkv/7NjfABxjQHksGB7lAAAAAASUVORK5CYII=' />
+<title>VolumeServer (${VERSION})</title>
+<style>
+html { -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; }
+body { margin: 0; font-family: "Helvetica Neue",Helvetica,Arial,sans-serif; font-weight: 300; color: #333; line-height: 1.42857143; font-size: 14px }
+.container { padding: 0 15px; max-width: 970px; margin: 0 auto; }
+small { font-size: 80% }
+h2, h4 { font-weight: 500; line-height: 1.1; }
+h2 { color: black; font-size: 24px; }
+h4 { font-size: 18px; margin: 20px 0 10px 0 }
+h2 small { color: #777; font-weight: 300 }
+hr { box-sizing: content-box; height: 0; overflow: visible; }
+a { background-color: transparent; -webkit-text-decoration-skip: objects; text-decoration: none }
+a:active, a:hover { outline-width: 0; }
+a:focus, a:hover { text-decoration: underline; color: #23527c }
+.list-unstyled { padding: 0; list-style: none; margin: 0 0 10px 0 }
+.cs-docs-query-wrap { padding: 24px 0; border-bottom: 1px solid #eee }
+.cs-docs-query-wrap > h2 { margin: 0; color: black; }
+.cs-docs-query-wrap > h2 > span { color: #DE4D4E; font-family: Menlo,Monaco,Consolas,"Courier New",monospace; font-size: 90% }
+.cs-docs-param-name, .cs-docs-template-link { color: #DE4D4E; font-family: Menlo,Monaco,Consolas,"Courier New",monospace }
+table {margin: 0; padding: 0; }
+table th { font-weight: bold; border-bottom: none; text-align: left; padding: 6px 12px }
+td { padding: 6px 12px }
+td:not(:last-child), th:not(:last-child) { border-right: 1px dotted #ccc }
+tr:nth-child(even) { background: #f9f9f9 }
+span.id  { color: #DE4D4E; font-family: Menlo,Monaco,Consolas,"Courier New",monospace; }
+</style>
+</head>
+<body>
+<div class="container">
+<div style='text-align: center; margin-top: 24px;'><span style='font-weight: bold; font-size: 16pt'>VolumeServer</span> <span>${VERSION}</span></div>
+
+<div style='text-align: justify; padding: 24px 0; border-bottom: 1px solid #eee'>
+  <p>
+    <b>VolumeServer</b> is a service for accessing subsets of volumetric density data. It automatically downsamples the data
+    depending on the volume of the requested region to reduce the bandwidth requirements and provide near-instant access to even the
+    largest data sets.
+  </p>
+  <p>
+    It uses the text based <a href='https://en.wikipedia.org/wiki/Crystallographic_Information_File'>CIF</a> and binary
+    <a href='https://github.com/dsehnal/BinaryCIF' style='font-weight: bold'>BinaryCIF</a>
+    formats to deliver the data to the client.
+    Server support is integrated into the <a href='https://github.com/dsehnal/LiteMol' style='font-weight: bold'>LiteMol Viewer</a>.
+  </p>
+</div>
+
+<div class="cs-docs-query-wrap">
+  <h2>Data Header / Check Availability <span>/&lt;source&gt;/&lt;id&gt;</span><br>
+  <small>Returns a JSON response specifying if data is available and the maximum region that can be queried.</small></h2>
+  <div id="coordserver-documentation-ambientResidues-body" style="margin: 24px 24px 0 24px">
+    <h4>Examples</h4>
+    <a href="/VolumeServer/x-ray/1cbs" class="cs-docs-template-link" target="_blank" rel="nofollow">/x-ray/1cbs</a><br>
+    <a href="/VolumeServer/em/emd-8116" class="cs-docs-template-link" target="_blank" rel="nofollow">/em/emd-8116</a>
+    <h4>Parameters</h4>
+    <table cellpadding="0" cellspacing="0" style='width: 100%'>
+    <tbody><tr><th style='width: 80px'>Name</th><th>Description</th></tr>
+    <tr>
+    <td class="cs-docs-param-name">source</td>
+    <td>${dataSource}</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">id</td>
+    <td>${entryId}</td>
+    </tr>
+    </tbody></table>
+  </div>
+</div>
+
+<div class="cs-docs-query-wrap">
+  <h2>Box <span>/&lt;source&gt;/&lt;id&gt;/box/&lt;a,b,c&gt;/&lt;u,v,w&gt;?&lt;optional parameters&gt;</span><br>
+  <small>Returns density data inside the specified box for the given entry. For X-ray data, returns 2Fo-Fc and Fo-Fc volumes in a single response.</small></h2>
+  <div style="margin: 24px 24px 0 24px">
+    <h4>Examples</h4>
+    <a href="/VolumeServer/em/emd-8003/box/-2,7,10/4,10,15.5?encoding=cif&space=cartesian" class="cs-docs-template-link" target="_blank" rel="nofollow">/em/emd-8003/box/-2,7,10/4,10,15.5?excoding=cif&space=cartesian</a><br>
+    <a href="/VolumeServer/x-ray/1cbs/box/0.1,0.1,0.1/0.23,0.31,0.18?space=fractional" class="cs-docs-template-link" target="_blank" rel="nofollow">/x-ray/1cbs/box/0.1,0.1,0.1/0.23,0.31,0.18?space=fractional</a>
+    <h4>Parameters</h4>
+    <table cellpadding="0" cellspacing="0" style='width: 100%'>
+    <tbody><tr><th style='width: 80px'>Name</th><th>Description</th></tr>
+    <tr>
+    <td class="cs-docs-param-name">source</td>
+    <td>${dataSource}</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">id</td>
+    <td>${entryId}</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">a,b,c</td>
+    <td>Bottom left corner of the query region in Cartesian or fractional coordinates (determined by the <span class='id'>&amp;space</span> query parameter).</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">u,v,w</td>
+    <td>Top right corner of the query region in Cartesian or fractional coordinates (determined by the <span class='id'>&amp;space</span> query parameter).</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">encoding</td>
+    <td>Determines if text based <span class='id'>CIF</span> or binary <span class='id'>BinaryCIF</span> encoding is used. An optional argument, default is <span class='id'>BinaryCIF</span> encoding.</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">space</td>
+    <td>Determines the coordinate space the query is in. Can be <span class='id'>cartesian</span> or <span class='id'>fractional</span>. An optional argument, default values is <span class='id'>cartesian</span>.</td>
+    </tr>
+    <tr>
+      <td class="cs-docs-param-name">detail</td>
+      <td>
+        Determines the maximum number of voxels the query can return. Possible values are in the range from ${detail(0)} to ${detail(detailMax)}.
+        Default value is <span class='id'>0</span>. Note: different detail levels might lead to the same result.
+      </td>
+    </tr>
+    </tbody></table>
+  </div>
+</div>
+
+<div class="cs-docs-query-wrap">
+  <h2>Cell <span>/&lt;source&gt;/&lt;id&gt;/cell?&lt;optional parameters&gt;</span><br>
+  <small>Returns (downsampled) volume data for the entire "data cell". For X-ray data, returns unit cell of 2Fo-Fc and Fo-Fc volumes, for EM data returns everything.</small></h2>
+  <div style="margin: 24px 24px 0 24px">
+    <h4>Example</h4>
+    <a href="/VolumeServer/em/emd-8116/cell?detail=1" class="cs-docs-template-link" target="_blank" rel="nofollow">/em/emd-8116/cell?detail=1</a><br>
+    <h4>Parameters</h4>
+    <table cellpadding="0" cellspacing="0" style='width: 100%'>
+    <tbody><tr><th style='width: 80px'>Name</th><th>Description</th></tr>
+    <tr>
+    <td class="cs-docs-param-name">source</td>
+    <td>${dataSource}</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">id</td>
+    <td>${entryId}</td>
+    </tr>
+    <tr>
+    <td class="cs-docs-param-name">encoding</td>
+    <td>Determines if text based <span class='id'>CIF</span> or binary <span class='id'>BinaryCIF</span> encoding is used. An optional argument, default is <span class='id'>BinaryCIF</span> encoding.</td>
+    </tr>
+    <tr>
+      <td class="cs-docs-param-name">detail</td>
+      <td>
+        Determines the maximum number of voxels the query can return. Possible values are in the range from ${detail(0)} to ${detail(detailMax)}.
+        Default value is <span class='id'>0</span>. Note: different detail levels might lead to the same result.
+      </td>
+    </tr>
+    </tbody></table>
+  </div>
+</div>
+
+
+<div style="color: #999;font-size:smaller;margin: 20px 0; text-align: right">&copy; 2016 &ndash; now, David Sehnal | Node ${process.version}</div>
+
+</body>
+</html>
+`;

+ 143 - 0
src/servers/volume/server/local-api.ts

@@ -0,0 +1,143 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as Api from './api'
+import * as Data from './query/data-model'
+import * as Coordinate from './algebra/coordinate'
+
+import * as fs from 'fs'
+import * as path from 'path'
+
+export interface JobEntry {
+    source: {
+        filename: string,
+        name: string,
+        id: string
+    },
+    query: {
+        kind: 'box' | 'cell',
+        space?: 'fractional' | 'cartesian',
+        bottomLeft?: number[],
+        topRight?: number[],
+    }
+    params: {
+        /** Determines the detail level as specified in server-config */
+        detail?: number,
+        /** 
+         * Determines the sampling level:
+         * 1: Original data
+         * 2: Downsampled by factor 1/2
+         * ...
+         * N: Downsampled by factor 1/2^(N-1)
+         */
+        forcedSamplingLevel?: number,
+        asBinary: boolean,
+    },
+    outputFolder: string
+}
+
+export async function run(jobs: JobEntry[]) {
+    let progress = 0;
+    let started = getTime();
+    for (const job of jobs) {
+        try {
+            await query(job);
+        } catch (e) {
+            console.error(e);
+        }
+        progress++;
+        const elapsed = (getTime() - started) / 1000;
+        console.log(`[Progress] ${progress}/${jobs.length} in ${elapsed.toFixed(2)}s`);
+    }
+}
+
+function getTime() {
+    let t = process.hrtime();
+    return t[0] * 1000 + t[1] / 1000000;
+}
+
+async function query(job: JobEntry) {
+    let box: Data.QueryParamsBox;
+
+    if (job.query.kind.toLocaleLowerCase() === 'cell') {
+        box = { kind: 'Cell' };
+    } else if (job.query.space === 'fractional') {
+        box = {
+            kind: 'Fractional',
+            a: Coordinate.fractional(job.query.bottomLeft![0], job.query.bottomLeft![1], job.query.bottomLeft![2]),
+            b: Coordinate.fractional(job.query.topRight![0], job.query.topRight![1], job.query.topRight![2]),
+        }
+    } else {
+        box = {
+            kind: 'Cartesian',
+            a: Coordinate.cartesian(job.query.bottomLeft![0], job.query.bottomLeft![1], job.query.bottomLeft![2]),
+            b: Coordinate.cartesian(job.query.topRight![0], job.query.topRight![1], job.query.topRight![2]),
+        }
+    }
+
+    const params: Data.QueryParams = {
+        sourceFilename: job.source.filename,
+        sourceId: job.source.id,
+        asBinary: job.params.asBinary,
+        box,
+        detail: !job.params.detail ? 0 : job.params.detail,
+        forcedSamplingLevel: job.params.forcedSamplingLevel
+    };
+
+    if (!fs.existsSync(job.outputFolder)) {
+        makeDir(job.outputFolder);
+    }
+
+    const filename = path.join(job.outputFolder, Api.getOutputFilename(job.source.name, job.source.id, params))
+    const res = () => wrapFile(filename);
+    await Api.queryBox(params, res)
+}
+
+function makeDir(path: string, root?: string): boolean {
+    let dirs = path.split(/\/|\\/g),
+        dir = dirs.shift();
+
+    root = (root || '') + dir + '/';
+
+    try { fs.mkdirSync(root); }
+    catch (e) {
+        if (!fs.statSync(root).isDirectory()) throw new Error(e);
+    }
+
+    return !dirs.length || makeDir(dirs.join('/'), root);
+}
+
+function wrapFile(fn: string) {
+    const w = {
+        open(this: any) {
+            if (this.opened) return;
+            this.file = fs.openSync(fn, 'w');
+            this.opened = true;
+        },
+        writeBinary(this: any, data: Uint8Array) {
+            this.open();
+            fs.writeSync(this.file, new Buffer(data));
+            return true;
+        },
+        writeString(this: any, data: string) {
+            this.open();
+            fs.writeSync(this.file, data);
+            return true;
+        },
+        end(this: any) {
+            if (!this.opened || this.ended) return;
+            fs.close(this.file, function () { });
+            this.ended = true;
+        },
+        file: 0,
+        ended: false,
+        opened: false
+    };
+
+    return w;
+}
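A sketch of a job list for `run` (all paths are hypothetical): one fractional box query and one whole-cell query, both written as BinaryCIF:

```ts
import * as LocalApi from './local-api'

LocalApi.run([{
    source: { filename: '/data/mdb/xray-1cbs.mdb', name: 'x-ray', id: '1cbs' },
    query: { kind: 'box', space: 'fractional', bottomLeft: [0.1, 0.1, 0.1], topRight: [0.3, 0.3, 0.3] },
    params: { detail: 2, asBinary: true },
    outputFolder: '/data/out'
}, {
    source: { filename: '/data/mdb/emd-8116.mdb', name: 'em', id: 'emd-8116' },
    query: { kind: 'cell' },
    params: { asBinary: true },
    outputFolder: '/data/out'
}]);
```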

+ 94 - 0
src/servers/volume/server/query/compose.ts

@@ -0,0 +1,94 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as DataFormat from '../../common/data-format'
+import * as Data from './data-model'
+import * as Box from '../algebra/box'
+import * as Coords from '../algebra/coordinate'
+import * as File from '../../common/file'
+
+export default async function compose(query: Data.QueryContext.Data) {
+    for (const block of query.samplingInfo.blocks) {
+        await fillBlock(query, block);
+    }
+}
+
+async function readBlock(query: Data.QueryContext.Data, coord: Coords.Grid<'Block'>, blockBox: Box.Fractional): Promise<Data.BlockData> {
+    const numChannels = query.data.header.channels.length;
+    const blockSampleCount = Box.dimensions(Box.fractionalToGrid(blockBox, query.samplingInfo.sampling.dataDomain));
+    const size = numChannels * blockSampleCount[0] * blockSampleCount[1] * blockSampleCount[2];
+    const { valueType, blockSize } = query.data.header;
+    const dataSampleCount = query.data.header.sampling[query.samplingInfo.sampling.index].sampleCount;
+    const buffer = File.createTypedArrayBufferContext(size, valueType);
+    const byteOffset = query.samplingInfo.sampling.byteOffset
+        + DataFormat.getValueByteSize(valueType) * numChannels * blockSize
+        * (blockSampleCount[1] * blockSampleCount[2] * coord[0]
+            + dataSampleCount[0] * blockSampleCount[2] * coord[1]
+            + dataSampleCount[0] * dataSampleCount[1] * coord[2]);
+
+    const values = await File.readTypedArray(buffer, query.data.file, byteOffset, size, 0);
+    return {
+        sampleCount: blockSampleCount,
+        values
+    };
+}
+
+function fillData(query: Data.QueryContext.Data, blockData: Data.BlockData, blockGridBox: Box.Grid<'BlockGrid'>, queryGridBox: Box.Grid<'Query'>) {
+    const source = blockData.values;
+
+    const { sizeX: tSizeH, sizeXY: tSizeHK } = Coords.gridMetrics(query.samplingInfo.gridDomain.sampleCount);
+    const { sizeX: sSizeH, sizeXY: sSizeHK } = Coords.gridMetrics(blockData.sampleCount);
+
+    const offsetTarget = queryGridBox.a[0] + queryGridBox.a[1] * tSizeH + queryGridBox.a[2] * tSizeHK;
+
+    const [maxH, maxK, maxL] = Box.dimensions(blockGridBox);
+
+    for (let channelIndex = 0, _ii = query.data.header.channels.length; channelIndex < _ii; channelIndex++) {
+        const target = query.values[channelIndex];
+        const offsetSource = channelIndex * blockGridBox.a.domain.sampleVolume
+            + blockGridBox.a[0] + blockGridBox.a[1] * sSizeH + blockGridBox.a[2] * sSizeHK;
+
+        for (let l = 0; l < maxL; l++) {
+            for (let k = 0; k < maxK; k++) {
+                for (let h = 0; h < maxH; h++) {
+                    target[offsetTarget + h + k * tSizeH + l * tSizeHK]
+                        = source[offsetSource + h + k * sSizeH + l * sSizeHK];
+                }
+            }
+        }
+    }
+}
+
+function createBlockGridDomain(block: Coords.Grid<'Block'>, grid: Coords.GridDomain<'Data'>): Coords.GridDomain<'BlockGrid'> {
+    const blockBox = Box.fractionalFromBlock(block);
+    const origin = blockBox.a;
+    const dimensions = Coords.sub(blockBox.b, blockBox.a);
+    const sampleCount = Coords.sampleCounts(dimensions, grid.delta);
+    return Coords.domain<'BlockGrid'>('BlockGrid', { origin, dimensions, delta: grid.delta, sampleCount });
+}
+
+/** Read the block data and fill all the overlaps with the query region. */
+async function fillBlock(query: Data.QueryContext.Data, block: Data.QueryBlock) {
+    const baseBox = Box.fractionalFromBlock(block.coord);
+    const blockGridDomain = createBlockGridDomain(block.coord, query.samplingInfo.sampling.dataDomain);
+
+    const blockData: Data.BlockData = await readBlock(query, block.coord, baseBox);
+
+    for (const offset of block.offsets) {
+        const offsetQueryBox = Box.shift(query.samplingInfo.fractionalBox, offset);
+        const dataBox = Box.intersect(baseBox, offsetQueryBox);
+        if (!dataBox) continue;
+
+        const offsetDataBox = Box.shift(dataBox, Coords.invert(offset));
+
+        const blockGridBox = Box.clampGridToSamples(Box.fractionalToGrid(dataBox, blockGridDomain));
+        const queryGridBox = Box.clampGridToSamples(Box.fractionalToGrid(offsetDataBox, query.samplingInfo.gridDomain));
+
+        fillData(query, blockData, blockGridBox, queryGridBox);
+    }
+}
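
The heart of `fillData` is a three-level loop that copies a sub-box between two linearized 3D arrays. A self-contained sketch of the same index arithmetic (all names are illustrative):

```ts
// A value at grid position (h, k, l) of a row-major 3D array lives at
// linear index: offset + h + k * sizeX + l * sizeX * sizeY.
function copySubBox(
    source: Float32Array, srcOffset: number, srcSizeX: number, srcSizeXY: number,
    target: Float32Array, tgtOffset: number, tgtSizeX: number, tgtSizeXY: number,
    maxH: number, maxK: number, maxL: number
) {
    for (let l = 0; l < maxL; l++) {
        for (let k = 0; k < maxK; k++) {
            for (let h = 0; h < maxH; h++) {
                target[tgtOffset + h + k * tgtSizeX + l * tgtSizeXY]
                    = source[srcOffset + h + k * srcSizeX + l * srcSizeXY];
            }
        }
    }
}
```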

+ 78 - 0
src/servers/volume/server/query/data-model.ts

@@ -0,0 +1,78 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as DataFormat from '../../common/data-format'
+import * as Coords from '../algebra/coordinate'
+import * as Box from '../algebra/box'
+import Writer from 'mol-io/writer/writer'
+import { SpacegroupCell } from 'mol-math/geometry';
+
+//////////////////////////////////////
+// DATA
+//////////////////////////////////////
+
+export interface Sampling {
+    index: number,
+    rate: number,
+    byteOffset: number,
+    dataDomain: Coords.GridDomain<'Data'>,
+    blockDomain: Coords.GridDomain<'Block'>
+}
+
+export interface DataContext {
+    file: number,
+    header: DataFormat.Header,
+    spacegroup: SpacegroupCell,
+    dataBox: Box.Fractional,
+    sampling: Sampling[]
+}
+
+export interface BlockData {
+    sampleCount: number[],
+    values: DataFormat.ValueArray
+}
+
+//////////////////////////////////////
+// QUERY
+//////////////////////////////////////
+
+export type QueryOutputStream = Writer & { end: () => void }
+
+export namespace QueryParamsBox {
+    export type Cartesian = { kind: 'Cartesian', a: Coords.Cartesian, b: Coords.Cartesian }
+    export type Fractional = { kind: 'Fractional', a: Coords.Fractional, b: Coords.Fractional }
+    export type Cell = { kind: 'Cell' }
+}
+export type QueryParamsBox = QueryParamsBox.Cartesian | QueryParamsBox.Fractional | QueryParamsBox.Cell
+
+export interface QueryParams {
+    sourceFilename: string,
+    sourceId: string,
+    asBinary: boolean,
+    box: QueryParamsBox,
+    detail: number,
+    forcedSamplingLevel?: number
+}
+
+export type QueryBlock = { coord: Coords.Grid<'Block'>, offsets: Coords.Fractional[] }
+
+export interface QuerySamplingInfo {
+    sampling: Sampling,
+    fractionalBox: Box.Fractional,
+    gridDomain: Coords.GridDomain<'Query'>,
+    blocks: QueryBlock[]
+}
+
+export type QueryContext = QueryContext.Error | QueryContext.Empty | QueryContext.Data
+
+export namespace QueryContext {
+    type Base = { guid: string, params: QueryParams }
+    export type Error = { kind: 'Error', message: string } & Base
+    export type Empty = { kind: 'Empty', data: DataContext } & Base
+    export type Data = { kind: 'Data', data: DataContext, samplingInfo: QuerySamplingInfo, values: DataFormat.ValueArray[] } & Base
+}
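
To illustrate the shape of these types, a hypothetical `QueryParams` for a Cartesian box query (the filename and coordinates are made up):

```ts
const params: QueryParams = {
    sourceFilename: '/data/xray/1cbs.mdb',   // illustrative path
    sourceId: 'x-ray/1cbs',
    asBinary: true,                          // BinaryCIF instead of text CIF
    box: {
        kind: 'Cartesian',
        a: Coords.cartesian(14.5, 16.1, 11.6),
        b: Coords.cartesian(29.3, 36, 22.7)
    },
    detail: 3   // index into the precision-level limits in the server config
};
```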

+ 217 - 0
src/servers/volume/server/query/encode.ts

@@ -0,0 +1,217 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as Encoder from 'mol-io/writer/cif'
+import * as Data from './data-model'
+import * as Coords from '../algebra/coordinate'
+import VERSION from '../version'
+import * as DataFormat from '../../common/data-format'
+import { Column } from 'mol-data/db';
+import { Iterator } from 'mol-data';
+//import { ArrayEncoding, ArrayEncoder } from 'mol-io/common/binary-cif';
+
+export default function encode(query: Data.QueryContext, output: Data.QueryOutputStream) {
+    let w = Encoder.create({ binary: query.params.asBinary, encoderName: `VolumeServer ${VERSION}` });
+    write(w, query);
+    w.encode();
+    w.writeTo(output);
+}
+
+interface ResultContext {
+    query: Data.QueryContext.Data,
+    channelIndex: number
+}
+
+//type Writer = CIF.Writer<ResultContext | Data.QueryContext>
+
+type FieldDesc<T> = Encoder.FieldDefinition<number, T>
+type CategoryInstance = Encoder.CategoryInstance
+
+//import E = CIF.Binary.Encoder
+
+function string<T>(name: string, str: (data: T) => string, isSpecified?: (data: T) => boolean): FieldDesc<T> {
+    if (isSpecified) {
+        return { name, type: Encoder.FieldType.Str, value: (i, d) => str(d), valueKind: (i, d) => isSpecified(d) ? Column.ValueKind.Present : Column.ValueKind.NotPresent };
+    }
+    return { name, type: Encoder.FieldType.Str, value: (i, d) => str(d) };
+}
+
+function int32<T>(name: string, value: (data: T) => number): FieldDesc<T> {
+    return { name, type: Encoder.FieldType.Int, value: (i, d) => value(d) };
+}
+
+function float64<T>(name: string, value: (data: T) => number, precisionMultiplier: number = 1000000): FieldDesc<T> {
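+    // NOTE: the precisionMultiplier parameter is currently unused.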
+    return { name, type: Encoder.FieldType.Float, value: (i, d) => value(d) };
+}
+
+interface _vd3d_Ctx {
+    header: DataFormat.Header,
+    channelIndex: number,
+    grid: Coords.GridDomain<'Query'>,
+    sampleRate: number,
+    globalValuesInfo: DataFormat.ValuesInfo,
+    sampledValuesInfo: DataFormat.ValuesInfo,
+}
+
+const _volume_data_3d_info_fields: FieldDesc<_vd3d_Ctx>[] = [
+    string<_vd3d_Ctx>('name', ctx => ctx.header.channels[ctx.channelIndex]),
+
+    int32<_vd3d_Ctx>('axis_order[0]', ctx => ctx.header.axisOrder[0]),
+    int32<_vd3d_Ctx>('axis_order[1]', ctx => ctx.header.axisOrder[1]),
+    int32<_vd3d_Ctx>('axis_order[2]', ctx => ctx.header.axisOrder[2]),
+
+    float64<_vd3d_Ctx>('origin[0]', ctx => ctx.grid.origin[0]),
+    float64<_vd3d_Ctx>('origin[1]', ctx => ctx.grid.origin[1]),
+    float64<_vd3d_Ctx>('origin[2]', ctx => ctx.grid.origin[2]),
+
+    float64<_vd3d_Ctx>('dimensions[0]', ctx => ctx.grid.dimensions[0]),
+    float64<_vd3d_Ctx>('dimensions[1]', ctx => ctx.grid.dimensions[1]),
+    float64<_vd3d_Ctx>('dimensions[2]', ctx => ctx.grid.dimensions[2]),
+
+    int32<_vd3d_Ctx>('sample_rate', ctx => ctx.sampleRate),
+    int32<_vd3d_Ctx>('sample_count[0]', ctx => ctx.grid.sampleCount[0]),
+    int32<_vd3d_Ctx>('sample_count[1]', ctx => ctx.grid.sampleCount[1]),
+    int32<_vd3d_Ctx>('sample_count[2]', ctx => ctx.grid.sampleCount[2]),
+
+    int32<_vd3d_Ctx>('spacegroup_number', ctx => ctx.header.spacegroup.number),
+
+    float64<_vd3d_Ctx>('spacegroup_cell_size[0]', ctx => ctx.header.spacegroup.size[0], 1000),
+    float64<_vd3d_Ctx>('spacegroup_cell_size[1]', ctx => ctx.header.spacegroup.size[1], 1000),
+    float64<_vd3d_Ctx>('spacegroup_cell_size[2]', ctx => ctx.header.spacegroup.size[2], 1000),
+
+    float64<_vd3d_Ctx>('spacegroup_cell_angles[0]', ctx => ctx.header.spacegroup.angles[0], 1000),
+    float64<_vd3d_Ctx>('spacegroup_cell_angles[1]', ctx => ctx.header.spacegroup.angles[1], 1000),
+    float64<_vd3d_Ctx>('spacegroup_cell_angles[2]', ctx => ctx.header.spacegroup.angles[2], 1000),
+
+    float64<_vd3d_Ctx>('mean_source', ctx => ctx.globalValuesInfo.mean),
+    float64<_vd3d_Ctx>('mean_sampled', ctx => ctx.sampledValuesInfo.mean),
+    float64<_vd3d_Ctx>('sigma_source', ctx => ctx.globalValuesInfo.sigma),
+    float64<_vd3d_Ctx>('sigma_sampled', ctx => ctx.sampledValuesInfo.sigma),
+    float64<_vd3d_Ctx>('min_source', ctx => ctx.globalValuesInfo.min),
+    float64<_vd3d_Ctx>('min_sampled', ctx => ctx.sampledValuesInfo.min),
+    float64<_vd3d_Ctx>('max_source', ctx => ctx.globalValuesInfo.max),
+    float64<_vd3d_Ctx>('max_sampled', ctx => ctx.sampledValuesInfo.max)
+];
+
+function _volume_data_3d_info(result: ResultContext): CategoryInstance {
+    const ctx: _vd3d_Ctx = {
+        header: result.query.data.header,
+        channelIndex: result.channelIndex,
+        grid: result.query.samplingInfo.gridDomain,
+        sampleRate: result.query.samplingInfo.sampling.rate,
+        globalValuesInfo: result.query.data.header.sampling[0].valuesInfo[result.channelIndex],
+        sampledValuesInfo: result.query.data.header.sampling[result.query.samplingInfo.sampling.index].valuesInfo[result.channelIndex]
+    };
+
+    return {
+        data: ctx,
+        definition: { name: 'volume_data_3d_info', fields: _volume_data_3d_info_fields },
+        keys: () => Iterator.Value(0),
+        rowCount: 1
+    };
+}
+
+function _volume_data_3d_number(i: number, ctx: DataFormat.ValueArray): number {
+    return ctx[i];
+}
+
+function _volume_data_3d(ctx: ResultContext) {
+    const data = ctx.query.values[ctx.channelIndex];
+
+    // const E = ArrayEncoding;
+    // let encoder: ArrayEncoder;
+    // let typedArray: any;
+    // if (ctx.query.data.header.valueType === DataFormat.ValueType.Float32 || ctx.query.data.header.valueType === DataFormat.ValueType.Int16) {
+    //     let min: number, max: number;
+    //     min = data[0], max = data[0];
+    //     for (let i = 0, n = data.length; i < n; i++) {
+    //         let v = data[i];
+    //         if (v < min) min = v;
+    //         else if (v > max) max = v;
+    //     }
+    //     typedArray = Float32Array;
+    //     // encode into 255 steps and store each value in 1 byte.
+    //     encoder = E.by(E.intervalQuantizaiton(min, max, 255, Uint8Array)).and(E.byteArray);
+    // } else {
+    //     typedArray = Int8Array;
+    //     // just encode the bytes
+    //     encoder = E.by(E.byteArray)
+    // }
+
+    let fields: FieldDesc<typeof data>[] = [{
+        name: 'values', type: Encoder.FieldType.Float, value: _volume_data_3d_number
+    }];
+
+    return {
+        data,
+        definition: { name: 'volume_data_3d', fields },
+        keys: () => Iterator.Range(0, data.length - 1),
+        rowCount: data.length
+    };
+}
+
+function pickQueryBoxDimension(ctx: Data.QueryContext, e: 'a' | 'b', d: number) {
+    const box = ctx.params.box;
+    switch (box.kind) {
+        case 'Cartesian':
+        case 'Fractional':
+            return `${Math.round(1000000 * box[e][d]) / 1000000}`;
+        default: return '';
+    }
+}
+
+function queryBoxDimension(e: 'a' | 'b', d: number) {
+    return string<Data.QueryContext>(`query_box_${e}[${d}]`, ctx => pickQueryBoxDimension(ctx, e, d), ctx => ctx.params.box.kind !== 'Cell');
+}
+
+const _density_server_result_fields: FieldDesc<Data.QueryContext>[] = [
+    string<Data.QueryContext>('server_version', ctx => VERSION),
+    string<Data.QueryContext>('datetime_utc', ctx => new Date().toISOString().replace(/T/, ' ').replace(/\..+/, '')),
+    string<Data.QueryContext>('guid', ctx => ctx.guid),
+    string<Data.QueryContext>('is_empty', ctx => ctx.kind === 'Empty' || ctx.kind === 'Error' ? 'yes' : 'no'),
+    string<Data.QueryContext>('has_error', ctx => ctx.kind === 'Error' ? 'yes' : 'no'),
+    string<Data.QueryContext>('error', ctx => ctx.kind === 'Error' ? ctx.message : '', (ctx) => ctx.kind === 'Error'),
+    string<Data.QueryContext>('query_source_id', ctx => ctx.params.sourceId),
+    string<Data.QueryContext>('query_type', ctx => 'box'),
+    string<Data.QueryContext>('query_box_type', ctx => ctx.params.box.kind.toLowerCase()),
+    queryBoxDimension('a', 0),
+    queryBoxDimension('a', 1),
+    queryBoxDimension('a', 2),
+    queryBoxDimension('b', 0),
+    queryBoxDimension('b', 1),
+    queryBoxDimension('b', 2)
+]
+
+function _density_server_result(ctx: Data.QueryContext) {
+    return {
+        data: ctx,
+        definition: { name: 'density_server_result', fields: _density_server_result_fields },
+        keys: () => Iterator.Value(0),
+        rowCount: 1
+    };
+}
+
+function write(encoder: Encoder.EncoderInstance, query: Data.QueryContext) {
+    encoder.startDataBlock('SERVER');
+    encoder.writeCategory(_density_server_result, [query]);
+
+    if (query.kind === 'Data') {
+        const header = query.data.header;
+        for (let i = 0; i < header.channels.length; i++) {
+            encoder.startDataBlock(header.channels[i]);
+            const ctx: ResultContext[] = [{ query, channelIndex: i }];
+
+            encoder.writeCategory(_volume_data_3d_info, ctx);
+            encoder.writeCategory(_volume_data_3d, ctx);
+        }
+    }
+}
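
For a text (CIF) response, the `density_server_result` category produced by the fields above comes out roughly as follows (values are illustrative, and `.` marks the not-present `error` field):

```
data_SERVER
_density_server_result.server_version   0.9.5
_density_server_result.datetime_utc     '2018-04-05 10:20:30'
_density_server_result.guid             a1b2c3d4-0000-1111-2222-333344445555
_density_server_result.is_empty         no
_density_server_result.has_error        no
_density_server_result.error            .
_density_server_result.query_source_id  x-ray/1cbs
_density_server_result.query_type       box
_density_server_result.query_box_type   cartesian
_density_server_result.query_box_a[0]   14.5
_density_server_result.query_box_a[1]   16.1
_density_server_result.query_box_a[2]   11.6
_density_server_result.query_box_b[0]   29.3
_density_server_result.query_box_b[1]   36
_density_server_result.query_box_b[2]   22.7
```

For a non-empty result this is followed by one data block per channel, each containing the `volume_data_3d_info` and `volume_data_3d` categories.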

+ 234 - 0
src/servers/volume/server/query/execute.ts

@@ -0,0 +1,234 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as DataFormat from '../../common/data-format'
+import * as File from '../../common/file'
+import * as Data from './data-model'
+import * as Coords from '../algebra/coordinate'
+import * as Box from '../algebra/box'
+import * as Logger from '../utils/logger'
+import { State } from '../state'
+import ServerConfig from '../../server-config'
+
+import identify from './identify'
+import compose from './compose'
+import encode from './encode'
+import { SpacegroupCell } from 'mol-math/geometry';
+import { Vec3 } from 'mol-math/linear-algebra';
+import { UUID } from 'mol-util';
+
+export default async function execute(params: Data.QueryParams, outputProvider: () => Data.QueryOutputStream) {
+    const start = getTime();
+    State.pendingQueries++;
+
+    const guid = UUID.create() as any as string;
+    params.detail = Math.min(Math.max(0, params.detail | 0), ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel.length - 1);
+    Logger.log(guid, 'Info', `id=${params.sourceId},encoding=${params.asBinary ? 'binary' : 'text'},detail=${params.detail},${queryBoxToString(params.box)}`);
+
+    let sourceFile: number | undefined = void 0;
+    try {
+        sourceFile = await File.openRead(params.sourceFilename);
+        await _execute(sourceFile, params, guid, outputProvider);
+        return true;
+    } catch (e) {
+        Logger.error(guid, e);
+        return false;
+    } finally {
+        File.close(sourceFile);
+        Logger.log(guid, 'Time', `${Math.round(getTime() - start)}ms`);
+        State.pendingQueries--;
+    }
+}
+
+function getTime() {
+    let t = process.hrtime();
+    return t[0] * 1000 + t[1] / 1000000;
+}
+
+function blockDomain(domain: Coords.GridDomain<'Data'>, blockSize: number): Coords.GridDomain<'Block'> {
+    const delta = Coords.fractional(blockSize * domain.delta[0], blockSize * domain.delta[1], blockSize * domain.delta[2]);
+    return Coords.domain<'Block'>('Block', {
+        origin: domain.origin,
+        dimensions: domain.dimensions,
+        delta,
+        sampleCount: Coords.sampleCounts(domain.dimensions, delta)
+    });
+}
+
+function createSampling(header: DataFormat.Header, index: number, dataOffset: number): Data.Sampling {
+    const sampling = header.sampling[index];
+    const dataDomain = Coords.domain<'Data'>('Data', {
+        origin: Coords.fractional(header.origin[0], header.origin[1], header.origin[2]),
+        dimensions: Coords.fractional(header.dimensions[0], header.dimensions[1], header.dimensions[2]),
+        delta: Coords.fractional(
+            header.dimensions[0] / sampling.sampleCount[0],
+            header.dimensions[1] / sampling.sampleCount[1],
+            header.dimensions[2] / sampling.sampleCount[2]),
+        sampleCount: sampling.sampleCount
+    });
+    return {
+        index,
+        rate: sampling.rate,
+        byteOffset: sampling.byteOffset + dataOffset,
+        dataDomain,
+        blockDomain: blockDomain(dataDomain, header.blockSize)
+    }
+}
+
+async function createDataContext(file: number): Promise<Data.DataContext> {
+    const { header, dataOffset } = await DataFormat.readHeader(file);
+
+    const origin = Coords.fractional(header.origin[0], header.origin[1], header.origin[2]);
+    const dimensions = Coords.fractional(header.dimensions[0], header.dimensions[1], header.dimensions[2]);
+
+    return {
+        file,
+        header,
+        spacegroup: SpacegroupCell.create(header.spacegroup.number, Vec3.ofArray(header.spacegroup.size), Vec3.scale(Vec3.zero(), Vec3.ofArray(header.spacegroup.angles), Math.PI / 180)),
+        dataBox: { a: origin, b: Coords.add(origin, dimensions) },
+        sampling: header.sampling.map((s, i) => createSampling(header, i, dataOffset))
+    }
+}
+
+function createQuerySampling(data: Data.DataContext, sampling: Data.Sampling, queryBox: Box.Fractional): Data.QuerySamplingInfo {
+    const fractionalBox = Box.gridToFractional(Box.expandGridBox(Box.fractionalToGrid(queryBox, sampling.dataDomain), 1));
+    const blocks = identify(data, sampling, fractionalBox);
+    return {
+        sampling,
+        fractionalBox,
+        gridDomain: Box.fractionalToDomain<'Query'>(fractionalBox, 'Query', sampling.dataDomain.delta),
+        blocks
+    };
+}
+
+function pickSampling(data: Data.DataContext, queryBox: Box.Fractional, forcedLevel: number, precision: number): Data.QuerySamplingInfo {
+    if (forcedLevel > 0) {
+        return createQuerySampling(data, data.sampling[Math.min(data.sampling.length, forcedLevel) - 1], queryBox);
+    }
+
+    const sizeLimit = ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel[precision] || (2 * 1024 * 1024);
+
+    for (const s of data.sampling) {
+        const gridBox = Box.fractionalToGrid(queryBox, s.dataDomain);
+        const approxSize = Box.volume(gridBox);
+
+        if (approxSize <= sizeLimit) {
+            const sampling = createQuerySampling(data, s, queryBox);
+            if (sampling.blocks.length <= ServerConfig.limits.maxRequestBlockCount) {
+                return sampling;
+            }
+        }
+    }
+
+    return createQuerySampling(data, data.sampling[data.sampling.length - 1], queryBox);
+}
+
+function emptyQueryContext(data: Data.DataContext, params: Data.QueryParams, guid: string): Data.QueryContext {
+    return { kind: 'Empty', guid, params, data }
+}
+
+function getQueryBox(data: Data.DataContext, queryBox: Data.QueryParamsBox) {
+    switch (queryBox.kind) {
+        case 'Cartesian': return Box.fractionalBoxReorderAxes(Box.cartesianToFractional(queryBox, data.spacegroup), data.header.axisOrder);
+        case 'Fractional': return Box.fractionalBoxReorderAxes(queryBox, data.header.axisOrder);
+        default: return data.dataBox;
+    }
+}
+
+function allocateValues(domain: Coords.GridDomain<'Query'>, numChannels: number, valueType: DataFormat.ValueType) {
+    const values = [];
+    for (let i = 0; i < numChannels; i++) {
+        values[values.length] = DataFormat.createValueArray(valueType, domain.sampleVolume);
+    }
+    return values;
+}
+
+function createQueryContext(data: Data.DataContext, params: Data.QueryParams, guid: string): Data.QueryContext {
+    const inputQueryBox = getQueryBox(data, params.box);
+    let queryBox;
+    if (!data.header.spacegroup.isPeriodic) {
+        if (!Box.areIntersecting(data.dataBox, inputQueryBox)) {
+            return emptyQueryContext(data, params, guid);
+        }
+        queryBox = Box.intersect(data.dataBox, inputQueryBox)!;
+    } else {
+        queryBox = inputQueryBox;
+    }
+
+    const dimensions = Box.dimensions(queryBox);
+    if (dimensions.some(d => isNaN(d))) {
+        throw `The query box is not defined.`;
+    }
+
+    if (dimensions[0] * dimensions[1] * dimensions[2] > ServerConfig.limits.maxFractionalBoxVolume) {
+        throw `The query box volume is too big.`;
+    }
+
+    const samplingInfo = pickSampling(data, queryBox, params.forcedSamplingLevel !== void 0 ? params.forcedSamplingLevel : 0, params.detail);
+
+    if (samplingInfo.blocks.length === 0) return emptyQueryContext(data, params, guid);
+
+    return {
+        kind: 'Data',
+        guid,
+        data,
+        params,
+        samplingInfo,
+        values: allocateValues(samplingInfo.gridDomain, data.header.channels.length, data.header.valueType)
+    }
+}
+
+
+async function _execute(file: number, params: Data.QueryParams, guid: string, outputProvider: () => Data.QueryOutputStream) {
+    let output: Data.QueryOutputStream | undefined = void 0;
+    try {
+        // Step 1a: Create data context
+        const data = await createDataContext(file);
+
+        // Step 1b: Create query context
+        const query = createQueryContext(data, params, guid);
+
+        if (query.kind === 'Data') {
+            // Step 3b: Compose the result data
+            await compose(query);
+        }
+
+        // Step 4: Encode the result
+        output = outputProvider();
+        encode(query, output);
+        output.end();
+    } catch (e) {
+        const query: Data.QueryContext = { kind: 'Error', guid, params, message: `${e}` };
+        if (!output) output = outputProvider();
+        encode(query, output);
+        throw e;
+    } finally {
+        if (output) output.end();
+    }
+}
+
+function roundCoord(c: number) {
+    return Math.round(100000 * c) / 100000;
+}
+
+function queryBoxToString(queryBox: Data.QueryParamsBox) {
+    switch (queryBox.kind) {
+        case 'Cartesian':
+        case 'Fractional':
+            const { a, b } = queryBox;
+            const r = roundCoord;
+            return `box-type=${queryBox.kind},box-a=(${r(a[0])},${r(a[1])},${r(a[2])}),box-b=(${r(b[0])},${r(b[1])},${r(b[2])})`;
+        default:
+            return `box-type=${queryBox.kind}`;
+    }
+}
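
A minimal sketch of driving `execute` directly, reusing the `wrapFile` output stream shown earlier (paths and ids are illustrative):

```ts
const ok = await execute(
    {
        sourceFilename: '/data/xray/1cbs.mdb',
        sourceId: 'x-ray/1cbs',
        asBinary: true,
        box: { kind: 'Cell' },
        detail: 1
    },
    () => wrapFile('/tmp/1cbs-cell.bcif')   // created lazily on the first write
);
if (!ok) console.error('Query failed; see the log for details.');
```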

+ 123 - 0
src/servers/volume/server/query/identify.ts

@@ -0,0 +1,123 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as Coords from '../algebra/coordinate'
+import * as Box from '../algebra/box'
+import * as Data from './data-model'
+//import { FastMap } from '../utils/collections'
+
+/** Find a list of unique blocks+offsets that overlap with the query region. */
+export default function findUniqueBlocks(data: Data.DataContext, sampling: Data.Sampling, queryBox: Box.Fractional) {
+    const translations = data.header.spacegroup.isPeriodic
+        // find all query box translations that overlap with the unit cell.
+        ? findDataOverlapTranslationList(queryBox, sampling.dataDomain)
+        // no translations
+        : [Coords.fractional(0, 0, 0)];
+
+    const blocks: UniqueBlocks = new Map<number, Data.QueryBlock>();
+    for (const t of translations) {
+        findUniqueBlocksOffset(data, sampling, queryBox, t, blocks);
+    }
+
+    const blockList = [] as Data.QueryBlock[];
+    blocks.forEach(b => blockList.push(b));
+
+    // Sort the blocks so that the first coordinate changes the fastest:
+    // that is how the data is laid out in the underlying file format,
+    // and reading it 'in order' is faster.
+    blockList.sort((a, b) => {
+        const x = a.coord, y = b.coord;
+        for (let i = 2; i >= 0; i--) {
+            if (x[i] !== y[i]) return x[i] - y[i];
+        }
+        return 0;
+    });
+    return blockList;
+}
+
+type Translations = Coords.Fractional[]
+
+/**
+ * Find the integer interval [x, y] so that for all k \in [x, y]
+ * [a + k, b + k] intersects with (u, v)
+ */
+function overlapMultiplierRange(a: number, b: number, u: number, v: number): number[] | undefined {
+    let x = Math.ceil(u - b) | 0, y = Math.floor(v - a) | 0;
+    if (Coords.round(b + x) <= Coords.round(u)) x++;
+    if (Coords.round(a + y) >= Coords.round(v)) y--;
+    if (x > y) return void 0;
+    return [x, y];
+}
+
+/**
+ * Finds the list of "unit cell" offsets (in fractional space) such that
+ * shift(box, offset) has a non-empty intersection with the region
+ * described by the given domain.
+ */
+function findDataOverlapTranslationList(box: Box.Fractional, domain: Coords.GridDomain<'Data'>): Translations {
+    const ranges = [];
+    const translations: Translations = [];
+    const { origin, dimensions } = domain;
+
+    for (let i = 0; i < 3; i++) {
+        const range = overlapMultiplierRange(
+            box.a[i], box.b[i],
+            origin[i], origin[i] + dimensions[i]);
+        if (!range) return translations;
+        ranges[i] = range;
+    }
+
+    const [u, v, w] = ranges;
+
+    for (let k = w[0]; k <= w[1]; k++) {
+        for (let j = v[0]; j <= v[1]; j++) {
+            for (let i = u[0]; i <= u[1]; i++) {
+                translations.push(Coords.fractional(i, j, k));
+            }
+        }
+    }
+
+    return translations;
+}
+
+type UniqueBlocks = Map<number, Data.QueryBlock>
+
+function addUniqueBlock(blocks: UniqueBlocks, coord: Coords.Grid<'Block'>, offset: Coords.Fractional) {
+    const hash = Coords.linearGridIndex(coord);
+    if (blocks.has(hash)) {
+        const entry = blocks.get(hash)!;
+        entry.offsets.push(offset);
+    } else {
+        blocks.set(hash, { coord, offsets: [offset] });
+    }
+}
+
+function findUniqueBlocksOffset(data: Data.DataContext, sampling: Data.Sampling, queryBox: Box.Fractional, offset: Coords.Fractional, blocks: UniqueBlocks) {
+    const shifted = Box.shift(queryBox, offset);
+    const intersection = Box.intersect(shifted, data.dataBox);
+
+    // Intersection can be empty in the case of "aperiodic spacegroups"
+    if (!intersection) return;
+
+    const blockDomain = sampling.blockDomain;
+
+    // this gets the "3d range" of block indices that contain data that overlaps 
+    // with the query region.
+    //
+    // Clamping the data makes sure we avoid silly rounding errors (hopefully :))
+    const { a: min, b: max }
+        = Box.clampGridToSamples(Box.fractionalToGrid(intersection, blockDomain));
+
+    for (let i = min[0]; i < max[0]; i++) {
+        for (let j = min[1]; j < max[1]; j++) {
+            for (let k = min[2]; k < max[2]; k++) {
+                addUniqueBlock(blocks, Coords.grid(blockDomain, i, j, k), offset);
+            }
+        }
+    }
+}
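
A worked example of `overlapMultiplierRange` may help: take a query interval `[a, b] = [-1.3, -1.1]` and the cell interval `(u, v) = (0, 1)`:

```ts
// x = ceil(u - b) = ceil(0 - (-1.1)) = 2
// y = floor(v - a) = floor(1 - (-1.3)) = 2
// neither boundary correction fires, so the result is [2, 2]:
// shifting by k = 2 moves the interval to [0.7, 0.9], inside (0, 1).
overlapMultiplierRange(-1.3, -1.1, 0, 1); // => [2, 2]
```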

+ 13 - 0
src/servers/volume/server/state.ts

@@ -0,0 +1,13 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+export const State = {
+    pendingQueries: 0,
+    shutdownOnZeroPending: false,
+    querySerial: 0
+}
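
`execute` increments `pendingQueries` on entry and decrements it in its `finally` block, while the web layer calls `queryDone()` after each request and exits the process when `shutdownOnZeroPending` is set. One way such a flag might be driven, e.g. from a signal handler (this handler is not part of the commit):

```ts
// Hypothetical shutdown hook: set the flag so the server exits from
// queryDone() in web-api.ts after the current request completes,
// or immediately if nothing is pending.
process.on('SIGINT', () => {
    State.shutdownOnZeroPending = true;
    if (State.pendingQueries === 0) process.exit(0);
});
```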

+ 42 - 0
src/servers/volume/server/utils/logger.ts

@@ -0,0 +1,42 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+export function formatTime(t: number) {
+    if (isNaN(t)) return 'n/a';
+
+    let h = Math.floor(t / (60 * 60 * 1000)),
+        m = Math.floor(t / (60 * 1000) % 60),
+        s = Math.floor(t / 1000 % 60),
+        ms = Math.floor(t % 1000).toString();
+
+    while (ms.length < 3) ms = '0' + ms;
+
+    if (h > 0) return `${h}h${m}m${s}.${ms}s`;
+    if (m > 0) return `${m}m${s}.${ms}s`;
+    if (s > 0) return `${s}.${ms}s`;
+    return `${t.toFixed(0)}ms`;
+}
+
+export function logPlain(tag: string, msg: string) {
+    console.log(`[${tag}] ${msg}`);
+}
+
+export function log(guid: string, tag: string, msg: string) {
+    console.log(`[${guid}][${tag}] ${msg}`);
+}
+
+export function errorPlain(ctx: string, e: any) {
+    console.error(`[Error] (${ctx}) ${e}`);
+    if (e.stack) console.error(e.stack);
+}
+
+
+export function error(guid: string, e: any) {
+    console.error(`[${guid}][Error] ${e}`);
+    if (e.stack) console.error(e.stack);
+}
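
A few sample outputs of the helpers above:

```ts
formatTime(987);      // => '987ms'
formatTime(61234);    // => '1m1.234s'
formatTime(3723456);  // => '1h2m3.456s'

log('a1b2c3', 'Time', '42ms');   // prints: [a1b2c3][Time] 42ms
```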

+ 1 - 0
src/servers/volume/server/version.ts

@@ -0,0 +1 @@
+export default '0.9.5'

+ 186 - 0
src/servers/volume/server/web-api.ts

@@ -0,0 +1,186 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as express from 'express'
+
+import * as Api from './api'
+
+import * as Data from './query/data-model'
+import * as Coords from './algebra/coordinate'
+import Docs from './documentation'
+import ServerConfig from '../server-config'
+import * as Logger from './utils/logger'
+import { State } from './state'
+
+export default function init(app: express.Express) {
+    function makePath(p: string) {
+        return ServerConfig.apiPrefix + '/' + p;
+    }
+
+    // Header
+    app.get(makePath(':source/:id/?$'), (req, res) => getHeader(req, res));
+    // Box /:src/:id/box/:a1,:a2,:a3/:b1,:b2,:b3?text=0|1&space=cartesian|fractional
+    app.get(makePath(':source/:id/box/:a1,:a2,:a3/:b1,:b2,:b3/?'), (req, res) => queryBox(req, res, getQueryParams(req, false)));
+    // Cell /:src/:id/cell/?text=0|1&space=cartesian|fractional
+    app.get(makePath(':source/:id/cell/?'), (req, res) => queryBox(req, res, getQueryParams(req, true)));
+
+    app.get('*', (req, res) => {
+        res.writeHead(200, { 'Content-Type': 'text/html; charset=utf-8' });
+        res.end(Docs);
+    });
+}
+
+function mapFile(type: string, id: string) {
+    return ServerConfig.mapFile(type || '', id || '');
+}
+
+function wrapResponse(fn: string, res: express.Response) {
+    const w = {
+        do404(this: any) {
+            if (!this.headerWritten) {
+                res.writeHead(404);
+                this.headerWritten = true;
+            }
+            this.end();
+        },
+        writeHeader(this: any, binary: boolean) {
+            if (this.headerWritten) return;
+            res.writeHead(200, {
+                'Content-Type': binary ? 'application/octet-stream' : 'text/plain; charset=utf-8',
+                'Access-Control-Allow-Origin': '*',
+                'Access-Control-Allow-Headers': 'X-Requested-With',
+                'Content-Disposition': `inline; filename="${fn}"`
+            });
+            this.headerWritten = true;
+        },
+        writeBinary(this: any, data: Uint8Array) {
+            if (!this.headerWritten) this.writeHeader(true);
+            return res.write(Buffer.from(data.buffer));
+        },
+        writeString(this: any, data: string) {
+            if (!this.headerWritten) this.writeHeader(false);
+            return res.write(data);
+        },
+        end(this: any) {
+            if (this.ended) return;
+            res.end();
+            this.ended = true;
+        },
+        ended: false,
+        headerWritten: false
+    };
+
+    return w;
+}
+
+function getSourceInfo(req: express.Request) {
+    return {
+        filename: mapFile(req.params.source, req.params.id),
+        id: `${req.params.source}/${req.params.id}`
+    };
+}
+
+function validateSourceAndId(req: express.Request, res: express.Response) {
+    if (!req.params.source || req.params.source.length > 32 || !req.params.id || req.params.id.length > 32) {
+        res.writeHead(404);
+        res.end();
+        Logger.errorPlain(`Query Box`, 'Invalid source and/or id');
+        return true;
+    }
+    return false;
+}
+
+async function getHeader(req: express.Request, res: express.Response) {
+    if (validateSourceAndId(req, res)) {
+        return;
+    }
+
+    let headerWritten = false;
+
+    try {
+        const { filename, id } = getSourceInfo(req);
+        const header = await Api.getHeaderJson(filename, id);
+        if (!header) {
+            res.writeHead(404);
+            return;
+        }
+        res.writeHead(200, {
+            'Content-Type': 'application/json; charset=utf-8',
+            'Access-Control-Allow-Origin': '*',
+            'Access-Control-Allow-Headers': 'X-Requested-With'
+        });
+        headerWritten = true;
+        res.write(header);
+    } catch (e) {
+        Logger.errorPlain(`Header ${req.params.source}/${req.params.id}`, e);
+        if (!headerWritten) {
+            res.writeHead(404);
+        }
+    } finally {
+        res.end();
+    }
+}
+
+function getQueryParams(req: express.Request, isCell: boolean): Data.QueryParams {
+    const a = [+req.params.a1, +req.params.a2, +req.params.a3];
+    const b = [+req.params.b1, +req.params.b2, +req.params.b3];
+
+    const detail = Math.min(Math.max(0, (+req.query.detail) | 0), ServerConfig.limits.maxOutputSizeInVoxelCountByPrecisionLevel.length - 1)
+    const isCartesian = (req.query.space || '').toLowerCase() !== 'fractional';
+
+    const box: Data.QueryParamsBox = isCell
+        ? { kind: 'Cell' }
+        : (isCartesian
+            ? { kind: 'Cartesian', a: Coords.cartesian(a[0], a[1], a[2]), b: Coords.cartesian(b[0], b[1], b[2]) }
+            : { kind: 'Fractional', a: Coords.fractional(a[0], a[1], a[2]), b: Coords.fractional(b[0], b[1], b[2]) });
+
+    const asBinary = (req.query.encoding || '').toLowerCase() !== 'cif';
+    const sourceFilename = mapFile(req.params.source, req.params.id)!;
+
+    return {
+        sourceFilename,
+        sourceId: `${req.params.source}/${req.params.id}`,
+        asBinary,
+        box,
+        detail
+    };
+}
+
+async function queryBox(req: express.Request, res: express.Response, params: Data.QueryParams) {
+    if (validateSourceAndId(req, res)) {
+        return;
+    }
+
+    const outputFilename = Api.getOutputFilename(req.params.source, req.params.id, params);
+    const response = wrapResponse(outputFilename, res);
+
+    try {
+        if (!params.sourceFilename) {
+            response.do404();
+            return;
+        }
+
+        let ok = await Api.queryBox(params, () => response);
+        if (!ok) {
+            response.do404();
+            return;
+        }
+    } catch (e) {
+        Logger.errorPlain(`Query Box ${JSON.stringify(req.params || {})} | ${JSON.stringify(req.query || {})}`, e);
+        response.do404();
+    } finally {
+        response.end();
+        queryDone();
+    }
+}
+
+function queryDone() {
+    if (State.shutdownOnZeroPending) {
+        process.exit(0);
+    }
+}
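
Putting the routes together, example requests might look as follows (assuming `apiPrefix` is set to `/VolumeServer` in the server config; the ids are illustrative):

```
# JSON header/metadata of a data set
GET /VolumeServer/x-ray/1cbs

# Box query, Cartesian coordinates, BinaryCIF (the default encoding)
GET /VolumeServer/x-ray/1cbs/box/14.5,16.1,11.6/29.3,36,22.7?detail=2

# Whole-cell query as text CIF
GET /VolumeServer/em/emd-8116/cell?encoding=cif&detail=1

# Fractional-space box query
GET /VolumeServer/x-ray/1cbs/box/0,0,0/1,1,1?space=fractional
```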