
Merge pull request #393 from molstar/zenodo-import

Zenodo import
Alexander Rose committed 3 years ago · commit 0cb2e5857a
37 changed files with 3097 additions and 165 deletions
  1. CHANGELOG.md (+8 −0)
  2. src/apps/viewer/app.ts (+8 −5)
  3. src/extensions/zenodo/index.ts (+30 −0)
  4. src/extensions/zenodo/ui.tsx (+302 −0)
  5. src/mol-io/common/io-buffer.ts (+453 −0)
  6. src/mol-io/common/netcdf/reader.ts (+527 −0)
  7. src/mol-io/reader/nctraj/parser.ts (+89 −0)
  8. src/mol-io/reader/prmtop/parser.ts (+176 −0)
  9. src/mol-io/reader/top/parser.ts (+303 −0)
  10. src/mol-io/reader/trr/parser.ts (+157 −0)
  11. src/mol-model-formats/structure/cif-core.ts (+1 −1)
  12. src/mol-model-formats/structure/gro.ts (+4 −1)
  13. src/mol-model-formats/structure/mol2.ts (+1 −1)
  14. src/mol-model-formats/structure/nctraj.ts (+52 −0)
  15. src/mol-model-formats/structure/prmtop.ts (+174 −0)
  16. src/mol-model-formats/structure/property/symmetry.ts (+13 −2)
  17. src/mol-model-formats/structure/psf.ts (+4 −1)
  18. src/mol-model-formats/structure/top.ts (+226 −0)
  19. src/mol-model-formats/structure/trr.ts (+39 −0)
  20. src/mol-model-formats/structure/util.ts (+20 −21)
  21. src/mol-plugin-state/actions/file.ts (+52 −34)
  22. src/mol-plugin-state/actions/structure.ts (+128 −3)
  23. src/mol-plugin-state/formats/coordinates.ts (+86 −0)
  24. src/mol-plugin-state/formats/registry.ts (+9 −7)
  25. src/mol-plugin-state/formats/structure.ts (+0 −64)
  26. src/mol-plugin-state/formats/topology.ts (+78 −0)
  27. src/mol-plugin-state/manager/snapshots.ts (+3 −3)
  28. src/mol-plugin-state/objects.ts (+5 −1)
  29. src/mol-plugin-state/transforms/data.ts (+36 −0)
  30. src/mol-plugin-state/transforms/model.ts (+77 −3)
  31. src/mol-plugin-ui/left-panel.tsx (+17 −1)
  32. src/mol-plugin/context.ts (+2 −1)
  33. src/mol-plugin/spec.ts (+1 −1)
  34. src/mol-util/data-source.ts (+3 −1)
  35. src/mol-util/file-info.ts (+2 −13)
  36. src/mol-util/index.ts (+6 −0)
  37. src/mol-util/zip/zip.ts (+5 −1)

+ 8 - 0
CHANGELOG.md

@@ -9,6 +9,14 @@ Note that since we don't clearly distinguish between a public and private interf
 - Fix handling of mmcif with empty ``label_*`` fields
 - Improve saccharide detection (compare against list from CCD)
 - Fix legend label of hydrophobicity color theme
+- Add ``LoadTrajectory`` action
+- Add ``CustomImportControls`` to left panel
+- Add Zenodo import extension (load structures, trajectories, volumes, and zip files)
+- Fix loading of some compressed files within sessions
+- Fix wrong element assignment for atoms with Charmm ion names
+- Fix handling of empty symmetry cell data
+- Add support for ``trr`` and ``nctraj`` coordinates files
+- Add support for ``prmtop`` and ``top`` topology files
 
 ## [v3.3.1] - 2022-02-27
 

+ 8 - 5
src/apps/viewer/app.ts

@@ -17,13 +17,15 @@ import { ModelExport } from '../../extensions/model-export';
 import { Mp4Export } from '../../extensions/mp4-export';
 import { PDBeStructureQualityReport } from '../../extensions/pdbe';
 import { RCSBAssemblySymmetry, RCSBValidationReport } from '../../extensions/rcsb';
+import { ZenodoImport } from '../../extensions/zenodo';
 import { Volume } from '../../mol-model/volume';
 import { DownloadStructure, PdbDownloadProvider } from '../../mol-plugin-state/actions/structure';
 import { DownloadDensity } from '../../mol-plugin-state/actions/volume';
 import { PresetTrajectoryHierarchy } from '../../mol-plugin-state/builder/structure/hierarchy-preset';
 import { PresetStructureRepresentations, StructureRepresentationPresetProvider } from '../../mol-plugin-state/builder/structure/representation-preset';
 import { DataFormatProvider } from '../../mol-plugin-state/formats/provider';
-import { BuildInStructureFormat } from '../../mol-plugin-state/formats/structure';
+import { BuiltInTopologyFormat } from '../../mol-plugin-state/formats/topology';
+import { BuiltInCoordinatesFormat } from '../../mol-plugin-state/formats/coordinates';
 import { BuiltInTrajectoryFormat } from '../../mol-plugin-state/formats/trajectory';
 import { BuildInVolumeFormat } from '../../mol-plugin-state/formats/volume';
 import { createVolumeRepresentationParams } from '../../mol-plugin-state/helpers/volume-representation-params';
@@ -63,6 +65,7 @@ const Extensions = {
     'mp4-export': PluginSpec.Behavior(Mp4Export),
     'geo-export': PluginSpec.Behavior(GeometryExport),
     'ma-quality-assessment': PluginSpec.Behavior(MAQualityAssessment),
+    'zenodo-import': PluginSpec.Behavior(ZenodoImport),
 };
 
 const DefaultViewerOptions = {
@@ -453,11 +456,11 @@ export interface VolumeIsovalueInfo {
 export interface LoadTrajectoryParams {
     model: { kind: 'model-url', url: string, format?: BuiltInTrajectoryFormat /* mmcif */, isBinary?: boolean }
     | { kind: 'model-data', data: string | number[] | ArrayBuffer | Uint8Array, format?: BuiltInTrajectoryFormat /* mmcif */ }
-    | { kind: 'topology-url', url: string, format: BuildInStructureFormat, isBinary?: boolean }
-    | { kind: 'topology-data', data: string | number[] | ArrayBuffer | Uint8Array, format: BuildInStructureFormat },
+    | { kind: 'topology-url', url: string, format: BuiltInTopologyFormat, isBinary?: boolean }
+    | { kind: 'topology-data', data: string | number[] | ArrayBuffer | Uint8Array, format: BuiltInTopologyFormat },
     modelLabel?: string,
-    coordinates: { kind: 'coordinates-url', url: string, format: BuildInStructureFormat, isBinary?: boolean }
-    | { kind: 'coordinates-data', data: string | number[] | ArrayBuffer | Uint8Array, format: BuildInStructureFormat },
+    coordinates: { kind: 'coordinates-url', url: string, format: BuiltInCoordinatesFormat, isBinary?: boolean }
+    | { kind: 'coordinates-data', data: string | number[] | ArrayBuffer | Uint8Array, format: BuiltInCoordinatesFormat },
     coordinatesLabel?: string,
     preset?: keyof PresetTrajectoryHierarchy
 }
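
With topology and coordinates now handled by separate format registries, a topology/coordinates pair can be loaded in a single call. A minimal sketch, assuming a `Viewer` instance (the class defined in this file) exposes a `loadTrajectory` method consuming `LoadTrajectoryParams`, and assuming the new provider names `prmtop` and `nctraj`; the URLs are placeholders:

```ts
await viewer.loadTrajectory({
    model: { kind: 'topology-url', url: 'https://example.com/system.prmtop', format: 'prmtop', isBinary: false },
    modelLabel: 'System topology',
    coordinates: { kind: 'coordinates-url', url: 'https://example.com/system.nc', format: 'nctraj', isBinary: true },
    coordinatesLabel: 'Production run',
    preset: 'default'
});
```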

+ 30 - 0
src/extensions/zenodo/index.ts

@@ -0,0 +1,30 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { PluginBehavior } from '../../mol-plugin/behavior/behavior';
+import { ZenodoImportUI } from './ui';
+
+export const ZenodoImport = PluginBehavior.create<{ }>({
+    name: 'extension-zenodo-import',
+    category: 'misc',
+    display: {
+        name: 'Zenodo Import'
+    },
+    ctor: class extends PluginBehavior.Handler<{ }> {
+        register(): void {
+            this.ctx.customImportControls.set('zenodo-import', ZenodoImportUI as any);
+        }
+
+        update() {
+            return false;
+        }
+
+        unregister() {
+            this.ctx.customImportControls.delete('zenodo-import');
+        }
+    },
+    params: () => ({ })
+});
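
The behavior only registers (and unregisters) the UI panel via `customImportControls`; the bundled viewer app enables it through its `Extensions` map (see the `app.ts` hunk above). A minimal sketch of enabling it in a custom setup, assuming the standard `mol-plugin-ui` entry points `DefaultPluginUISpec` and `createPluginUI`:

```ts
import { createPluginUI } from '../../mol-plugin-ui';
import { DefaultPluginUISpec } from '../../mol-plugin-ui/spec';
import { PluginSpec } from '../../mol-plugin/spec';
import { ZenodoImport } from './index';

async function createViewerWithZenodoImport(target: HTMLElement) {
    const spec = DefaultPluginUISpec();
    // Register the behavior; its register() hook adds the Zenodo panel to the import controls.
    spec.behaviors.push(PluginSpec.Behavior(ZenodoImport));
    return createPluginUI(target, spec);
}
```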

+ 302 - 0
src/extensions/zenodo/ui.tsx

@@ -0,0 +1,302 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { DownloadFile } from '../../mol-plugin-state/actions/file';
+import { DownloadStructure, LoadTrajectory } from '../../mol-plugin-state/actions/structure';
+import { DownloadDensity } from '../../mol-plugin-state/actions/volume';
+import { CoordinatesFormatCategory } from '../../mol-plugin-state/formats/coordinates';
+import { TopologyFormatCategory } from '../../mol-plugin-state/formats/topology';
+import { TrajectoryFormatCategory } from '../../mol-plugin-state/formats/trajectory';
+import { VolumeFormatCategory } from '../../mol-plugin-state/formats/volume';
+import { CollapsableControls, CollapsableState } from '../../mol-plugin-ui/base';
+import { Button } from '../../mol-plugin-ui/controls/common';
+import { OpenInBrowserSvg } from '../../mol-plugin-ui/controls/icons';
+import { ParameterControls } from '../../mol-plugin-ui/controls/parameters';
+import { PluginContext } from '../../mol-plugin/context';
+import { formatBytes } from '../../mol-util';
+import { ParamDefinition as PD } from '../../mol-util/param-definition';
+
+type ZenodoFile = {
+    bucket: string
+    checksum: string
+    key: string
+    links: {
+        [key: string]: string
+        self: string
+    }
+    size: number
+    type: string
+}
+
+type ZenodoRecord = {
+    id: number
+    conceptdoi: string
+    conceptrecid: string
+    created: string
+    doi: string
+    files: ZenodoFile[]
+    revision: number
+    updated: string
+    metadata: {
+        title: string
+    }
+}
+
+interface State {
+    busy?: boolean
+    recordValues: PD.Values<typeof ZenodoImportParams>
+    importValues?: PD.Values<ImportParams>
+    importParams?: ImportParams
+    record?: ZenodoRecord
+    files?: ZenodoFile[]
+}
+
+const ZenodoImportParams = {
+    record: PD.Text('', { description: 'Zenodo ID.' })
+};
+
+function createImportParams(files: ZenodoFile[], plugin: PluginContext) {
+    const modelOpts: [string, string][] = [];
+    const topologyOpts: [string, string][] = [];
+    const coordinatesOpts: [string, string][] = [];
+    const volumeOpts: [string, string][] = [];
+    const compressedOpts: [string, string][] = [];
+
+    const structureExts = new Map<string, { format: string, isBinary: boolean }>();
+    const coordinatesExts = new Map<string, { format: string, isBinary: boolean }>();
+    const topologyExts = new Map<string, { format: string, isBinary: boolean }>();
+    const volumeExts = new Map<string, { format: string, isBinary: boolean }>();
+
+    for (const { provider: { category, binaryExtensions, stringExtensions }, name } of plugin.dataFormats.list) {
+        if (category === TrajectoryFormatCategory) {
+            if (binaryExtensions) for (const e of binaryExtensions) structureExts.set(e, { format: name, isBinary: true });
+            if (stringExtensions) for (const e of stringExtensions) structureExts.set(e, { format: name, isBinary: false });
+        } else if (category === VolumeFormatCategory) {
+            if (binaryExtensions) for (const e of binaryExtensions) volumeExts.set(e, { format: name, isBinary: true });
+            if (stringExtensions) for (const e of stringExtensions) volumeExts.set(e, { format: name, isBinary: false });
+        } else if (category === CoordinatesFormatCategory) {
+            if (binaryExtensions) for (const e of binaryExtensions) coordinatesExts.set(e, { format: name, isBinary: true });
+            if (stringExtensions) for (const e of stringExtensions) coordinatesExts.set(e, { format: name, isBinary: false });
+        } else if (category === TopologyFormatCategory) {
+            if (binaryExtensions) for (const e of binaryExtensions) topologyExts.set(e, { format: name, isBinary: true });
+            if (stringExtensions) for (const e of stringExtensions) topologyExts.set(e, { format: name, isBinary: false });
+        }
+    }
+
+    for (const file of files) {
+        const label = `${file.key} (${formatBytes(file.size)})`;
+        if (structureExts.has(file.type)) {
+            const { format, isBinary } = structureExts.get(file.type)!;
+            modelOpts.push([`${file.links.self}|${format}|${isBinary}`, label]);
+            topologyOpts.push([`${file.links.self}|${format}|${isBinary}`, label]);
+        } else if (volumeExts.has(file.type)) {
+            const { format, isBinary } = volumeExts.get(file.type)!;
+            volumeOpts.push([`${file.links.self}|${format}|${isBinary}`, label]);
+        } else if (topologyExts.has(file.type)) {
+            const { format, isBinary } = topologyExts.get(file.type)!;
+            topologyOpts.push([`${file.links.self}|${format}|${isBinary}`, label]);
+        } else if (coordinatesExts.has(file.type)) {
+            const { format, isBinary } = coordinatesExts.get(file.type)!;
+            coordinatesOpts.push([`${file.links.self}|${format}|${isBinary}`, label]);
+        } else if (file.type === 'zip') {
+            compressedOpts.push([`${file.links.self}|${file.type}|true`, label]);
+        }
+    }
+
+    const params: PD.Params = {};
+    let defaultType = '';
+
+    if (modelOpts.length) {
+        defaultType = 'structure';
+        params.structure = PD.Select(modelOpts[0][0], modelOpts);
+    }
+
+    if (topologyOpts.length && coordinatesOpts.length) {
+        if (!defaultType) defaultType = 'trajectory';
+        params.trajectory = PD.Group({
+            topology: PD.Select(topologyOpts[0][0], topologyOpts),
+            coordinates: PD.Select(coordinatesOpts[0][0], coordinatesOpts),
+        }, { isFlat: true });
+    }
+
+    if (volumeOpts.length) {
+        if (!defaultType) defaultType = 'volume';
+        params.volume = PD.Select(volumeOpts[0][0], volumeOpts);
+    }
+
+    if (compressedOpts.length) {
+        if (!defaultType) defaultType = 'compressed';
+        params.compressed = PD.Select(compressedOpts[0][0], compressedOpts);
+    }
+
+    return {
+        type: PD.MappedStatic(defaultType, Object.keys(params).length ? params : { '': PD.EmptyGroup() })
+    };
+}
+type ImportParams = ReturnType<typeof createImportParams>
+
+export class ZenodoImportUI extends CollapsableControls<{}, State> {
+    protected defaultState(): State & CollapsableState {
+        return {
+            header: 'Zenodo Import',
+            isCollapsed: true,
+            brand: { accent: 'cyan', svg: OpenInBrowserSvg },
+            recordValues: PD.getDefaultValues(ZenodoImportParams),
+            importValues: undefined,
+            importParams: undefined,
+            record: undefined,
+            files: undefined,
+        };
+    }
+
+    private recordParamsOnChange = (values: any) => {
+        this.setState({ recordValues: values });
+    };
+
+    private importParamsOnChange = (values: any) => {
+        this.setState({ importValues: values });
+    };
+
+    private loadRecord = async () => {
+        try {
+            this.setState({ busy: true });
+            const record: ZenodoRecord = await this.plugin.runTask(this.plugin.fetch({ url: `https://zenodo.org/api/records/${this.state.recordValues.record}`, type: 'json' }));
+            const importParams = createImportParams(record.files, this.plugin);
+            this.setState({
+                record,
+                files: record.files,
+                busy: false,
+                importValues: PD.getDefaultValues(importParams),
+                importParams
+            });
+        } catch (e) {
+            console.error(e);
+            this.plugin.log.error(`Failed to load Zenodo record '${this.state.recordValues.record}'`);
+            this.setState({ busy: false });
+        }
+    };
+
+    private loadFile = async (values: PD.Values<ImportParams>) => {
+        try {
+            this.setState({ busy: true });
+
+            const t = values.type;
+            if (t.name === 'structure') {
+                const defaultParams = DownloadStructure.createDefaultParams(this.plugin.state.data.root.obj!, this.plugin);
+
+                const [url, format, isBinary] = t.params.split('|');
+
+                await this.plugin.runTask(this.plugin.state.data.applyAction(DownloadStructure, {
+                    source: {
+                        name: 'url',
+                        params: {
+                            url,
+                            format: format as any,
+                            isBinary: isBinary === 'true',
+                            options: defaultParams.source.params.options,
+                        }
+                    }
+                }));
+            } else if (t.name === 'trajectory') {
+                const [topologyUrl, topologyFormat, topologyIsBinary] = t.params.topology.split('|');
+                const [coordinatesUrl, coordinatesFormat, coordinatesIsBinary] = t.params.coordinates.split('|');
+
+                await this.plugin.runTask(this.plugin.state.data.applyAction(LoadTrajectory, {
+                    source: {
+                        name: 'url',
+                        params: {
+                            model: {
+                                url: topologyUrl,
+                                format: topologyFormat as any,
+                                isBinary: topologyIsBinary === 'true',
+                            },
+                            coordinates: {
+                                url: coordinatesUrl,
+                                format: coordinatesFormat as any,
+                                isBinary: coordinatesIsBinary === 'true',
+                            },
+                        }
+                    }
+                }));
+            } else if (t.name === 'volume') {
+                const [url, format, isBinary] = t.params.split('|');
+
+                await this.plugin.runTask(this.plugin.state.data.applyAction(DownloadDensity, {
+                    source: {
+                        name: 'url',
+                        params: {
+                            url,
+                            format: format as any,
+                            isBinary: isBinary === 'true',
+                        }
+                    }
+                }));
+            } else if (t.name === 'compressed') {
+                const [url, format, isBinary] = t.params.split('|');
+
+                await this.plugin.runTask(this.plugin.state.data.applyAction(DownloadFile, {
+                    url,
+                    format: format as any,
+                    isBinary: isBinary === 'true',
+                    visuals: true
+                }));
+            }
+        } catch (e) {
+            console.error(e);
+            this.plugin.log.error(`Failed to load Zenodo file`);
+        } finally {
+            this.setState({ busy: false });
+        }
+    };
+
+    private clearRecord = () => {
+        this.setState({
+            importValues: undefined,
+            importParams: undefined,
+            record: undefined,
+            files: undefined
+        });
+    };
+
+    private renderLoadRecord() {
+        return <div style={{ marginBottom: 10 }}>
+            <ParameterControls params={ZenodoImportParams} values={this.state.recordValues} onChangeValues={this.recordParamsOnChange} isDisabled={this.state.busy} />
+            <Button onClick={this.loadRecord} style={{ marginTop: 1 }} disabled={this.state.busy || !this.state.recordValues.record}>
+                Load Record
+            </Button>
+        </div>;
+    }
+
+    private renderRecordInfo(record: ZenodoRecord) {
+        return <div style={{ marginBottom: 10 }}>
+            <div className='msp-help-text'>
+                <div>Record {`${record.id}`}: <i>{`${record.metadata.title}`}</i></div>
+            </div>
+            <Button onClick={this.clearRecord} style={{ marginTop: 1 }} disabled={this.state.busy}>
+                Clear
+            </Button>
+        </div>;
+    }
+
+    private renderImportFile(params: ImportParams, values: PD.Values<ImportParams>) {
+        return values.type.name ? <div style={{ marginBottom: 10 }}>
+            <ParameterControls params={params} values={this.state.importValues} onChangeValues={this.importParamsOnChange} isDisabled={this.state.busy} />
+            <Button onClick={() => this.loadFile(values)} style={{ marginTop: 1 }} disabled={this.state.busy}>
+                Import File
+            </Button>
+        </div> : <div className='msp-help-text' style={{ marginBottom: 10 }}>
+            <div>No supported files</div>
+        </div>;
+    }
+
+    protected renderControls(): JSX.Element | null {
+        return <>
+            {!this.state.record ? this.renderLoadRecord() : null}
+            {this.state.record ? this.renderRecordInfo(this.state.record) : null}
+            {this.state.importParams && this.state.importValues ? this.renderImportFile(this.state.importParams, this.state.importValues) : null}
+        </>;
+    }
+}
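
Note on the select options built by `createImportParams`: each option value packs the file's download URL, the resolved format name, and the binary flag into a single `|`-separated string, which `loadFile` splits apart before dispatching the matching action. A small illustration with hypothetical `encode`/`decode` helpers (not part of the diff):

```ts
const encode = (url: string, format: string, isBinary: boolean) => `${url}|${format}|${isBinary}`;
const decode = (value: string) => {
    const [url, format, isBinary] = value.split('|');
    return { url, format, isBinary: isBinary === 'true' };
};

const value = encode('https://zenodo.org/api/files/abc123/traj.nc', 'nctraj', true);
console.log(decode(value)); // { url: 'https://zenodo.org/api/files/abc123/traj.nc', format: 'nctraj', isBinary: true }
```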

+ 453 - 0
src/mol-io/common/io-buffer.ts

@@ -0,0 +1,453 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Adapted and converted to TypeScript from https://github.com/image-js/iobuffer
+ * MIT License, Copyright (c) 2015 Michaël Zasso
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { TypedArray } from '../../mol-util/type-helpers';
+
+const defaultByteLength = 1024 * 8;
+const charArray: string[] = [];
+
+export interface IOBufferParameters {
+    offset?: number // Ignore the first n bytes of the ArrayBuffer
+}
+
+/**
+ * Class for writing and reading binary data
+ */
+export class IOBuffer {
+    private _lastWrittenByte: number;
+    private _mark = 0;
+    private _marks: number[] = [];
+    private _data: DataView;
+
+    offset = 0; // The current offset of the buffer's pointer
+    littleEndian = true;
+    buffer: ArrayBuffer; // Reference to the internal ArrayBuffer object
+    length: number; // Byte length of the internal ArrayBuffer
+    byteLength: number; // Byte length of the internal ArrayBuffer
+    byteOffset: number; // Byte offset of the internal ArrayBuffer
+
+    /**
+     * If it's a number, it will initialize the buffer with the number as
+     * the buffer's length. If it's undefined, it will initialize the buffer
+     * with a default length of 8 KiB. If it's an ArrayBuffer or a TypedArray,
+     * it will create a view over the underlying ArrayBuffer.
+     */
+    constructor(data: number | ArrayBuffer | TypedArray, params: IOBufferParameters = {}) {
+        let dataIsGiven = false;
+        if (data === undefined) {
+            data = defaultByteLength;
+        }
+        if (typeof data === 'number') {
+            data = new ArrayBuffer(data);
+        } else {
+            dataIsGiven = true;
+        }
+
+        const offset = params.offset ? params.offset >>> 0 : 0;
+        const byteLength = data.byteLength - offset;
+        let dvOffset = offset;
+        if (!(data instanceof ArrayBuffer)) {
+            if (data.byteLength !== data.buffer.byteLength) {
+                dvOffset = data.byteOffset + offset;
+            }
+            data = data.buffer;
+        }
+        if (dataIsGiven) {
+            this._lastWrittenByte = byteLength;
+        } else {
+            this._lastWrittenByte = 0;
+        }
+
+        this.buffer = data;
+        this.length = byteLength;
+        this.byteLength = byteLength;
+        this.byteOffset = dvOffset;
+
+        this._data = new DataView(this.buffer, dvOffset, byteLength);
+    }
+
+    /**
+     * Checks if the memory allocated to the buffer is sufficient to store more bytes after the offset
+     * @param byteLength The needed memory in bytes
+     */
+    available(byteLength: number = 1) {
+        return (this.offset + byteLength) <= this.length;
+    }
+
+    /**
+     * Check if little-endian mode is used for reading and writing multi-byte values
+     * Returns true if little-endian mode is used, false otherwise
+     */
+    isLittleEndian() {
+        return this.littleEndian;
+    }
+
+    /**
+     * Set little-endian mode for reading and writing multi-byte values
+     */
+    setLittleEndian() {
+        this.littleEndian = true;
+        return this;
+    }
+
+    /**
+     * Check if big-endian mode is used for reading and writing multi-byte values
+     * Returns true if big-endian mode is used, false otherwise
+     */
+    isBigEndian() {
+        return !this.littleEndian;
+    }
+
+    /**
+     * Switches to big-endian mode for reading and writing multi-byte values
+     */
+    setBigEndian() {
+        this.littleEndian = false;
+        return this;
+    }
+
+    /**
+     * Move the pointer n bytes forward
+     */
+    skip(n: number) {
+        if (n === undefined) n = 1;
+        this.offset += n;
+        return this;
+    }
+
+    /**
+     * Move the pointer to the given offset
+     */
+    seek(offset: number) {
+        this.offset = offset;
+        return this;
+    }
+
+    /**
+     * Store the current pointer offset.
+     */
+    mark() {
+        this._mark = this.offset;
+        return this;
+    }
+
+    /**
+     * Move the pointer back to the last pointer offset set by mark
+     */
+    reset() {
+        this.offset = this._mark;
+        return this;
+    }
+
+    /**
+     * Push the current pointer offset to the mark stack
+     */
+    pushMark() {
+        this._marks.push(this.offset);
+        return this;
+    }
+
+    /**
+     * Pop the last pointer offset from the mark stack, and set the current pointer offset to the popped value
+     */
+    popMark() {
+        const offset = this._marks.pop();
+        if (offset === undefined) throw new Error('Mark stack empty');
+        this.seek(offset);
+        return this;
+    }
+
+    /**
+     * Move the pointer offset back to 0
+     */
+    rewind() {
+        this.offset = 0;
+        return this;
+    }
+
+    /**
+     * Make sure the buffer has sufficient memory to write a given byteLength at the current pointer offset
+     * If the buffer's memory is insufficient, this method will create a new buffer (a copy) with a length
+     * that is twice (byteLength + current offset)
+     */
+    ensureAvailable(byteLength: number) {
+        if (byteLength === undefined) byteLength = 1;
+        if (!this.available(byteLength)) {
+            const lengthNeeded = this.offset + byteLength;
+            const newLength = lengthNeeded * 2;
+            const newArray = new Uint8Array(newLength);
+            newArray.set(new Uint8Array(this.buffer));
+            this.buffer = newArray.buffer;
+            this.length = this.byteLength = newLength;
+            this._data = new DataView(this.buffer);
+        }
+        return this;
+    }
+
+    /**
+     * Read a byte and return false if the byte's value is 0, or true otherwise
+     * Moves pointer forward
+     */
+    readBoolean() {
+        return this.readUint8() !== 0;
+    }
+
+    /**
+     * Read a signed 8-bit integer and move pointer forward
+     */
+    readInt8() {
+        return this._data.getInt8(this.offset++);
+    }
+
+    /**
+     * Read an unsigned 8-bit integer and move pointer forward
+     */
+    readUint8() {
+        return this._data.getUint8(this.offset++);
+    }
+
+    /**
+     * Alias for {@link IOBuffer#readUint8}
+     */
+    readByte() {
+        return this.readUint8();
+    }
+
+    /**
+     * Read n bytes and move pointer forward.
+     */
+    readBytes(n: number) {
+        if (n === undefined) n = 1;
+        const bytes = new Uint8Array(n);
+        for (let i = 0; i < n; i++) {
+            bytes[i] = this.readByte();
+        }
+        return bytes;
+    }
+
+    /**
+     * Read a 16-bit signed integer and move pointer forward
+     */
+    readInt16() {
+        const value = this._data.getInt16(this.offset, this.littleEndian);
+        this.offset += 2;
+        return value;
+    }
+
+    /**
+     * Read a 16-bit unsigned integer and move pointer forward
+     */
+    readUint16() {
+        const value = this._data.getUint16(this.offset, this.littleEndian);
+        this.offset += 2;
+        return value;
+    }
+
+    /**
+     * Read a 32-bit signed integer and move pointer forward
+     */
+    readInt32() {
+        const value = this._data.getInt32(this.offset, this.littleEndian);
+        this.offset += 4;
+        return value;
+    }
+
+    /**
+     * Read a 32-bit unsigned integer and move pointer forward
+     */
+    readUint32() {
+        const value = this._data.getUint32(this.offset, this.littleEndian);
+        this.offset += 4;
+        return value;
+    }
+
+    /**
+     * Read a 32-bit floating number and move pointer forward
+     */
+    readFloat32() {
+        const value = this._data.getFloat32(this.offset, this.littleEndian);
+        this.offset += 4;
+        return value;
+    }
+
+    /**
+     * Read a 64-bit floating number and move pointer forward
+     */
+    readFloat64() {
+        const value = this._data.getFloat64(this.offset, this.littleEndian);
+        this.offset += 8;
+        return value;
+    }
+
+    /**
+     * Read 1-byte ascii character and move pointer forward
+     */
+    readChar() {
+        return String.fromCharCode(this.readInt8());
+    }
+
+    /**
+     * Read n 1-byte ascii characters and move pointer forward
+     */
+    readChars(n = 1) {
+        charArray.length = n;
+        for (let i = 0; i < n; i++) {
+            charArray[i] = this.readChar();
+        }
+        return charArray.join('');
+    }
+
+    /**
+     * Write 0xff if the passed value is truthy, 0x00 otherwise
+     */
+    writeBoolean(value = false) {
+        this.writeUint8(value ? 0xff : 0x00);
+        return this;
+    }
+
+    /**
+     * Write value as an 8-bit signed integer
+     */
+    writeInt8(value: number) {
+        this.ensureAvailable(1);
+        this._data.setInt8(this.offset++, value);
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write value as an 8-bit unsigned integer
+     */
+    writeUint8(value: number) {
+        this.ensureAvailable(1);
+        this._data.setUint8(this.offset++, value);
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * An alias for IOBuffer#writeUint8
+     */
+    writeByte(value: number) {
+        return this.writeUint8(value);
+    }
+
+    /**
+     * Write bytes
+     */
+    writeBytes(bytes: number[] | Uint8Array) {
+        this.ensureAvailable(bytes.length);
+        for (let i = 0; i < bytes.length; i++) {
+            this._data.setUint8(this.offset++, bytes[i]);
+        }
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write value as a 16-bit signed integer
+     */
+    writeInt16(value: number) {
+        this.ensureAvailable(2);
+        this._data.setInt16(this.offset, value, this.littleEndian);
+        this.offset += 2;
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write value as a 16-bit unsigned integer
+     */
+    writeUint16(value: number) {
+        this.ensureAvailable(2);
+        this._data.setUint16(this.offset, value, this.littleEndian);
+        this.offset += 2;
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write a 32-bit signed integer at the current pointer offset
+     */
+    writeInt32(value: number) {
+        this.ensureAvailable(4);
+        this._data.setInt32(this.offset, value, this.littleEndian);
+        this.offset += 4;
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write a 32-bit unsigned integer at the current pointer offset
+     */
+    writeUint32(value: number) {
+        this.ensureAvailable(4);
+        this._data.setUint32(this.offset, value, this.littleEndian);
+        this.offset += 4;
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write a 32-bit floating number at the current pointer offset
+     */
+    writeFloat32(value: number) {
+        this.ensureAvailable(4);
+        this._data.setFloat32(this.offset, value, this.littleEndian);
+        this.offset += 4;
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write a 64-bit floating number at the current pointer offset
+     */
+    writeFloat64(value: number) {
+        this.ensureAvailable(8);
+        this._data.setFloat64(this.offset, value, this.littleEndian);
+        this.offset += 8;
+        this._updateLastWrittenByte();
+        return this;
+    }
+
+    /**
+     * Write the charCode of the passed string's first character to the current pointer offset
+     */
+    writeChar(str: string) {
+        return this.writeUint8(str.charCodeAt(0));
+    }
+
+    /**
+     * Write the charCodes of the passed string's characters to the current pointer offset
+     */
+    writeChars(str: string) {
+        for (let i = 0; i < str.length; i++) {
+            this.writeUint8(str.charCodeAt(i));
+        }
+        return this;
+    }
+
+    /**
+     * Export a Uint8Array view of the internal buffer.
+     * The view starts at the byte offset and its length
+     * is calculated to stop at the last written byte or the original length.
+     */
+    toArray() {
+        return new Uint8Array(this.buffer, this.byteOffset, this._lastWrittenByte);
+    }
+
+    /**
+     * Update the last written byte offset
+     */
+    private _updateLastWrittenByte() {
+        if (this.offset > this._lastWrittenByte) {
+            this._lastWrittenByte = this.offset;
+        }
+    }
+}
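
A minimal round-trip sketch of the class above (not part of the diff); it writes a NetCDF-style magic sequence and reads it back, using big-endian mode as the NetCDF reader below does:

```ts
import { IOBuffer } from './io-buffer';

const out = new IOBuffer(16);
out.setBigEndian();          // NetCDF headers are big-endian
out.writeChars('CDF');       // magic bytes
out.writeByte(1);            // version
out.writeUint32(42);         // e.g. record dimension length

const input = new IOBuffer(out.toArray());
input.setBigEndian();
console.log(input.readChars(3));  // 'CDF'
console.log(input.readByte());    // 1
console.log(input.readUint32());  // 42
```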

+ 527 - 0
src/mol-io/common/netcdf/reader.ts

@@ -0,0 +1,527 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Adapted from https://github.com/cheminfo-js/netcdfjs
+ * MIT License, Copyright (c) 2016 cheminfo
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+
+import { IOBuffer } from '../io-buffer';
+
+export interface NetCDFRecordDimension {
+    length: number,
+    id?: number,
+    name?: string,
+    recordStep?: number
+}
+
+export interface NetCDFVariable {
+    name: string
+    dimensions: any[]
+    attributes: any[]
+    type: string
+    size: number
+    offset: number
+    record: boolean
+}
+
+export interface NetCDFHeader {
+    recordDimension: NetCDFRecordDimension,
+    version: number,
+    dimensions: { name: string, size: number }[],
+    globalAttributes: { name: string, type: string, value: string | number }[],
+    variables: NetCDFVariable[]
+}
+
+export interface NetCDFDimension {
+    name: string,
+    size: number
+}
+
+/**
+ * Throws a 'not a valid NetCDF' error if the statement is true
+ */
+function notNetcdf(statement: boolean, reason: string) {
+    if (statement) {
+        throw new TypeError('Not a valid NetCDF v3.x file: ' + reason);
+    }
+}
+
+/**
+ * Moves 1, 2, or 3 bytes to next 4-byte boundary
+ */
+function padding(buffer: IOBuffer) {
+    if ((buffer.offset % 4) !== 0) {
+        buffer.skip(4 - (buffer.offset % 4));
+    }
+}
+
+/**
+ * Reads the name
+ */
+function readName(buffer: IOBuffer) {
+    // Read name
+    const nameLength = buffer.readUint32();
+    const name = buffer.readChars(nameLength);
+
+    // validate name
+    // TODO
+
+    // Apply padding
+    padding(buffer);
+    return name;
+}
+
+const types = {
+    BYTE: 1,
+    CHAR: 2,
+    SHORT: 3,
+    INT: 4,
+    FLOAT: 5,
+    DOUBLE: 6
+};
+
+/**
+ * Map a numeric type identifier to its type name
+ */
+function num2str(type: number) {
+    switch (Number(type)) {
+        case types.BYTE:
+            return 'byte';
+        case types.CHAR:
+            return 'char';
+        case types.SHORT:
+            return 'short';
+        case types.INT:
+            return 'int';
+        case types.FLOAT:
+            return 'float';
+        case types.DOUBLE:
+            return 'double';
+        default:
+            return 'undefined';
+    }
+}
+
+/**
+ * Map a numeric type identifier to its size in bytes
+ */
+function num2bytes(type: number) {
+    switch (Number(type)) {
+        case types.BYTE:
+            return 1;
+        case types.CHAR:
+            return 1;
+        case types.SHORT:
+            return 2;
+        case types.INT:
+            return 4;
+        case types.FLOAT:
+            return 4;
+        case types.DOUBLE:
+            return 8;
+        default:
+            return -1;
+    }
+}
+
+/**
+ * Reverse search of num2str
+ */
+function str2num(type: string) {
+    switch (String(type)) {
+        case 'byte':
+            return types.BYTE;
+        case 'char':
+            return types.CHAR;
+        case 'short':
+            return types.SHORT;
+        case 'int':
+            return types.INT;
+        case 'float':
+            return types.FLOAT;
+        case 'double':
+            return types.DOUBLE;
+        default:
+            return -1;
+    }
+}
+
+/**
+ * Auxiliary function to read numeric data
+ */
+function readNumber(size: number, bufferReader: Function) {
+    if (size !== 1) {
+        const numbers = new Array(size);
+        for (let i = 0; i < size; i++) {
+            numbers[i] = bufferReader();
+        }
+        return numbers;
+    } else {
+        return bufferReader();
+    }
+}
+
+/**
+ * Given a type and a size, reads the next element
+ */
+function readType(buffer: IOBuffer, type: number, size: number) {
+    switch (type) {
+        case types.BYTE:
+            return buffer.readBytes(size);
+        case types.CHAR:
+            return trimNull(buffer.readChars(size));
+        case types.SHORT:
+            return readNumber(size, buffer.readInt16.bind(buffer));
+        case types.INT:
+            return readNumber(size, buffer.readInt32.bind(buffer));
+        case types.FLOAT:
+            return readNumber(size, buffer.readFloat32.bind(buffer));
+        case types.DOUBLE:
+            return readNumber(size, buffer.readFloat64.bind(buffer));
+        default:
+            notNetcdf(true, 'non valid type ' + type);
+            return undefined;
+    }
+}
+
+/**
+ * Removes a terminating null character
+ */
+function trimNull(value: string) {
+    if (value.charCodeAt(value.length - 1) === 0) {
+        return value.substring(0, value.length - 1);
+    }
+    return value;
+}
+
+// const STREAMING = 4294967295;
+
+/**
+ * Read data for the given non-record variable
+ */
+function nonRecord(buffer: IOBuffer, variable: { type: string, size: number }) {
+    // variable type
+    const type = str2num(variable.type);
+
+    // size of the data
+    const size = variable.size / num2bytes(type);
+
+    // iterates over the data
+    const data = new Array(size);
+    for (let i = 0; i < size; i++) {
+        data[i] = readType(buffer, type, 1);
+    }
+
+    return data;
+}
+
+/**
+ * Read data for the given record variable
+ */
+function record(buffer: IOBuffer, variable: { type: string, size: number }, recordDimension: NetCDFRecordDimension) {
+    // variable type
+    const type = str2num(variable.type);
+    const width = variable.size ? variable.size / num2bytes(type) : 1;
+
+    // size of the data
+    // TODO streaming data
+    const size = recordDimension.length;
+
+    // iterates over the data
+    const data = new Array(size);
+    const step = recordDimension.recordStep;
+
+    for (let i = 0; i < size; i++) {
+        const currentOffset = buffer.offset;
+        data[i] = readType(buffer, type, width);
+        buffer.seek(currentOffset + step!);
+    }
+
+    return data;
+}
+
+// Grammar constants
+const ZERO = 0;
+const NC_DIMENSION = 10;
+const NC_VARIABLE = 11;
+const NC_ATTRIBUTE = 12;
+
+/**
+ * Read the header of the file
+ * Returns object with the fields:
+ *  - `recordDimension`: Object with the length of the record dimension
+ *  - `dimensions`: List of dimensions
+ *  - `globalAttributes`: List of global attributes
+ *  - `variables`: List of variables
+ */
+function header(buffer: IOBuffer, version: number) {
+    // Length of record dimension
+    // sum of the varSize's of all the record variables.
+    const header: Partial<NetCDFHeader> = { recordDimension: { length: buffer.readUint32() } };
+
+    // Version
+    header.version = version;
+
+    // List of dimensions
+    const dimList = dimensionsList(buffer) as { dimensions: NetCDFDimension[], recordId: number, recordName: string };
+    header.recordDimension!.id = dimList.recordId;
+    header.recordDimension!.name = dimList.recordName;
+    header.dimensions = dimList.dimensions;
+
+    // List of global attributes
+    header.globalAttributes = attributesList(buffer);
+
+    // List of variables
+    const variables = variablesList(buffer, dimList.recordId, version) as { variables: any[], recordStep: number };
+    header.variables = variables.variables;
+    header.recordDimension!.recordStep = variables.recordStep;
+
+    return header;
+}
+
+/**
+ * List of dimensions
+ */
+function dimensionsList(buffer: IOBuffer) {
+    let dimensions: NetCDFDimension[], recordId, recordName;
+    const dimList = buffer.readUint32();
+    if (dimList === ZERO) {
+        notNetcdf((buffer.readUint32() !== ZERO), 'wrong empty tag for list of dimensions');
+        return [];
+    } else {
+        notNetcdf((dimList !== NC_DIMENSION), 'wrong tag for list of dimensions');
+
+        // Length of dimensions
+        const dimensionSize = buffer.readUint32();
+        dimensions = new Array(dimensionSize);
+        for (let dim = 0; dim < dimensionSize; dim++) {
+            // Read name
+            const name = readName(buffer);
+
+            // Read dimension size
+            const size = buffer.readUint32();
+            if (size === 0) {
+                recordId = dim;
+                recordName = name;
+            }
+
+            dimensions[dim] = {
+                name: name,
+                size: size
+            };
+        }
+        return {
+            dimensions: dimensions,
+            recordId: recordId,
+            recordName: recordName
+        };
+    }
+}
+
+/**
+ * List of attributes
+ */
+function attributesList(buffer: IOBuffer) {
+    let attributes: { name: string, type: ReturnType<typeof num2str>, value: any }[];
+    const gAttList = buffer.readUint32();
+    if (gAttList === ZERO) {
+        notNetcdf((buffer.readUint32() !== ZERO), 'wrong empty tag for list of attributes');
+        return [];
+    } else {
+        notNetcdf((gAttList !== NC_ATTRIBUTE), 'wrong tag for list of attributes');
+
+        // Length of attributes
+        const attributeSize = buffer.readUint32();
+        attributes = new Array(attributeSize);
+        for (let gAtt = 0; gAtt < attributeSize; gAtt++) {
+            // Read name
+            const name = readName(buffer);
+
+            // Read type
+            const type = buffer.readUint32();
+            notNetcdf(((type < 1) || (type > 6)), 'non valid type ' + type);
+
+            // Read attribute
+            const size = buffer.readUint32();
+            const value = readType(buffer, type, size);
+
+            // Apply padding
+            padding(buffer);
+
+            attributes[gAtt] = {
+                name: name,
+                type: num2str(type),
+                value: value
+            };
+        }
+    }
+    return attributes;
+}
+
+/**
+ * List of variables
+ */
+function variablesList(buffer: IOBuffer, recordId: number, version: number) {
+    const varList = buffer.readUint32();
+    let recordStep = 0;
+    let variables;
+    if (varList === ZERO) {
+        notNetcdf(
+            (buffer.readUint32() !== ZERO),
+            'wrong empty tag for list of variables'
+        );
+        return [];
+    } else {
+        notNetcdf((varList !== NC_VARIABLE), 'wrong tag for list of variables');
+
+        // Length of variables
+        const variableSize = buffer.readUint32();
+        variables = new Array(variableSize);
+        for (let v = 0; v < variableSize; v++) {
+            // Read name
+            const name = readName(buffer);
+
+            // Read dimensionality of the variable
+            const dimensionality = buffer.readUint32();
+
+            // Index into the list of dimensions
+            const dimensionsIds = new Array(dimensionality);
+            for (let dim = 0; dim < dimensionality; dim++) {
+                dimensionsIds[dim] = buffer.readUint32();
+            }
+
+            // Read the variable's attributes
+            const attributes = attributesList(buffer);
+
+            // Read type
+            const type = buffer.readUint32();
+            notNetcdf(((type < 1) || (type > 6)), 'non valid type ' + type);
+
+            // Read variable size
+            // The 32-bit varSize field is not large enough to contain the
+            // size of variables that require more than 2^32 - 4 bytes,
+            // so 2^32 - 1 is used in the varSize field for such variables.
+            const varSize = buffer.readUint32();
+
+            // Read offset
+            let offset = buffer.readUint32();
+            if (version === 2) {
+                notNetcdf((offset > 0), 'offsets larger than 4GB not supported');
+                offset = buffer.readUint32();
+            }
+
+            // Count amount of record variables
+            if (dimensionsIds[0] === recordId) {
+                recordStep += varSize;
+            }
+
+            variables[v] = {
+                name: name,
+                dimensions: dimensionsIds,
+                attributes: attributes,
+                type: num2str(type),
+                size: varSize,
+                offset: offset,
+                record: (dimensionsIds[0] === recordId)
+            };
+        }
+    }
+
+    return {
+        variables: variables,
+        recordStep: recordStep
+    };
+}
+
+/**
+ * Reads a NetCDF v3.x file
+ * https://www.unidata.ucar.edu/software/netcdf/docs/file_format_specifications.html
+ */
+export class NetcdfReader {
+    header: Partial<NetCDFHeader>;
+    buffer: IOBuffer;
+
+    constructor(data: ArrayBuffer) {
+        const buffer = new IOBuffer(data);
+        buffer.setBigEndian();
+
+        // Validate that it's a NetCDF file
+        notNetcdf((buffer.readChars(3) !== 'CDF'), 'should start with CDF');
+
+        // Check the NetCDF format
+        const version = buffer.readByte();
+        notNetcdf((version > 2), 'unknown version');
+
+        // Read the header
+        this.header = header(buffer, version);
+        this.buffer = buffer;
+    }
+
+    /**
+     * Version for the NetCDF format
+     */
+    get version() {
+        if (this.header.version === 1) {
+            return 'classic format';
+        } else {
+            return '64-bit offset format';
+        }
+    }
+
+    get recordDimension() {
+        return this.header.recordDimension;
+    }
+
+    get dimensions() {
+        return this.header.dimensions;
+    }
+
+    get globalAttributes() {
+        return this.header.globalAttributes;
+    }
+
+    get variables() {
+        return this.header.variables;
+    }
+
+    /**
+     * Checks if a variable is available
+     * @param {string|object} variableName - Name of the variable to check
+     * @return {Boolean} - Variable existence
+     */
+    hasDataVariable(variableName: string) {
+        return this.header.variables && this.header.variables.findIndex(val => val.name === variableName) !== -1;
+    }
+
+    /**
+     * Retrieves the data for a given variable
+     * @param {string|object} variableName - Name of the variable to search or variable object
+     * @return {Array} - List with the variable values
+     */
+    getDataVariable(variableName: string | NetCDFVariable) {
+        let variable: NetCDFVariable | undefined;
+        if (typeof variableName === 'string') {
+            // search the variable
+            variable = this.header.variables?.find((val) => val.name === variableName);
+        } else {
+            variable = variableName;
+        }
+
+        // throws if variable not found
+        if (variable === undefined) throw new Error('variable not found');
+
+        // go to the offset position
+        this.buffer.seek(variable.offset);
+
+        if (variable.record) {
+            // record variable case
+            return record(this.buffer, variable, this.header.recordDimension!);
+        } else {
+            // non-record variable case
+            return nonRecord(this.buffer, variable);
+        }
+    }
+}
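
A minimal usage sketch of the reader (not part of the diff), mirroring how the NCTRAJ parser below consumes it; `data` is the raw file contents:

```ts
import { NetcdfReader } from './reader';

function readCoordinateFrames(data: ArrayBuffer) {
    const nc = new NetcdfReader(data);
    if (!nc.hasDataVariable('coordinates')) throw new Error('missing coordinates variable');
    // One entry per frame; each entry holds the flattened x/y/z values of all atoms.
    return nc.getDataVariable('coordinates') as number[][];
}
```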

+ 89 - 0
src/mol-io/reader/nctraj/parser.ts

@@ -0,0 +1,89 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Task } from '../../../mol-task';
+import { Mutable } from '../../../mol-util/type-helpers';
+import { NetcdfReader } from '../../common/netcdf/reader';
+import { ReaderResult as Result } from '../result';
+
+export interface NctrajFile {
+    coordinates: number[][],
+    velocities?: number[][],
+    forces?: number[][],
+    cell_lengths?: number[][],
+    cell_angles?: number[][],
+    time?: number[],
+    timeOffset: number,
+    deltaTime: number
+}
+
+async function parseInternal(data: Uint8Array) {
+    // http://ambermd.org/netcdf/nctraj.xhtml
+
+    const nc = new NetcdfReader(data);
+
+    const f: Mutable<NctrajFile> = {
+        coordinates: [],
+        time: [],
+        timeOffset: 0,
+        deltaTime: 1
+    };
+
+    for (const c of nc.getDataVariable('coordinates')) f.coordinates.push(c);
+
+    if (nc.hasDataVariable('velocities')) {
+        const velocities: number[][] = [];
+        for (const v of nc.getDataVariable('velocities')) velocities.push(v);
+        f.velocities = velocities;
+    }
+
+    if (nc.hasDataVariable('forces')) {
+        const forces: number[][] = [];
+        for (const f of nc.getDataVariable('forces')) forces.push(f);
+        f.forces = forces;
+    }
+
+    if (nc.hasDataVariable('cell_lengths')) {
+        const cell_lengths: number[][] = [];
+        for (const l of nc.getDataVariable('cell_lengths')) cell_lengths.push(l);
+        f.cell_lengths = cell_lengths;
+    }
+
+    if (nc.hasDataVariable('cell_angles')) {
+        const cell_angles: number[][] = [];
+        for (const a of nc.getDataVariable('cell_angles')) cell_angles.push(a);
+        f.cell_angles = cell_angles;
+    }
+
+    if (nc.hasDataVariable('time')) {
+        const time: number[] = [];
+        for (const t of nc.getDataVariable('time')) time.push(t);
+        f.time = time;
+    }
+
+    if (f.time) {
+        if (f.time.length >= 1) {
+            f.timeOffset = f.time[0];
+        }
+        if (f.time.length >= 2) {
+            f.deltaTime = f.time[1] - f.time[0];
+        }
+    }
+
+    return f;
+}
+
+export function parseNctraj(data: Uint8Array) {
+    return Task.create<Result<NctrajFile>>('Parse NCTRAJ', async ctx => {
+        try {
+            ctx.update({ canAbort: true, message: 'Parsing trajectory...' });
+            const file = await parseInternal(data);
+            return Result.success(file);
+        } catch (e) {
+            return Result.error('' + e);
+        }
+    });
+}
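
A minimal usage sketch (not part of the diff), assuming the file contents are already available as a `Uint8Array` and running the task directly via `Task.run()` rather than through a plugin:

```ts
import { parseNctraj } from './parser';

async function summarizeNctraj(data: Uint8Array) {
    const parsed = await parseNctraj(data).run();
    if (parsed.isError) throw new Error(parsed.message);
    const { coordinates, timeOffset, deltaTime } = parsed.result;
    console.log(`frames: ${coordinates.length}, t0: ${timeOffset}, dt: ${deltaTime}`);
}
```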

+ 176 - 0
src/mol-io/reader/prmtop/parser.ts

@@ -0,0 +1,176 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Task, RuntimeContext } from '../../../mol-task';
+import { Tokenizer, TokenBuilder, Tokens } from '../common/text/tokenizer';
+import { ReaderResult as Result } from '../result';
+import { TokenColumnProvider as TokenColumn } from '../common/text/column/token';
+import { Column } from '../../../mol-data/db';
+import { Mutable } from '../../../mol-util/type-helpers';
+
+// http://ambermd.org/prmtop.pdf
+// https://ambermd.org/FileFormats.php#topology
+
+const Pointers = {
+    'NATOM': '', 'NTYPES': '', 'NBONH': '', 'MBONA': '', 'NTHETH': '', 'MTHETA': '',
+    'NPHIH': '', 'MPHIA': '', 'NHPARM': '', 'NPARM': '', 'NNB': '', 'NRES': '',
+    'NBONA': '', 'NTHETA': '', 'NPHIA': '', 'NUMBND': '', 'NUMANG': '', 'NPTRA': '',
+    'NATYP': '', 'NPHB': '', 'IFPERT': '', 'NBPER': '', 'NGPER': '', 'NDPER': '',
+    'MBPER': '', 'MGPER': '', 'MDPER': '', 'IFBOX': '', 'NMXRS': '', 'IFCAP': '',
+    'NUMEXTRA': '', 'NCOPY': '',
+};
+type PointerName = keyof typeof Pointers;
+const PointersNames = Object.keys(Pointers) as PointerName[];
+
+export interface PrmtopFile {
+    readonly version: string
+    readonly title: ReadonlyArray<string>
+    readonly pointers: Readonly<Record<PointerName, number>>
+    readonly atomName: Column<string>
+    readonly charge: Column<number>
+    readonly mass: Column<number>
+    readonly residueLabel: Column<string>
+    readonly residuePointer: Column<number>
+    readonly bondsIncHydrogen: Column<number>
+    readonly bondsWithoutHydrogen: Column<number>
+    readonly radii: Column<number>
+}
+
+const { readLine, markLine, trim } = Tokenizer;
+
+function State(tokenizer: Tokenizer, runtimeCtx: RuntimeContext) {
+    return {
+        tokenizer,
+        runtimeCtx,
+    };
+}
+type State = ReturnType<typeof State>
+
+function handleTitle(state: State): string[] {
+    const { tokenizer } = state;
+    const title: string[] = [];
+
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        if (tokenizer.data[tokenizer.position] === '%') break;
+        const line = readLine(tokenizer).trim();
+        if (line) title.push(line);
+    }
+
+    return title;
+}
+
+function handlePointers(state: State): Record<PointerName, number> {
+    const { tokenizer } = state;
+
+    const pointers: Record<PointerName, number> = Object.create(null);
+    PointersNames.forEach(name => { pointers[name] = 0; });
+
+    let curIdx = 0;
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        if (tokenizer.data[tokenizer.position] === '%') break;
+        const line = readLine(tokenizer);
+
+        const n = Math.min(curIdx + 10, 32);
+        for (let i = 0; curIdx < n; ++i, ++curIdx) {
+            pointers[PointersNames[curIdx]] = parseInt(
+                line.substring(i * 8, i * 8 + 8).trim()
+            );
+        }
+    }
+
+    return pointers;
+}
+
+function handleTokens(state: State, count: number, countPerLine: number, itemSize: number): Tokens {
+    const { tokenizer } = state;
+
+    const tokens = TokenBuilder.create(tokenizer.data, count * 2);
+
+    let curIdx = 0;
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        if (tokenizer.data[tokenizer.position] === '%') break;
+
+        tokenizer.tokenStart = tokenizer.position;
+        const n = Math.min(curIdx + countPerLine, count);
+        for (let i = 0; curIdx < n; ++i, ++curIdx) {
+            const p = tokenizer.position;
+            trim(tokenizer, tokenizer.position, tokenizer.position + itemSize);
+            TokenBuilder.addUnchecked(tokens, tokenizer.tokenStart, tokenizer.tokenEnd);
+            tokenizer.position = p + itemSize;
+        }
+
+        markLine(tokenizer);
+    }
+
+    return tokens;
+}
+
+async function parseInternal(data: string, ctx: RuntimeContext): Promise<Result<PrmtopFile>> {
+    const t = Tokenizer(data);
+    const state = State(t, ctx);
+
+    const result: Mutable<PrmtopFile> = Object.create(null);
+    let prevPosition = 0;
+
+    while (t.tokenEnd < t.length) {
+        if (t.position - prevPosition > 100000 && ctx.shouldUpdate) {
+            prevPosition = t.position;
+            await ctx.update({ current: t.position, max: t.length });
+        }
+
+        const line = readLine(state.tokenizer).trim();
+        if (line.startsWith('%VERSION')) {
+            result.version = line.substring(8).trim();
+        } else if (line.startsWith('%FLAG')) {
+            const flag = line.substring(5).trim();
+            const formatLine = readLine(state.tokenizer).trim();
+            if (!formatLine.startsWith('%FORMAT')) throw new Error('expected %FORMAT');
+
+            if (flag === 'TITLE') {
+                result.title = handleTitle(state);
+            } else if (flag === 'POINTERS') {
+                result.pointers = handlePointers(state);
+            } else if (flag === 'ATOM_NAME') {
+                const tokens = handleTokens(state, result.pointers['NATOM'], 20, 4);
+                result.atomName = TokenColumn(tokens)(Column.Schema.str);
+            } else if (flag === 'CHARGE') {
+                const tokens = handleTokens(state, result.pointers['NATOM'], 5, 16);
+                result.charge = TokenColumn(tokens)(Column.Schema.float);
+            } else if (flag === 'MASS') {
+                const tokens = handleTokens(state, result.pointers['NATOM'], 5, 16);
+                result.mass = TokenColumn(tokens)(Column.Schema.float);
+            } else if (flag === 'RESIDUE_LABEL') {
+                const tokens = handleTokens(state, result.pointers['NRES'], 20, 4);
+                result.residueLabel = TokenColumn(tokens)(Column.Schema.str);
+            } else if (flag === 'RESIDUE_POINTER') {
+                const tokens = handleTokens(state, result.pointers['NRES'], 10, 8);
+                result.residuePointer = TokenColumn(tokens)(Column.Schema.int);
+            } else if (flag === 'BONDS_INC_HYDROGEN') {
+                const tokens = handleTokens(state, result.pointers['NBONH'] * 3, 10, 8);
+                result.bondsIncHydrogen = TokenColumn(tokens)(Column.Schema.int);
+            } else if (flag === 'BONDS_WITHOUT_HYDROGEN') {
+                const tokens = handleTokens(state, result.pointers['NBONA'] * 3, 10, 8);
+                result.bondsWithoutHydrogen = TokenColumn(tokens)(Column.Schema.int);
+            } else if (flag === 'RADII') {
+                const tokens = handleTokens(state, result.pointers['NATOM'], 5, 16);
+                result.radii = TokenColumn(tokens)(Column.Schema.float);
+            } else {
+                while (t.tokenEnd < t.length) {
+                    if (t.data[t.position] === '%') break;
+                    markLine(t);
+                }
+            }
+        }
+    }
+
+    return Result.success(result);
+}
+
+export function parsePrmtop(data: string) {
+    return Task.create<Result<PrmtopFile>>('Parse PRMTOP', async ctx => {
+        return await parseInternal(data, ctx);
+    });
+}
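For reference, a minimal usage sketch of the parser added above (not part of this commit). It assumes the published `molstar/lib` package layout for the import path and the `Task`/`ReaderResult` conventions used elsewhere in mol-io.

```ts
import { parsePrmtop } from 'molstar/lib/mol-io/reader/prmtop/parser';

export async function readPrmtop(data: string) {
    const parsed = await parsePrmtop(data).run();
    if (parsed.isError) throw new Error(`PRMTOP parsing failed: ${parsed}`);
    const prmtop = parsed.result;
    // POINTERS carries the global counts used by the section handlers above,
    // e.g. NATOM (atom count) and NRES (residue count).
    console.log(prmtop.version, prmtop.pointers['NATOM'], prmtop.pointers['NRES']);
    return prmtop;
}
```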

+ 303 - 0
src/mol-io/reader/top/parser.ts

@@ -0,0 +1,303 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Task, RuntimeContext } from '../../../mol-task';
+import { Tokenizer, TokenBuilder } from '../common/text/tokenizer';
+import { ReaderResult as Result } from '../result';
+import { TokenColumnProvider as TokenColumn } from '../common/text/column/token';
+import { Column, Table } from '../../../mol-data/db';
+import { Mutable } from '../../../mol-util/type-helpers';
+
+// https://manual.gromacs.org/2021-current/reference-manual/file-formats.html#top
+
+const AtomsSchema = {
+    nr: Column.Schema.Int(),
+    type: Column.Schema.Str(),
+    resnr: Column.Schema.Int(),
+    residu: Column.Schema.Str(),
+    atom: Column.Schema.Str(),
+    cgnr: Column.Schema.Int(),
+    charge: Column.Schema.Float(),
+    mass: Column.Schema.Float(),
+};
+
+const BondsSchema = {
+    ai: Column.Schema.Int(),
+    aj: Column.Schema.Int(),
+};
+
+const MoleculesSchema = {
+    compound: Column.Schema.Str(),
+    molCount: Column.Schema.Int(),
+};
+
+type Compound = {
+    atoms: Table<typeof AtomsSchema>
+    bonds?: Table<typeof BondsSchema>
+}
+
+export interface TopFile {
+    readonly system: string
+    readonly molecules: Table<typeof MoleculesSchema>
+    readonly compounds: Record<string, Compound>
+}
+
+const { readLine, markLine, skipWhitespace, markStart, eatValue, eatLine } = Tokenizer;
+
+function State(tokenizer: Tokenizer, runtimeCtx: RuntimeContext) {
+    return {
+        tokenizer,
+        runtimeCtx,
+    };
+}
+type State = ReturnType<typeof State>
+
+const reField = /\[ (.+) \]/;
+const reWhitespace = /\s+/;
+
+function handleMoleculetype(state: State) {
+    const { tokenizer } = state;
+
+    let molName: string | undefined = undefined;
+
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        skipWhitespace(tokenizer);
+        const c = tokenizer.data[tokenizer.position];
+        if (c === '[') break;
+        if (c === ';' || c === '*') {
+            markLine(tokenizer);
+            continue;
+        }
+
+        if (molName !== undefined) throw new Error('more than one molName');
+
+        const line = readLine(tokenizer);
+        molName = line.split(reWhitespace)[0];
+    }
+
+    if (molName === undefined) throw new Error('missing molName');
+
+    return molName;
+}
+
+function handleAtoms(state: State) {
+    const { tokenizer } = state;
+
+    const nr = TokenBuilder.create(tokenizer.data, 64);
+    const type = TokenBuilder.create(tokenizer.data, 64);
+    const resnr = TokenBuilder.create(tokenizer.data, 64);
+    const residu = TokenBuilder.create(tokenizer.data, 64);
+    const atom = TokenBuilder.create(tokenizer.data, 64);
+    const cgnr = TokenBuilder.create(tokenizer.data, 64);
+    const charge = TokenBuilder.create(tokenizer.data, 64);
+    const mass = TokenBuilder.create(tokenizer.data, 64);
+
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        skipWhitespace(tokenizer);
+        const c = tokenizer.data[tokenizer.position];
+        if (c === '[') break;
+        if (c === ';' || c === '*') {
+            markLine(tokenizer);
+            continue;
+        }
+
+        for (let j = 0; j < 8; ++j) {
+            skipWhitespace(tokenizer);
+            markStart(tokenizer);
+            eatValue(tokenizer);
+
+            switch (j) {
+                case 0: TokenBuilder.add(nr, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 1: TokenBuilder.add(type, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 2: TokenBuilder.add(resnr, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 3: TokenBuilder.add(residu, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 4: TokenBuilder.add(atom, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 5: TokenBuilder.add(cgnr, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 6: TokenBuilder.add(charge, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 7: TokenBuilder.add(mass, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+            }
+        }
+        // ignore any extra columns
+        markLine(tokenizer);
+    }
+
+    return Table.ofColumns(AtomsSchema, {
+        nr: TokenColumn(nr)(Column.Schema.int),
+        type: TokenColumn(type)(Column.Schema.str),
+        resnr: TokenColumn(resnr)(Column.Schema.int),
+        residu: TokenColumn(residu)(Column.Schema.str),
+        atom: TokenColumn(atom)(Column.Schema.str),
+        cgnr: TokenColumn(cgnr)(Column.Schema.int),
+        charge: TokenColumn(charge)(Column.Schema.float),
+        mass: TokenColumn(mass)(Column.Schema.float),
+    });
+}
+
+function handleBonds(state: State) {
+    const { tokenizer } = state;
+
+    const ai = TokenBuilder.create(tokenizer.data, 64);
+    const aj = TokenBuilder.create(tokenizer.data, 64);
+
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        skipWhitespace(tokenizer);
+        const c = tokenizer.data[tokenizer.position];
+        if (c === '[') break;
+        if (c === ';' || c === '*') {
+            markLine(tokenizer);
+            continue;
+        }
+
+        for (let j = 0; j < 2; ++j) {
+            skipWhitespace(tokenizer);
+            markStart(tokenizer);
+            eatValue(tokenizer);
+
+            switch (j) {
+                case 0: TokenBuilder.add(ai, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 1: TokenBuilder.add(aj, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+            }
+        }
+        // ignore any extra columns
+        markLine(tokenizer);
+    }
+
+    return Table.ofColumns(BondsSchema, {
+        ai: TokenColumn(ai)(Column.Schema.int),
+        aj: TokenColumn(aj)(Column.Schema.int),
+    });
+}
+
+function handleSystem(state: State) {
+    const { tokenizer } = state;
+
+    let system: string | undefined = undefined;
+
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        skipWhitespace(tokenizer);
+        const c = tokenizer.data[tokenizer.position];
+        if (c === '[') break;
+        if (c === ';' || c === '*') {
+            markLine(tokenizer);
+            continue;
+        }
+
+        if (system !== undefined) throw new Error('more than one system');
+        system = readLine(tokenizer).trim();
+    }
+
+    if (system === undefined) throw new Error('missing system');
+
+    return system;
+}
+
+function handleMolecules(state: State) {
+    const { tokenizer } = state;
+
+    const compound = TokenBuilder.create(tokenizer.data, 64);
+    const molCount = TokenBuilder.create(tokenizer.data, 64);
+
+    while (tokenizer.tokenEnd < tokenizer.length) {
+        skipWhitespace(tokenizer);
+        if (tokenizer.position >= tokenizer.length) break;
+
+        const c = tokenizer.data[tokenizer.position];
+        if (c === '[') break;
+        if (c === ';' || c === '*') {
+            markLine(tokenizer);
+            continue;
+        }
+
+        for (let j = 0; j < 2; ++j) {
+            skipWhitespace(tokenizer);
+            markStart(tokenizer);
+            eatValue(tokenizer);
+
+            switch (j) {
+                case 0: TokenBuilder.add(compound, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+                case 1: TokenBuilder.add(molCount, tokenizer.tokenStart, tokenizer.tokenEnd); break;
+            }
+        }
+        // ignore any extra columns
+        eatLine(tokenizer);
+        markStart(tokenizer);
+    }
+
+    return Table.ofColumns(MoleculesSchema, {
+        compound: TokenColumn(compound)(Column.Schema.str),
+        molCount: TokenColumn(molCount)(Column.Schema.int),
+    });
+}
+
+async function parseInternal(data: string, ctx: RuntimeContext): Promise<Result<TopFile>> {
+    const t = Tokenizer(data);
+    const state = State(t, ctx);
+
+    const result: Mutable<TopFile> = Object.create(null);
+    let prevPosition = 0;
+
+    result.compounds = {};
+    let currentCompound: Partial<Compound> = {};
+    let currentMolName = '';
+
+    function addMol() {
+        if (currentMolName && currentCompound.atoms) {
+            result.compounds[currentMolName] = currentCompound as Compound;
+            currentCompound = {};
+            currentMolName = '';
+        }
+    }
+
+    while (t.tokenEnd < t.length) {
+        if (t.position - prevPosition > 100000 && ctx.shouldUpdate) {
+            prevPosition = t.position;
+            await ctx.update({ current: t.position, max: t.length });
+        }
+
+        const line = readLine(state.tokenizer).trim();
+
+        if (!line || line[0] === '*' || line[0] === ';') {
+            continue;
+        }
+
+        if (line.startsWith('#include')) {
+            throw new Error('#include statements not allowed');
+        }
+
+        if (line.startsWith('[')) {
+            const fieldMatch = line.match(reField);
+            if (fieldMatch === null) throw new Error('expected field name');
+
+            const fieldName = fieldMatch[1];
+            if (fieldName === 'moleculetype') {
+                addMol();
+                currentMolName = handleMoleculetype(state);
+            } else if (fieldName === 'atoms') {
+                currentCompound.atoms = handleAtoms(state);
+            } else if (fieldName === 'bonds') {
+                currentCompound.bonds = handleBonds(state);
+            } else if (fieldName === 'system') {
+                result.system = handleSystem(state);
+            } else if (fieldName === 'molecules') {
+                addMol(); // add the last compound
+                result.molecules = handleMolecules(state);
+            } else {
+                while (t.tokenEnd < t.length) {
+                    if (t.data[t.position] === '[') break;
+                    markLine(t);
+                }
+            }
+        }
+    }
+
+    return Result.success(result);
+}
+
+export function parseTop(data: string) {
+    return Task.create<Result<TopFile>>('Parse TOP', async ctx => {
+        return await parseInternal(data, ctx);
+    });
+}
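A usage sketch for the TOP reader above (not part of the commit; import path assumes the published package layout). Note that `#include` statements are rejected, so topologies should first be expanded into a single file, e.g. with `gmx grompp -pp`.

```ts
import { parseTop } from 'molstar/lib/mol-io/reader/top/parser';

export async function readTop(data: string) {
    const parsed = await parseTop(data).run();
    if (parsed.isError) throw new Error(`TOP parsing failed: ${parsed}`);
    const top = parsed.result;
    // list the [ molecules ] section and the per-molecule atom counts
    for (let i = 0, il = top.molecules._rowCount; i < il; ++i) {
        const name = top.molecules.compound.value(i);
        const count = top.molecules.molCount.value(i);
        console.log(`${name} x ${count}, atoms: ${top.compounds[name].atoms._rowCount}`);
    }
    return top;
}
```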

+ 157 - 0
src/mol-io/reader/trr/parser.ts

@@ -0,0 +1,157 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Adapted from NGL.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Task } from '../../../mol-task';
+import { ReaderResult as Result } from '../result';
+
+export interface TrrFile {
+    frames: { count: number, x: Float32Array, y: Float32Array, z: Float32Array }[],
+    boxes: number[][],
+    times: number[],
+    timeOffset: number,
+    deltaTime: number
+}
+
+async function parseInternal(data: Uint8Array) {
+    // https://github.com/gromacs/gromacs/blob/master/src/gromacs/fileio/trrio.cpp
+
+    const dv = new DataView(data.buffer);
+
+    const f: TrrFile = {
+        frames: [],
+        boxes: [],
+        times: [],
+        timeOffset: 0,
+        deltaTime: 0
+    };
+    const coordinates = f.frames;
+    const boxes = f.boxes;
+    const times = f.times;
+
+    let offset = 0;
+
+    while (true) {
+        // const magicnum = dv.getInt32(offset)
+        // const i1 = dv.getFloat32(offset + 4)
+        offset += 8;
+
+        const versionSize = dv.getInt32(offset);
+        offset += 4;
+        offset += versionSize;
+
+        // const irSize = dv.getInt32(offset)
+        // const eSize = dv.getInt32(offset + 4)
+        const boxSize = dv.getInt32(offset + 8);
+        const virSize = dv.getInt32(offset + 12);
+        const presSize = dv.getInt32(offset + 16);
+        // const topSize = dv.getInt32(offset + 20)
+        // const symSize = dv.getInt32(offset + 24)
+        const coordSize = dv.getInt32(offset + 28);
+        const velocitySize = dv.getInt32(offset + 32);
+        const forceSize = dv.getInt32(offset + 36);
+        const natoms = dv.getInt32(offset + 40);
+        // const step = dv.getInt32(offset + 44)
+        // const nre = dv.getInt32(offset + 48)
+        offset += 52;
+
+        const floatSize = boxSize / 9;
+        const natoms3 = natoms * 3;
+
+        // let lambda
+        if (floatSize === 8) {
+            times.push(dv.getFloat64(offset));
+            // lambda = dv.getFloat64(offset + 8)
+        } else {
+            times.push(dv.getFloat32(offset));
+            // lambda = dv.getFloat32(offset + 4)
+        }
+        offset += 2 * floatSize;
+
+        if (boxSize) {
+            const box = new Float32Array(9);
+            if (floatSize === 8) {
+                for (let i = 0; i < 9; ++i) {
+                    box[i] = dv.getFloat64(offset) * 10;
+                    offset += 8;
+                }
+            } else {
+                for (let i = 0; i < 9; ++i) {
+                    box[i] = dv.getFloat32(offset) * 10;
+                    offset += 4;
+                }
+            }
+            boxes.push(box as unknown as number[]);
+        }
+
+        // ignore, unused
+        offset += virSize;
+
+        // ignore, unused
+        offset += presSize;
+
+        if (coordSize) {
+            const x = new Float32Array(natoms);
+            const y = new Float32Array(natoms);
+            const z = new Float32Array(natoms);
+            if (floatSize === 8) {
+                for (let i = 0; i < natoms; ++i) {
+                    x[i] = dv.getFloat64(offset) * 10;
+                    y[i] = dv.getFloat64(offset + 8) * 10;
+                    z[i] = dv.getFloat64(offset + 16) * 10;
+                    offset += 24;
+                }
+            } else {
+                const tmp = new Uint32Array(data.buffer, offset, natoms3);
+                for (let i = 0; i < natoms3; ++i) {
+                    const value = tmp[i];
+                    tmp[i] = (
+                        ((value & 0xFF) << 24) | ((value & 0xFF00) << 8) |
+                        ((value >> 8) & 0xFF00) | ((value >> 24) & 0xFF)
+                    );
+                }
+                const frameCoords = new Float32Array(data.buffer, offset, natoms3);
+                for (let i = 0; i < natoms; ++i) {
+                    x[i] = frameCoords[i * 3] * 10;
+                    y[i] = frameCoords[i * 3 + 1] * 10;
+                    z[i] = frameCoords[i * 3 + 2] * 10;
+                    offset += 12;
+                }
+            }
+            coordinates.push({ count: natoms, x, y, z });
+        }
+
+        // ignore, unused
+        offset += velocitySize;
+
+        // ignore, unused
+        offset += forceSize;
+
+        if (offset >= data.byteLength) break;
+    }
+
+    if (times.length >= 1) {
+        f.timeOffset = times[0];
+    }
+    if (times.length >= 2) {
+        f.deltaTime = times[1] - times[0];
+    }
+
+    return f;
+}
+
+export function parseTrr(data: Uint8Array) {
+    return Task.create<Result<TrrFile>>('Parse TRR', async ctx => {
+        try {
+            ctx.update({ canAbort: true, message: 'Parsing trajectory...' });
+            const file = await parseInternal(data);
+            return Result.success(file);
+        } catch (e) {
+            return Result.error('' + e);
+        }
+    });
+}
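A usage sketch for the TRR reader above (illustrative only; import path assumes the published package layout).

```ts
import { parseTrr } from 'molstar/lib/mol-io/reader/trr/parser';

export async function readTrr(data: Uint8Array) {
    const parsed = await parseTrr(data).run();
    if (parsed.isError) throw new Error(`TRR parsing failed: ${parsed}`);
    const trr = parsed.result;
    // coordinates and box vectors are already scaled from nm to Å by the reader
    console.log(`frames: ${trr.frames.length}, timeOffset: ${trr.timeOffset}, deltaTime: ${trr.deltaTime}`);
    return trr;
}
```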

+ 1 - 1
src/mol-model-formats/structure/cif-core.ts

@@ -100,7 +100,7 @@ async function getModels(db: CifCore_Database, format: CifCoreFormat, ctx: Runti
         const element_symbol = new Array<string>(atomCount);
         for (let i = 0; i < atomCount; ++i) {
             // TODO can take as is if type_symbol not given?
-            element_symbol[i] = guessElementSymbolString(label.value(i));
+            element_symbol[i] = guessElementSymbolString(label.value(i), '');
         }
         typeSymbol = Column.ofStringArray(element_symbol);
         formalCharge = Column.Undefined(atomCount, Column.Schema.int);

+ 4 - 1
src/mol-model-formats/structure/gro.ts

@@ -27,6 +27,7 @@ function getBasic(atoms: GroAtoms, modelNum: number): BasicData {
     const asymIds = new Array<string>(atoms.count);
     const seqIds = new Uint32Array(atoms.count);
     const ids = new Uint32Array(atoms.count);
+    const typeSymbol = new Array<string>(atoms.count);
 
     const entityBuilder = new EntityBuilder();
     const componentBuilder = new ComponentBuilder(atoms.residueNumber, atoms.atomName);
@@ -66,6 +67,8 @@ function getBasic(atoms: GroAtoms, modelNum: number): BasicData {
         asymIds[i] = currentAsymId;
         seqIds[i] = currentSeqId;
         ids[i] = i;
+
+        typeSymbol[i] = guessElementSymbolString(atoms.atomName.value(i), atoms.residueName.value(i));
     }
 
     const auth_asym_id = Column.ofStringArray(asymIds);
@@ -87,7 +90,7 @@ function getBasic(atoms: GroAtoms, modelNum: number): BasicData {
         label_entity_id: Column.ofStringArray(entityIds),
 
         occupancy: Column.ofConst(1, atoms.count, Column.Schema.float),
-        type_symbol: Column.ofStringArray(Column.mapToArray(atoms.atomName, s => guessElementSymbolString(s))),
+        type_symbol: Column.ofStringArray(typeSymbol),
 
         pdbx_PDB_model_num: Column.ofConst(modelNum, atoms.count, Column.Schema.int),
     }, atoms.count);

+ 1 - 1
src/mol-model-formats/structure/mol2.ts

@@ -41,7 +41,7 @@ async function getModels(mol2: Mol2File, ctx: RuntimeContext) {
         for (let i = 0; i < atoms.count; ++i) {
             type_symbol[i] = hasAtomType
                 ? atoms.atom_type.value(i).split('.')[0].toUpperCase()
-                : guessElementSymbolString(atoms.atom_name.value(i));
+                : guessElementSymbolString(atoms.atom_name.value(i), atoms.subst_name.value(i));
         }
 
         const atom_site = Table.ofPartialColumns(BasicSchema.atom_site, {

+ 52 - 0
src/mol-model-formats/structure/nctraj.ts

@@ -0,0 +1,52 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Task } from '../../mol-task';
+import { NctrajFile } from '../../mol-io/reader/nctraj/parser';
+import { Coordinates, Frame, Time } from '../../mol-model/structure/coordinates';
+import { Cell } from '../../mol-math/geometry/spacegroup/cell';
+import { Vec3 } from '../../mol-math/linear-algebra';
+import { Mutable } from '../../mol-util/type-helpers';
+
+export function coordinatesFromNctraj(file: NctrajFile): Task<Coordinates> {
+    return Task.create('Parse NCTRAJ', async ctx => {
+        await ctx.update('Converting to coordinates');
+
+        const deltaTime = Time(file.deltaTime, 'step');
+        const offsetTime = Time(file.timeOffset, deltaTime.unit);
+
+        const frames: Frame[] = [];
+        for (let i = 0, il = file.coordinates.length; i < il; ++i) {
+            const c = file.coordinates[i];
+            const elementCount = c.length / 3;
+            const x = new Float32Array(elementCount);
+            const y = new Float32Array(elementCount);
+            const z = new Float32Array(elementCount);
+            for (let j = 0, jl = c.length; j < jl; j += 3) {
+                x[j / 3] = c[j];
+                y[j / 3] = c[j + 1];
+                z[j / 3] = c[j + 2];
+            }
+            const frame: Mutable<Frame> = {
+                elementCount,
+                x, y, z,
+                xyzOrdering: { isIdentity: true },
+                time: Time(offsetTime.value + deltaTime.value * i, deltaTime.unit)
+            };
+            // TODO: handle case where cell_lengths and cell_angles are set, i.e., angles not 90deg
+            if (file.cell_lengths) {
+                const lengths = file.cell_lengths[i];
+                const x = Vec3.scale(Vec3(), Vec3.unitX, lengths[0]);
+                const y = Vec3.scale(Vec3(), Vec3.unitY, lengths[1]);
+                const z = Vec3.scale(Vec3(), Vec3.unitZ, lengths[2]);
+                frame.cell = Cell.fromBasis(x, y, z);
+            }
+            frames.push(frame);
+        }
+
+        return Coordinates.create(frames, deltaTime, offsetTime);
+    });
+}
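A small sketch of how the converter above is typically used: an `NctrajFile` (produced by the NetCDF-based reader added in this commit, not shown here) is turned into mol* `Coordinates`. Import paths assume the published package layout.

```ts
import { coordinatesFromNctraj } from 'molstar/lib/mol-model-formats/structure/nctraj';
import { NctrajFile } from 'molstar/lib/mol-io/reader/nctraj/parser';

export async function nctrajToCoordinates(file: NctrajFile) {
    // one Frame per coordinate set; frame times come from timeOffset/deltaTime,
    // and a Cell is attached when cell_lengths are present
    const coordinates = await coordinatesFromNctraj(file).run();
    return coordinates;
}
```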

+ 174 - 0
src/mol-model-formats/structure/prmtop.ts

@@ -0,0 +1,174 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Column, Table } from '../../mol-data/db';
+import { PrmtopFile } from '../../mol-io/reader/prmtop/parser';
+import { getMoleculeType, MoleculeType } from '../../mol-model/structure/model/types';
+import { Topology } from '../../mol-model/structure/topology/topology';
+import { Task } from '../../mol-task';
+import { ModelFormat } from '../format';
+import { BasicSchema, createBasic } from './basic/schema';
+import { ComponentBuilder } from './common/component';
+import { EntityBuilder } from './common/entity';
+import { getChainId } from './common/util';
+import { guessElementSymbolString } from './util';
+
+function getBasic(prmtop: PrmtopFile) {
+    const { pointers, residuePointer, residueLabel, atomName } = prmtop;
+    const atomCount = pointers.NATOM;
+    const residueCount = pointers.NRES;
+
+    //
+
+    const residueIds = new Uint32Array(atomCount);
+    const residueNames: string[] = [];
+
+    const addResidue = (i: number, from: number, to: number) => {
+        const rn = residueLabel.value(i);
+        for (let j = from, jl = to; j < jl; ++j) {
+            residueIds[j] = i + 1;
+            residueNames[j] = rn;
+        }
+    };
+
+    for (let i = 0, il = residueCount - 1; i < il; ++i) {
+        addResidue(i, residuePointer.value(i) - 1, residuePointer.value(i + 1) - 1);
+    }
+    addResidue(residueCount - 1, residuePointer.value(residueCount - 1) - 1, atomCount);
+
+    const residueId = Column.ofIntArray(residueIds);
+    const residueName = Column.ofStringArray(residueNames);
+
+    //
+
+    const entityIds = new Array<string>(atomCount);
+    const asymIds = new Array<string>(atomCount);
+    const seqIds = new Uint32Array(atomCount);
+    const ids = new Uint32Array(atomCount);
+
+    const entityBuilder = new EntityBuilder();
+    const componentBuilder = new ComponentBuilder(residueId, atomName);
+
+    let currentEntityId = '';
+    let currentAsymIndex = 0;
+    let currentAsymId = '';
+    let currentSeqId = 0;
+    let prevMoleculeType = MoleculeType.Unknown;
+    let prevResidueNumber = -1;
+
+    for (let i = 0, il = atomCount; i < il; ++i) {
+        const residueNumber = residueId.value(i);
+        if (residueNumber !== prevResidueNumber) {
+            const compId = residueName.value(i);
+            const moleculeType = getMoleculeType(componentBuilder.add(compId, i).type, compId);
+
+            if (moleculeType !== prevMoleculeType) {
+                currentAsymId = getChainId(currentAsymIndex);
+                currentAsymIndex += 1;
+                currentSeqId = 0;
+            }
+
+            currentEntityId = entityBuilder.getEntityId(compId, moleculeType, currentAsymId);
+            currentSeqId += 1;
+
+            prevResidueNumber = residueNumber;
+            prevMoleculeType = moleculeType;
+        }
+
+        entityIds[i] = currentEntityId;
+        asymIds[i] = currentAsymId;
+        seqIds[i] = currentSeqId;
+        ids[i] = i;
+    }
+
+    const id = Column.ofIntArray(ids);
+    const asym_id = Column.ofStringArray(asymIds);
+
+    //
+
+    const type_symbol = new Array<string>(atomCount);
+    for (let i = 0; i < atomCount; ++i) {
+        type_symbol[i] = guessElementSymbolString(atomName.value(i), residueName.value(i));
+    }
+
+    const atom_site = Table.ofPartialColumns(BasicSchema.atom_site, {
+        auth_asym_id: asym_id,
+        auth_atom_id: Column.asArrayColumn(atomName),
+        auth_comp_id: residueName,
+        auth_seq_id: residueId,
+        id: Column.asArrayColumn(id),
+
+        label_asym_id: asym_id,
+        label_atom_id: Column.asArrayColumn(atomName),
+        label_comp_id: residueName,
+        label_seq_id: Column.ofIntArray(seqIds),
+        label_entity_id: Column.ofStringArray(entityIds),
+
+        occupancy: Column.ofConst(1, atomCount, Column.Schema.float),
+        type_symbol: Column.ofStringArray(type_symbol),
+
+        pdbx_PDB_model_num: Column.ofConst(1, atomCount, Column.Schema.int),
+    }, atomCount);
+
+    const basic = createBasic({
+        entity: entityBuilder.getEntityTable(),
+        chem_comp: componentBuilder.getChemCompTable(),
+        atom_site
+    });
+
+    return basic;
+}
+
+//
+
+export { PrmtopFormat };
+
+type PrmtopFormat = ModelFormat<PrmtopFile>
+
+namespace PrmtopFormat {
+    export function is(x?: ModelFormat): x is PrmtopFormat {
+        return x?.kind === 'prmtop';
+    }
+
+    export function fromPrmtop(prmtop: PrmtopFile): PrmtopFormat {
+        return { kind: 'prmtop', name: prmtop.title.join(' ') || 'PRMTOP', data: prmtop };
+    }
+}
+
+export function topologyFromPrmtop(prmtop: PrmtopFile): Task<Topology> {
+    return Task.create('Parse PRMTOP', async ctx => {
+        const format = PrmtopFormat.fromPrmtop(prmtop);
+        const basic = getBasic(prmtop);
+
+        const { pointers: { NBONH, NBONA }, bondsIncHydrogen, bondsWithoutHydrogen } = prmtop;
+        const bondCount = NBONH + NBONA;
+
+        const bonds = {
+            indexA: Column.ofLambda({
+                value: (row: number) => {
+                    return row < NBONH
+                        ? bondsIncHydrogen.value(row * 3) / 3
+                        : bondsWithoutHydrogen.value((row - NBONH) * 3) / 3;
+                },
+                rowCount: bondCount,
+                schema: Column.Schema.int,
+            }),
+            indexB: Column.ofLambda({
+                value: (row: number) => {
+                    return row < NBONH
+                        ? bondsIncHydrogen.value(row * 3 + 1) / 3
+                        : bondsWithoutHydrogen.value((row - NBONH) * 3 + 1) / 3;
+                },
+                rowCount: bondCount,
+                schema: Column.Schema.int,
+            }),
+            order: Column.ofConst(1, bondCount, Column.Schema.int)
+        };
+
+        return Topology.create(prmtop.title.join(' ') || 'PRMTOP', basic, bonds, format);
+    });
+}
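A sketch of the topology step in isolation (illustrative, not part of the commit). In the plugin this Topology is normally paired with Coordinates (e.g. from an NCTRAJ file) through the TrajectoryFromModelAndCoordinates transform.

```ts
import { topologyFromPrmtop } from 'molstar/lib/mol-model-formats/structure/prmtop';
import { PrmtopFile } from 'molstar/lib/mol-io/reader/prmtop/parser';

export async function prmtopToTopology(prmtop: PrmtopFile) {
    // builds the basic atom_site data plus a bond list derived from
    // BONDS_INC_HYDROGEN / BONDS_WITHOUT_HYDROGEN (indices are stored as 3 * atom index)
    const topology = await topologyFromPrmtop(prmtop).run();
    return topology;
}
```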

+ 13 - 2
src/mol-model-formats/structure/property/symmetry.ts

@@ -63,10 +63,21 @@ function getSpacegroupNameOrNumber(symmetry: Table<mmCIF_Schema['symmetry']>) {
 
 function getSpacegroup(symmetry: Table<mmCIF_Schema['symmetry']>, cell: Table<mmCIF_Schema['cell']>): Spacegroup {
     if (symmetry._rowCount === 0 || cell._rowCount === 0) return Spacegroup.ZeroP1;
+
+    const a = cell.length_a.value(0);
+    const b = cell.length_b.value(0);
+    const c = cell.length_c.value(0);
+    if (a === 0 || b === 0 || c === 0) return Spacegroup.ZeroP1;
+
+    const alpha = cell.angle_alpha.value(0);
+    const beta = cell.angle_beta.value(0);
+    const gamma = cell.angle_gamma.value(0);
+    if (alpha === 0 || beta === 0 || gamma === 0) return Spacegroup.ZeroP1;
+
     const nameOrNumber = getSpacegroupNameOrNumber(symmetry);
     const spaceCell = SpacegroupCell.create(nameOrNumber,
-        Vec3.create(cell.length_a.value(0), cell.length_b.value(0), cell.length_c.value(0)),
-        Vec3.scale(Vec3.zero(), Vec3.create(cell.angle_alpha.value(0), cell.angle_beta.value(0), cell.angle_gamma.value(0)), Math.PI / 180));
+        Vec3.create(a, b, c),
+        Vec3.scale(Vec3(), Vec3.create(alpha, beta, gamma), Math.PI / 180));
 
     return Spacegroup.create(spaceCell);
 }

+ 4 - 1
src/mol-model-formats/structure/psf.ts

@@ -21,6 +21,7 @@ function getBasic(atoms: PsfFile['atoms']) {
     const asymIds = new Array<string>(atoms.count);
     const seqIds = new Uint32Array(atoms.count);
     const ids = new Uint32Array(atoms.count);
+    const typeSymbol = new Array<string>(atoms.count);
 
     const entityBuilder = new EntityBuilder();
     const componentBuilder = new ComponentBuilder(atoms.residueId, atoms.atomName);
@@ -68,6 +69,8 @@ function getBasic(atoms: PsfFile['atoms']) {
         asymIds[i] = currentAsymId;
         seqIds[i] = currentSeqId;
         ids[i] = i;
+
+        typeSymbol[i] = guessElementSymbolString(atoms.atomName.value(i), atoms.residueName.value(i));
     }
 
     const atom_site = Table.ofPartialColumns(BasicSchema.atom_site, {
@@ -84,7 +87,7 @@ function getBasic(atoms: PsfFile['atoms']) {
         label_entity_id: Column.ofStringArray(entityIds),
 
         occupancy: Column.ofConst(1, atoms.count, Column.Schema.float),
-        type_symbol: Column.ofStringArray(Column.mapToArray(atoms.atomName, s => guessElementSymbolString(s))),
+        type_symbol: Column.ofStringArray(typeSymbol),
 
         pdbx_PDB_model_num: Column.ofConst(1, atoms.count, Column.Schema.int),
     }, atoms.count);

+ 226 - 0
src/mol-model-formats/structure/top.ts

@@ -0,0 +1,226 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Column, Table } from '../../mol-data/db';
+import { TopFile } from '../../mol-io/reader/top/parser';
+import { getMoleculeType, MoleculeType } from '../../mol-model/structure/model/types';
+import { Topology } from '../../mol-model/structure/topology/topology';
+import { Task } from '../../mol-task';
+import { ModelFormat } from '../format';
+import { BasicSchema, createBasic } from './basic/schema';
+import { ComponentBuilder } from './common/component';
+import { EntityBuilder } from './common/entity';
+import { getChainId } from './common/util';
+import { guessElementSymbolString } from './util';
+
+function getBasic(top: TopFile) {
+    const { molecules, compounds } = top;
+
+    const singleResidue: Record<string, boolean> = {};
+    let atomCount = 0;
+
+    for (let i = 0, il = molecules._rowCount; i < il; ++i) {
+        const mol = molecules.compound.value(i);
+        const count = molecules.molCount.value(i);
+        const { atoms } = compounds[mol];
+
+        Column.asArrayColumn(atoms.atom);
+        Column.asArrayColumn(atoms.resnr);
+        Column.asArrayColumn(atoms.residu);
+
+        atomCount += count * atoms._rowCount;
+
+        let prevResnr = atoms.resnr.value(0);
+        singleResidue[mol] = true;
+        for (let j = 1, jl = atoms._rowCount; j < jl; ++j) {
+            const resnr = atoms.resnr.value(j);
+            if (resnr !== prevResnr) {
+                singleResidue[mol] = false;
+                break;
+            }
+            prevResnr = resnr;
+        }
+    }
+
+    //
+
+    const atomNames = new Array<string>(atomCount);
+    const residueIds = new Uint32Array(atomCount);
+    const residueNames = new Array<string>(atomCount);
+
+    let k = 0;
+    for (let i = 0, il = molecules._rowCount; i < il; ++i) {
+        const mol = molecules.compound.value(i);
+        const count = molecules.molCount.value(i);
+        const { atoms } = compounds[mol];
+        const isSingleResidue = singleResidue[mol];
+        for (let j = 0; j < count; ++j) {
+            for (let l = 0, ll = atoms._rowCount; l < ll; ++l) {
+                atomNames[k] = atoms.atom.value(l);
+                residueIds[k] = atoms.resnr.value(l);
+                residueNames[k] = atoms.residu.value(l);
+
+                if (isSingleResidue) residueIds[k] += j;
+
+                k += 1;
+            }
+        }
+    }
+
+    const atomName = Column.ofStringArray(atomNames);
+    const residueId = Column.ofIntArray(residueIds);
+    const residueName = Column.ofStringArray(residueNames);
+
+    //
+
+    const entityIds = new Array<string>(atomCount);
+    const asymIds = new Array<string>(atomCount);
+    const seqIds = new Uint32Array(atomCount);
+    const ids = new Uint32Array(atomCount);
+
+    const entityBuilder = new EntityBuilder();
+    const componentBuilder = new ComponentBuilder(residueId, atomName);
+
+    let currentEntityId = '';
+    let currentAsymIndex = 0;
+    let currentAsymId = '';
+    let currentSeqId = 0;
+    let prevMoleculeType = MoleculeType.Unknown;
+    let prevResidueNumber = -1;
+
+    for (let i = 0, il = atomCount; i < il; ++i) {
+        const residueNumber = residueId.value(i);
+        if (residueNumber !== prevResidueNumber) {
+            const compId = residueName.value(i);
+            const moleculeType = getMoleculeType(componentBuilder.add(compId, i).type, compId);
+
+            if (moleculeType !== prevMoleculeType) {
+                currentAsymId = getChainId(currentAsymIndex);
+                currentAsymIndex += 1;
+                currentSeqId = 0;
+            }
+
+            currentEntityId = entityBuilder.getEntityId(compId, moleculeType, currentAsymId);
+            currentSeqId += 1;
+
+            prevResidueNumber = residueNumber;
+            prevMoleculeType = moleculeType;
+        }
+
+        entityIds[i] = currentEntityId;
+        asymIds[i] = currentAsymId;
+        seqIds[i] = currentSeqId;
+        ids[i] = i;
+    }
+
+    const id = Column.ofIntArray(ids);
+    const asym_id = Column.ofStringArray(asymIds);
+
+    //
+
+    const type_symbol = new Array<string>(atomCount);
+    for (let i = 0; i < atomCount; ++i) {
+        type_symbol[i] = guessElementSymbolString(atomName.value(i), residueName.value(i));
+    }
+
+    const atom_site = Table.ofPartialColumns(BasicSchema.atom_site, {
+        auth_asym_id: asym_id,
+        auth_atom_id: Column.asArrayColumn(atomName),
+        auth_comp_id: residueName,
+        auth_seq_id: residueId,
+        id: Column.asArrayColumn(id),
+
+        label_asym_id: asym_id,
+        label_atom_id: Column.asArrayColumn(atomName),
+        label_comp_id: residueName,
+        label_seq_id: Column.ofIntArray(seqIds),
+        label_entity_id: Column.ofStringArray(entityIds),
+
+        occupancy: Column.ofConst(1, atomCount, Column.Schema.float),
+        type_symbol: Column.ofStringArray(type_symbol),
+
+        pdbx_PDB_model_num: Column.ofConst(1, atomCount, Column.Schema.int),
+    }, atomCount);
+
+    const basic = createBasic({
+        entity: entityBuilder.getEntityTable(),
+        chem_comp: componentBuilder.getChemCompTable(),
+        atom_site
+    });
+
+    return basic;
+}
+
+function getBonds(top: TopFile) {
+    const { molecules, compounds } = top;
+
+    const indexA: number[] = [];
+    const indexB: number[] = [];
+
+    let atomOffset = 0;
+
+    for (let i = 0, il = molecules._rowCount; i < il; ++i) {
+        const mol = molecules.compound.value(i);
+        const count = molecules.molCount.value(i);
+        const { atoms, bonds } = compounds[mol];
+
+        if (bonds) {
+            for (let j = 0; j < count; ++j) {
+
+                for (let l = 0, ll = bonds._rowCount; l < ll; ++l) {
+                    indexA.push(bonds.ai.value(l) - 1 + atomOffset);
+                    indexB.push(bonds.aj.value(l) - 1 + atomOffset);
+                }
+
+                atomOffset += atoms._rowCount;
+            }
+        } else if (mol === 'TIP3') {
+            for (let j = 0; j < count; ++j) {
+                indexA.push(0 + atomOffset);
+                indexB.push(1 + atomOffset);
+                indexA.push(0 + atomOffset);
+                indexB.push(2 + atomOffset);
+                atomOffset += atoms._rowCount;
+            }
+        } else {
+            atomOffset += count * atoms._rowCount;
+        }
+    }
+
+    return {
+        indexA: Column.ofIntArray(indexA),
+        indexB: Column.ofIntArray(indexB),
+        order: Column.ofConst(1, indexA.length, Column.Schema.int)
+    };
+}
+
+//
+
+export { TopFormat };
+
+type TopFormat = ModelFormat<TopFile>
+
+namespace TopFormat {
+    export function is(x?: ModelFormat): x is TopFormat {
+        return x?.kind === 'top';
+    }
+
+    export function fromTop(top: TopFile): TopFormat {
+        return { kind: 'top', name: top.system || 'TOP', data: top };
+    }
+}
+
+export function topologyFromTop(top: TopFile): Task<Topology> {
+    return Task.create('Parse TOP', async ctx => {
+        const format = TopFormat.fromTop(top);
+        const basic = getBasic(top);
+        const bonds = getBonds(top);
+
+        return Topology.create(top.system || 'TOP', basic, bonds, format);
+    });
+}
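A sketch chaining the TOP reader and the converter above (illustrative only; import paths assume the published package layout).

```ts
import { parseTop } from 'molstar/lib/mol-io/reader/top/parser';
import { topologyFromTop } from 'molstar/lib/mol-model-formats/structure/top';

export async function topToTopology(data: string) {
    const parsed = await parseTop(data).run();
    if (parsed.isError) throw new Error(`TOP parsing failed: ${parsed}`);
    // expands each compound by its [ molecules ] count and offsets bond indices accordingly
    return topologyFromTop(parsed.result).run();
}
```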

+ 39 - 0
src/mol-model-formats/structure/trr.ts

@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { Task } from '../../mol-task';
+import { TrrFile } from '../../mol-io/reader/trr/parser';
+import { Coordinates, Frame, Time } from '../../mol-model/structure/coordinates';
+import { Cell } from '../../mol-math/geometry/spacegroup/cell';
+import { Vec3 } from '../../mol-math/linear-algebra';
+
+export function coordinatesFromTrr(file: TrrFile): Task<Coordinates> {
+    return Task.create('Parse TRR', async ctx => {
+        await ctx.update('Converting to coordinates');
+
+        const deltaTime = Time(file.deltaTime, 'step');
+        const offsetTime = Time(file.timeOffset, deltaTime.unit);
+
+        const frames: Frame[] = [];
+        for (let i = 0, il = file.frames.length; i < il; ++i) {
+            const box = file.boxes[i];
+            const x = Vec3.fromArray(Vec3(), box, 0);
+            const y = Vec3.fromArray(Vec3(), box, 3);
+            const z = Vec3.fromArray(Vec3(), box, 6);
+            frames.push({
+                elementCount: file.frames[i].count,
+                cell: Cell.fromBasis(x, y, z),
+                x: file.frames[i].x,
+                y: file.frames[i].y,
+                z: file.frames[i].z,
+                xyzOrdering: { isIdentity: true },
+                time: Time(offsetTime.value + deltaTime.value * i, deltaTime.unit)
+            });
+        }
+
+        return Coordinates.create(frames, deltaTime, offsetTime);
+    });
+}
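A sketch chaining the TRR reader and the converter above (illustrative only).

```ts
import { parseTrr } from 'molstar/lib/mol-io/reader/trr/parser';
import { coordinatesFromTrr } from 'molstar/lib/mol-model-formats/structure/trr';

export async function trrToCoordinates(data: Uint8Array) {
    const parsed = await parseTrr(data).run();
    if (parsed.isError) throw new Error(`TRR parsing failed: ${parsed}`);
    // each TRR frame becomes a Frame with a Cell built from the three box vectors
    return coordinatesFromTrr(parsed.result).run();
}
```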

+ 20 - 21
src/mol-model-formats/structure/util.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2019-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2019-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */
@@ -46,31 +46,30 @@ export function guessElementSymbolTokens(tokens: Tokens, str: string, start: num
     TokenBuilder.add(tokens, s, s); // no reasonable guess, add empty token
 }
 
+const TwoCharElementNames = new Set(['NA', 'CL', 'FE', 'SI', 'BR', 'AS']);
+const OneCharElementNames = new Set(['C', 'H', 'N', 'O', 'P', 'S']);
+
 const reTrimSpacesAndNumbers = /^[\s\d]+|[\s\d]+$/g;
-export function guessElementSymbolString(str: string) {
+export function guessElementSymbolString(atomId: string, compId: string) {
     // trim spaces and numbers, convert to upper case
-    str = str.replace(reTrimSpacesAndNumbers, '').toUpperCase();
-    const l = str.length;
-
-    if (l === 0) return str; // empty
-    if (l === 1) return str; // one char
+    atomId = atomId.replace(reTrimSpacesAndNumbers, '').toUpperCase();
+    const l = atomId.length;
 
-    if (l === 2) { // two chars
-        if (str === 'NA' || str === 'CL' || str === 'FE' || str === 'SI' ||
-            str === 'BR' || str === 'AS'
-        ) return str;
-    }
+    if (l === 0) return atomId; // empty
+    if (l === 1) return atomId; // one char
+    if (TwoCharElementNames.has(atomId)) return atomId; // two chars
 
-    if (l === 3) { // three chars
-        if (str === 'SOD') return 'NA';
-        if (str === 'POT') return 'K';
-        if (str === 'CES') return 'CS';
-        if (str === 'CAL') return 'CA';
-        if (str === 'CLA') return 'CL';
+    // check for Charmm ion names where component and atom id are the same
+    if (l === 3 && compId === atomId) {
+        if (atomId === 'SOD') return 'NA';
+        if (atomId === 'POT') return 'K';
+        if (atomId === 'CES') return 'CS';
+        if (atomId === 'CAL') return 'CA';
+        if (atomId === 'CLA') return 'CL';
     }
 
-    const c = str[0];
-    if (c === 'C' || c === 'H' || c === 'N' || c === 'O' || c === 'P' || c === 'S') return c;
+    if (OneCharElementNames.has(atomId[0])) return atomId[0];
 
     return ''; // no reasonable guess, return empty string
-}
+}
+
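The following illustrates the behaviour of the new two-argument signature (expected results derived from the code above; not a test included in the commit).

```ts
import { guessElementSymbolString } from 'molstar/lib/mol-model-formats/structure/util';

// Charmm ion names are only translated when the component id equals the atom id:
guessElementSymbolString('SOD', 'SOD'); // => 'NA' (sodium ion)
guessElementSymbolString('SOD', 'MET'); // => 'S'  (first-character fallback)
// other behaviour is unchanged:
guessElementSymbolString('FE2', 'HEM');  // => 'FE' (digits trimmed, two-char element)
guessElementSymbolString('HG21', 'THR'); // => 'H'  ('HG' is not treated as mercury)
```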

+ 52 - 34
src/mol-plugin-state/actions/file.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2019-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2019-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */
@@ -13,6 +13,27 @@ import { ParamDefinition as PD } from '../../mol-util/param-definition';
 import { unzip } from '../../mol-util/zip/zip';
 import { PluginStateObject } from '../objects';
 
+async function processFile(file: Asset.File, plugin: PluginContext, format: string, visuals: boolean) {
+    const info = getFileInfo(file.file!);
+    const isBinary = plugin.dataFormats.binaryExtensions.has(info.ext);
+    const { data } = await plugin.builders.data.readFile({ file, isBinary });
+    const provider = format === 'auto'
+        ? plugin.dataFormats.auto(info, data.cell?.obj!)
+        : plugin.dataFormats.get(format);
+
+    if (!provider) {
+        plugin.log.warn(`OpenFiles: could not find data provider for '${info.ext}'`);
+        await plugin.state.data.build().delete(data).commit();
+        return;
+    }
+
+    // need to await so that the enclosing Task finishes after the update is done.
+    const parsed = await provider.parse(plugin, data);
+    if (visuals) {
+        await provider.visuals?.(plugin, parsed);
+    }
+};
+
 export const OpenFiles = StateAction.build({
     display: { name: 'Open Files', description: 'Load one or more files and optionally create default visuals' },
     from: PluginStateObject.Root,
@@ -36,36 +57,19 @@ export const OpenFiles = StateAction.build({
             return;
         }
 
-        const processFile = async (file: Asset.File) => {
-            const info = getFileInfo(file.file!);
-            const isBinary = plugin.dataFormats.binaryExtensions.has(info.ext);
-            const { data } = await plugin.builders.data.readFile({ file, isBinary });
-            const provider = params.format.name === 'auto'
-                ? plugin.dataFormats.auto(info, data.cell?.obj!)
-                : plugin.dataFormats.get(params.format.params);
-
-            if (!provider) {
-                plugin.log.warn(`OpenFiles: could not find data provider for '${info.name}.${info.ext}'`);
-                return;
-            }
-
-            // need to await so that the enclosing Task finishes after the update is done.
-            const parsed = await provider.parse(plugin, data);
-            if (params.visuals) {
-                await provider.visuals?.(plugin, parsed);
-            }
-        };
-
         for (const file of params.files) {
             try {
                 if (file.file && file.name.toLowerCase().endsWith('.zip')) {
                     const zippedFiles = await unzip(taskCtx, await file.file.arrayBuffer());
                     for (const [fn, filedata] of Object.entries(zippedFiles)) {
-                        const asset = Asset.File(new File([filedata as Uint8Array], fn));
-                        await processFile(asset);
+                        if (!(filedata instanceof Uint8Array) || filedata.length === 0) continue;
+
+                        const asset = Asset.File(new File([filedata], fn));
+                        await processFile(asset, plugin, 'auto', params.visuals);
                     }
                 } else {
-                    await processFile(file);
+                    const format = params.format.name === 'auto' ? 'auto' : params.format.params;
+                    await processFile(file, plugin, format, params.visuals);
                 }
             } catch (e) {
                 console.error(e);
@@ -79,7 +83,7 @@ export const DownloadFile = StateAction.build({
     display: { name: 'Download File', description: 'Load one or more files from a URL' },
     from: PluginStateObject.Root,
     params: (a, ctx: PluginContext) => {
-        const { options } = ctx.dataFormats;
+        const options = [...ctx.dataFormats.options, ['zip', 'Zip'] as const];
         return {
             url: PD.Url(''),
             format: PD.Select(options[0][0], options),
@@ -92,16 +96,30 @@ export const DownloadFile = StateAction.build({
 
     await state.transaction(async () => {
         try {
-            const provider = plugin.dataFormats.get(params.format);
-            if (!provider) {
-                plugin.log.warn(`DownloadFile: could not find data provider for '${params.format}'`);
-                return;
-            }
+            if (params.format === 'zip') {
+                // TODO: add ReadZipFile transformer so this can be saved as a simple state snapshot,
+                //       would need support for extracting individual files from zip
+                const data = await plugin.builders.data.download({ url: params.url, isBinary: true });
+                const zippedFiles = await unzip(taskCtx, (data.obj?.data as Uint8Array).buffer);
+                for (const [fn, filedata] of Object.entries(zippedFiles)) {
+                    if (!(filedata instanceof Uint8Array) || filedata.length === 0) continue;
+
+                    const asset = Asset.File(new File([filedata], fn));
 
-            const data = await plugin.builders.data.download({ url: params.url, isBinary: params.isBinary });
-            const parsed = await provider.parse(plugin, data);
-            if (params.visuals) {
-                await provider.visuals?.(plugin, parsed);
+                    await processFile(asset, plugin, 'auto', params.visuals);
+                }
+            } else {
+                const provider = plugin.dataFormats.get(params.format);
+                if (!provider) {
+                    plugin.log.warn(`DownloadFile: could not find data provider for '${params.format}'`);
+                    return;
+                }
+
+                const data = await plugin.builders.data.download({ url: params.url, isBinary: params.isBinary });
+                const parsed = await provider.parse(plugin, data);
+                if (params.visuals) {
+                    await provider.visuals?.(plugin, parsed);
+                }
             }
         } catch (e) {
             console.error(e);
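A sketch of dispatching the extended DownloadFile action with the new 'zip' option from code. It assumes `State.applyAction` (as used by the plugin commands) and the param names defined above (`url`, `format`, `isBinary`, `visuals`); import paths assume the published package layout.

```ts
import { DownloadFile } from 'molstar/lib/mol-plugin-state/actions/file';
import { PluginContext } from 'molstar/lib/mol-plugin/context';
import { Asset } from 'molstar/lib/mol-util/assets';

export function downloadZipArchive(plugin: PluginContext, url: string) {
    // with format 'zip' the archive is unpacked and each entry is dispatched
    // through 'auto' format detection, as implemented above
    return plugin.state.data.applyAction(DownloadFile, {
        url: Asset.Url(url),
        format: 'zip',
        isBinary: true,
        visuals: true
    }).run();
}
```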

+ 128 - 3
src/mol-plugin-state/actions/structure.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2018-2021 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author David Sehnal <david.sehnal@gmail.com>
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
@@ -10,16 +10,18 @@ import { StateAction, StateSelection, StateTransformer } from '../../mol-state';
 import { Task } from '../../mol-task';
 import { ParamDefinition as PD } from '../../mol-util/param-definition';
 import { PresetStructureRepresentations, StructureRepresentationPresetProvider } from '../builder/structure/representation-preset';
-import { BuiltInTrajectoryFormat, BuiltInTrajectoryFormats } from '../formats/trajectory';
+import { BuiltInTrajectoryFormat, BuiltInTrajectoryFormats, TrajectoryFormatCategory } from '../formats/trajectory';
 import { RootStructureDefinition } from '../helpers/root-structure';
 import { PluginStateObject } from '../objects';
 import { StateTransforms } from '../transforms';
 import { Download } from '../transforms/data';
-import { CustomModelProperties, CustomStructureProperties, TrajectoryFromModelAndCoordinates } from '../transforms/model';
+import { CustomModelProperties, CustomStructureProperties, ModelFromTrajectory, TrajectoryFromModelAndCoordinates } from '../transforms/model';
 import { Asset } from '../../mol-util/assets';
 import { PluginConfig } from '../../mol-plugin/config';
 import { getFileInfo } from '../../mol-util/file-info';
 import { assertUnreachable } from '../../mol-util/type-helpers';
+import { TopologyFormatCategory } from '../formats/topology';
+import { CoordinatesFormatCategory } from '../formats/coordinates';
 
 const DownloadModelRepresentationOptions = (plugin: PluginContext) => {
     const representationDefault = plugin.config.get(PluginConfig.Structure.DefaultRepresentationPreset) || PresetStructureRepresentations.auto.id;
@@ -311,4 +313,127 @@ export const AddTrajectory = StateAction.build({
         const structure = await ctx.builders.structure.createStructure(model.selector);
         await ctx.builders.structure.representation.applyPreset(structure, 'auto');
     }).runInContext(taskCtx);
+}));
+
+export const LoadTrajectory = StateAction.build({
+    display: { name: 'Load Trajectory', description: 'Load a trajectory from a model/topology and coordinates, each given as URL or file.' },
+    from: PluginStateObject.Root,
+    params(a, ctx: PluginContext) {
+        const { options } = ctx.dataFormats;
+        const modelOptions = options.filter(o => o[2] === TrajectoryFormatCategory || o[2] === TopologyFormatCategory);
+        const coordinatesOptions = options.filter(o => o[2] === CoordinatesFormatCategory);
+
+        const modelExts: string[] = [];
+        const coordinatesExts: string[] = [];
+        for (const { provider } of ctx.dataFormats.list) {
+            if (provider.category === TrajectoryFormatCategory || provider.category === TopologyFormatCategory) {
+                if (provider.binaryExtensions) modelExts.push(...provider.binaryExtensions);
+                if (provider.stringExtensions) modelExts.push(...provider.stringExtensions);
+            } else if (provider.category === CoordinatesFormatCategory) {
+                if (provider.binaryExtensions) coordinatesExts.push(...provider.binaryExtensions);
+                if (provider.stringExtensions) coordinatesExts.push(...provider.stringExtensions);
+            }
+        }
+
+        return {
+            source: PD.MappedStatic('file', {
+                url: PD.Group({
+                    model: PD.Group({
+                        url: PD.Url(''),
+                        format: PD.Select(modelOptions[0][0], modelOptions),
+                        isBinary: PD.Boolean(false),
+                    }, { isExpanded: true }),
+                    coordinates: PD.Group({
+                        url: PD.Url(''),
+                        format: PD.Select(coordinatesOptions[0][0], coordinatesOptions),
+                    }, { isExpanded: true })
+                }, { isFlat: true }),
+                file: PD.Group({
+                    model: PD.File({ accept: modelExts.map(e => `.${e}`).join(','), label: 'Model' }),
+                    coordinates: PD.File({ accept: coordinatesExts.map(e => `.${e}`).join(','), label: 'Coordinates' }),
+                }, { isFlat: true }),
+            }, { options: [['url', 'URL'], ['file', 'File']] })
+        };
+    }
+})(({ params, state }, ctx: PluginContext) => Task.create('Load Trajectory', taskCtx => {
+    return state.transaction(async () => {
+        const s = params.source;
+
+        if (s.name === 'file' && (s.params.model === null || s.params.coordinates === null)) {
+            ctx.log.error('No file(s) selected');
+            return;
+        }
+
+        if (s.name === 'url' && (!s.params.model || !s.params.coordinates)) {
+            ctx.log.error('No URL(s) given');
+            return;
+        }
+
+        const processUrl = async (url: string | Asset.Url, format: string, isBinary: boolean) => {
+            const data = await ctx.builders.data.download({ url, isBinary });
+            const provider = ctx.dataFormats.get(format);
+
+            if (!provider) {
+                ctx.log.warn(`LoadTrajectory: could not find data provider for '${format}'`);
+                return;
+            }
+
+            return provider.parse(ctx, data);
+        };
+
+        const processFile = async (file: Asset.File | null) => {
+            if (!file) throw new Error('No file selected');
+
+            const info = getFileInfo(file.file!);
+            const isBinary = ctx.dataFormats.binaryExtensions.has(info.ext);
+            const { data } = await ctx.builders.data.readFile({ file, isBinary });
+            const provider = ctx.dataFormats.auto(info, data.cell?.obj!);
+
+            if (!provider) {
+                ctx.log.warn(`LoadTrajectory: could not find data provider for '${info.ext}'`);
+                await ctx.state.data.build().delete(data).commit();
+                return;
+            }
+
+            return provider.parse(ctx, data);
+        };
+
+        try {
+            const modelParsed = s.name === 'url'
+                ? await processUrl(s.params.model.url, s.params.model.format, s.params.model.isBinary)
+                : await processFile(s.params.model);
+
+            let model;
+            if ('trajectory' in modelParsed) {
+                model = await state.build().to(modelParsed.trajectory)
+                    .apply(ModelFromTrajectory, { modelIndex: 0 })
+                    .commit();
+            } else {
+                model = modelParsed.topology;
+            }
+
+            //
+
+            const coordinates = s.name === 'url'
+                ? await processUrl(s.params.coordinates.url, s.params.coordinates.format, true)
+                : await processFile(s.params.coordinates);
+
+            //
+
+            const dependsOn = [model.ref, coordinates.ref];
+            const traj = state.build().toRoot()
+                .apply(TrajectoryFromModelAndCoordinates, {
+                    modelRef: model.ref,
+                    coordinatesRef: coordinates.ref
+                }, { dependsOn })
+                .apply(StateTransforms.Model.ModelFromTrajectory, { modelIndex: 0 });
+
+            await state.updateTree(traj).runInContext(taskCtx);
+            const structure = await ctx.builders.structure.createStructure(traj.selector);
+            await ctx.builders.structure.representation.applyPreset(structure, 'auto');
+        } catch (e) {
+            console.error(e);
+            ctx.log.error(`Error loading trajectory`);
+        }
+    }).runInContext(taskCtx);
 }));
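A sketch of invoking the new LoadTrajectory action programmatically via its 'url' branch. The URLs are placeholders, and the format ids ('prmtop', 'nctraj') are assumed to match the registry names; the param layout follows the PD.MappedStatic definition above.

```ts
import { LoadTrajectory } from 'molstar/lib/mol-plugin-state/actions/structure';
import { PluginContext } from 'molstar/lib/mol-plugin/context';
import { Asset } from 'molstar/lib/mol-util/assets';

export function loadPrmtopWithNctraj(plugin: PluginContext, modelUrl: string, coordinatesUrl: string) {
    return plugin.state.data.applyAction(LoadTrajectory, {
        source: {
            name: 'url' as const,
            params: {
                model: { url: Asset.Url(modelUrl), format: 'prmtop', isBinary: false },
                coordinates: { url: Asset.Url(coordinatesUrl), format: 'nctraj' }
            }
        }
    }).run();
}
```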

+ 86 - 0
src/mol-plugin-state/formats/coordinates.ts

@@ -0,0 +1,86 @@
+/**
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { StateTransforms } from '../transforms';
+import { DataFormatProvider } from './provider';
+
+export const CoordinatesFormatCategory = 'Coordinates';
+
+export { DcdProvider };
+const DcdProvider = DataFormatProvider({
+    label: 'DCD',
+    description: 'DCD',
+    category: CoordinatesFormatCategory,
+    binaryExtensions: ['dcd'],
+    parse: (plugin, data) => {
+        const coordinates = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Model.CoordinatesFromDcd);
+
+        return coordinates.commit();
+    }
+});
+type DcdProvider = typeof DcdProvider;
+
+export { XtcProvider };
+const XtcProvider = DataFormatProvider({
+    label: 'XTC',
+    description: 'XTC',
+    category: CoordinatesFormatCategory,
+    binaryExtensions: ['xtc'],
+    parse: (plugin, data) => {
+        const coordinates = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Model.CoordinatesFromXtc);
+
+        return coordinates.commit();
+    }
+});
+type XtcProvider = typeof XtcProvider;
+
+export { TrrProvider };
+const TrrProvider = DataFormatProvider({
+    label: 'TRR',
+    description: 'TRR',
+    category: CoordinatesFormatCategory,
+    binaryExtensions: ['trr'],
+    parse: (plugin, data) => {
+        const coordinates = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Model.CoordinatesFromTrr);
+
+        return coordinates.commit();
+    }
+});
+type TrrProvider = typeof TrrProvider;
+
+export { NctrajProvider };
+const NctrajProvider = DataFormatProvider({
+    label: 'NCTRAJ',
+    description: 'NCTRAJ',
+    category: CoordinatesFormatCategory,
+    binaryExtensions: ['nc', 'nctraj'],
+    parse: (plugin, data) => {
+        const coordinates = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Model.CoordinatesFromNctraj);
+
+        return coordinates.commit();
+    }
+});
+type NctrajProvider = typeof NctrajProvider;
+
+export type CoordinatesProvider = DcdProvider | XtcProvider | TrrProvider | NctrajProvider;
+
+export const BuiltInCoordinatesFormats = [
+    ['dcd', DcdProvider] as const,
+    ['xtc', XtcProvider] as const,
+    ['trr', TrrProvider] as const,
+    ['nctraj', NctrajProvider] as const,
+] as const;
+
+export type BuiltInCoordinatesFormat = (typeof BuiltInCoordinatesFormats)[number][0]
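
The four providers above share one shape: apply a single ``CoordinatesFrom*`` transform and commit. A minimal sketch of what a third-party provider could look like, using a hypothetical ``myfmt`` format and reusing ``CoordinatesFromDcd`` as a stand-in transform:

```ts
import { StateTransforms } from '../transforms';
import { DataFormatProvider } from './provider';
import { CoordinatesFormatCategory } from './coordinates';

// Hypothetical provider: a real implementation would apply its own Coordinates* transform.
export const MyFmtProvider = DataFormatProvider({
    label: 'MYFMT',
    description: 'MYFMT',
    category: CoordinatesFormatCategory,
    binaryExtensions: ['myfmt'],
    parse: (plugin, data) => plugin.state.data.build()
        .to(data)
        .apply(StateTransforms.Model.CoordinatesFromDcd) // stand-in; swap for the format's transform
        .commit()
});

// Registration mirroring what DataFormatRegistry does for the built-ins
// (assumes the registry is reachable as plugin.dataFormats and its add method is public):
// plugin.dataFormats.add('myfmt', MyFmtProvider);
```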

+ 9 - 7
src/mol-plugin-state/formats/registry.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2019-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2019-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  * @author David Sehnal <david.sehnal@gmail.com>
@@ -11,17 +11,18 @@ import { DataFormatProvider } from './provider';
 import { BuiltInTrajectoryFormats } from './trajectory';
 import { BuiltInVolumeFormats } from './volume';
 import { BuiltInShapeFormats } from './shape';
-import { BuiltInStructureFormats } from './structure';
+import { BuiltInTopologyFormats } from './topology';
+import { BuiltInCoordinatesFormats } from './coordinates';
 
 export class DataFormatRegistry {
     private _list: { name: string, provider: DataFormatProvider }[] = [];
     private _map = new Map<string, DataFormatProvider>();
     private _extensions: Set<string> | undefined = undefined;
     private _binaryExtensions: Set<string> | undefined = undefined;
-    private _options: [string, string, string][] | undefined = undefined;
+    private _options: [name: string, label: string, category: string][] | undefined = undefined;
 
-    get types(): [string, string][] {
-        return this._list.map(e => [e.name, e.provider.label] as [string, string]);
+    get types(): [name: string, label: string][] {
+        return this._list.map(e => [e.name, e.provider.label] as [name: string, label: string]);
     }
 
     get extensions() {
@@ -45,7 +46,7 @@ export class DataFormatRegistry {
 
     get options() {
         if (this._options) return this._options;
-        const options: [string, string, string][] = [];
+        const options: [name: string, label: string, category: string][] = [];
         this._list.forEach(({ name, provider }) => options.push([name, provider.label, provider.category || '']));
         this._options = options;
         return options;
@@ -53,7 +54,8 @@ export class DataFormatRegistry {
 
     constructor() {
         for (const [id, p] of BuiltInVolumeFormats) this.add(id, p);
-        for (const [id, p] of BuiltInStructureFormats) this.add(id, p);
+        for (const [id, p] of BuiltInTopologyFormats) this.add(id, p);
+        for (const [id, p] of BuiltInCoordinatesFormats) this.add(id, p);
         for (const [id, p] of BuiltInShapeFormats) this.add(id, p);
         for (const [id, p] of BuiltInTrajectoryFormats) this.add(id, p);
     };

+ 0 - 64
src/mol-plugin-state/formats/structure.ts

@@ -1,64 +0,0 @@
-/**
- * Copyright (c) 2018-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
- *
- * @author David Sehnal <david.sehnal@gmail.com>
- * @author Alexander Rose <alexander.rose@weirdbyte.de>
- */
-
-import { StateTransforms } from '../transforms';
-import { DataFormatProvider } from './provider';
-
-export const StructureFormatCategory = 'Structure';
-
-export const PsfProvider = DataFormatProvider({
-    label: 'PSF',
-    description: 'PSF',
-    category: StructureFormatCategory,
-    stringExtensions: ['psf'],
-    parse: async (plugin, data) => {
-        const format = plugin.state.data.build()
-            .to(data)
-            .apply(StateTransforms.Data.ParsePsf, {}, { state: { isGhost: true } });
-        const topology = format.apply(StateTransforms.Model.TopologyFromPsf);
-
-        await format.commit();
-
-        return { format: format.selector, topology: topology.selector };
-    }
-});
-
-export const DcdProvider = DataFormatProvider({
-    label: 'DCD',
-    description: 'DCD',
-    category: StructureFormatCategory,
-    binaryExtensions: ['dcd'],
-    parse: (plugin, data) => {
-        const coordinates = plugin.state.data.build()
-            .to(data)
-            .apply(StateTransforms.Model.CoordinatesFromDcd);
-
-        return coordinates.commit();
-    }
-});
-
-export const XtcProvider = DataFormatProvider({
-    label: 'XTC',
-    description: 'XTC',
-    category: StructureFormatCategory,
-    binaryExtensions: ['xtc'],
-    parse: (plugin, data) => {
-        const coordinates = plugin.state.data.build()
-            .to(data)
-            .apply(StateTransforms.Model.CoordinatesFromXtc);
-
-        return coordinates.commit();
-    }
-});
-
-export const BuiltInStructureFormats = [
-    ['psf', PsfProvider] as const,
-    ['dcd', DcdProvider] as const,
-    ['xtc', XtcProvider] as const,
-] as const;
-
-export type BuildInStructureFormat = (typeof BuiltInStructureFormats)[number][0]

+ 78 - 0
src/mol-plugin-state/formats/topology.ts

@@ -0,0 +1,78 @@
+/**
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { StateTransforms } from '../transforms';
+import { DataFormatProvider } from './provider';
+
+export const TopologyFormatCategory = 'Topology';
+
+export { PsfProvider };
+const PsfProvider = DataFormatProvider({
+    label: 'PSF',
+    description: 'PSF',
+    category: TopologyFormatCategory,
+    stringExtensions: ['psf'],
+    parse: async (plugin, data) => {
+        const format = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Data.ParsePsf, {}, { state: { isGhost: true } });
+        const topology = format.apply(StateTransforms.Model.TopologyFromPsf);
+
+        await format.commit();
+
+        return { format: format.selector, topology: topology.selector };
+    }
+});
+type PsfProvider = typeof PsfProvider;
+
+export { PrmtopProvider };
+const PrmtopProvider = DataFormatProvider({
+    label: 'PRMTOP',
+    description: 'PRMTOP',
+    category: TopologyFormatCategory,
+    stringExtensions: ['prmtop', 'parm7'],
+    parse: async (plugin, data) => {
+        const format = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Data.ParsePrmtop, {}, { state: { isGhost: true } });
+        const topology = format.apply(StateTransforms.Model.TopologyFromPrmtop);
+
+        await format.commit();
+
+        return { format: format.selector, topology: topology.selector };
+    }
+});
+type PrmtopProvider = typeof PrmtopProvider;
+
+export { TopProvider };
+const TopProvider = DataFormatProvider({
+    label: 'TOP',
+    description: 'TOP',
+    category: TopologyFormatCategory,
+    stringExtensions: ['top'],
+    parse: async (plugin, data) => {
+        const format = plugin.state.data.build()
+            .to(data)
+            .apply(StateTransforms.Data.ParseTop, {}, { state: { isGhost: true } });
+        const topology = format.apply(StateTransforms.Model.TopologyFromTop);
+
+        await format.commit();
+
+        return { format: format.selector, topology: topology.selector };
+    }
+});
+type TopProvider = typeof TopProvider;
+
+export type TopologyProvider = PsfProvider | PrmtopProvider | TopProvider;
+
+export const BuiltInTopologyFormats = [
+    ['psf', PsfProvider] as const,
+    ['prmtop', PrmtopProvider] as const,
+    ['top', TopProvider] as const,
+] as const;
+
+export type BuiltInTopologyFormat = (typeof BuiltInTopologyFormats)[number][0]
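
Unlike the coordinates providers, the topology providers return both selectors from ``parse``, so callers can reach the derived topology directly; a short usage sketch, assuming ``dataCell`` is an ``SO.Data.String`` cell with PRMTOP text already in the state tree:

```ts
// `plugin` is a PluginContext; `dataCell` is an assumption of this sketch.
const parsed = await PrmtopProvider.parse(plugin, dataCell);
// parsed.format   -> selector of the ghost SO.Format.Prmtop node
// parsed.topology -> selector of the SO.Molecule.Topology node; this is what the
//                    LoadTrajectory action feeds into TrajectoryFromModelAndCoordinates
```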

+ 3 - 3
src/mol-plugin-state/manager/snapshots.ts

@@ -238,12 +238,12 @@ class PluginStateSnapshotManager extends StatefulPluginComponent<{
                 }
             } else {
                 const data = await this.plugin.runTask(readFromFile(file, 'zip'));
-                const assets = Object.create(null);
+                const assetData = Object.create(null);
 
                 objectForEach(data, (v, k) => {
                     if (k === 'state.json' || k === 'assets.json') return;
                     const name = k.substring(k.indexOf('/') + 1);
-                    assets[name] = new File([v], name);
+                    assetData[name] = v;
                 });
                 const stateFile = new File([data['state.json']], 'state.json');
                 const stateData = await this.plugin.runTask(readFromFile(stateFile, 'string'));
@@ -253,7 +253,7 @@ class PluginStateSnapshotManager extends StatefulPluginComponent<{
                     const json = JSON.parse(await this.plugin.runTask(readFromFile(file, 'string')));
 
                     for (const [id, asset] of json) {
-                        this.plugin.managers.asset.set(asset, assets[id]);
+                        this.plugin.managers.asset.set(asset, new File([assetData[id]], asset.name));
                     }
                 }
 

+ 5 - 1
src/mol-plugin-state/objects.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2018-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author David Sehnal <david.sehnal@gmail.com>
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
@@ -24,6 +24,8 @@ import { CubeFile } from '../mol-io/reader/cube/parser';
 import { DxFile } from '../mol-io/reader/dx/parser';
 import { Color } from '../mol-util/color/color';
 import { Asset } from '../mol-util/assets';
+import { PrmtopFile } from '../mol-io/reader/prmtop/parser';
+import { TopFile } from '../mol-io/reader/top/parser';
 
 export type TypeClass = 'root' | 'data' | 'prop'
 
@@ -72,6 +74,8 @@ export namespace PluginStateObject {
         export class Cif extends Create<CifFile>({ name: 'CIF File', typeClass: 'Data' }) { }
         export class Cube extends Create<CubeFile>({ name: 'Cube File', typeClass: 'Data' }) { }
         export class Psf extends Create<PsfFile>({ name: 'PSF File', typeClass: 'Data' }) { }
+        export class Prmtop extends Create<PrmtopFile>({ name: 'PRMTOP File', typeClass: 'Data' }) { }
+        export class Top extends Create<TopFile>({ name: 'TOP File', typeClass: 'Data' }) { }
         export class Ply extends Create<PlyFile>({ name: 'PLY File', typeClass: 'Data' }) { }
         export class Ccp4 extends Create<Ccp4File>({ name: 'CCP4/MRC/MAP File', typeClass: 'Data' }) { }
         export class Dsn6 extends Create<Dsn6File>({ name: 'DSN6/BRIX File', typeClass: 'Data' }) { }

+ 36 - 0
src/mol-plugin-state/transforms/data.ts

@@ -21,6 +21,8 @@ import { parseCube } from '../../mol-io/reader/cube/parser';
 import { parseDx } from '../../mol-io/reader/dx/parser';
 import { ColorNames } from '../../mol-util/color/names';
 import { assertUnreachable } from '../../mol-util/type-helpers';
+import { parsePrmtop } from '../../mol-io/reader/prmtop/parser';
+import { parseTop } from '../../mol-io/reader/top/parser';
 
 export { Download };
 export { DownloadBlob };
@@ -30,6 +32,8 @@ export { ParseBlob };
 export { ParseCif };
 export { ParseCube };
 export { ParsePsf };
+export { ParsePrmtop };
+export { ParseTop };
 export { ParsePly };
 export { ParseCcp4 };
 export { ParseDsn6 };
@@ -317,6 +321,38 @@ const ParsePsf = PluginStateTransform.BuiltIn({
     }
 });
 
+type ParsePrmtop = typeof ParsePrmtop
+const ParsePrmtop = PluginStateTransform.BuiltIn({
+    name: 'parse-prmtop',
+    display: { name: 'Parse PRMTOP', description: 'Parse PRMTOP from String data' },
+    from: [SO.Data.String],
+    to: SO.Format.Prmtop
+})({
+    apply({ a }) {
+        return Task.create('Parse PRMTOP', async ctx => {
+            const parsed = await parsePrmtop(a.data).runInContext(ctx);
+            if (parsed.isError) throw new Error(parsed.message);
+            return new SO.Format.Prmtop(parsed.result);
+        });
+    }
+});
+
+type ParseTop = typeof ParseTop
+const ParseTop = PluginStateTransform.BuiltIn({
+    name: 'parse-top',
+    display: { name: 'Parse TOP', description: 'Parse TOP from String data' },
+    from: [SO.Data.String],
+    to: SO.Format.Top
+})({
+    apply({ a }) {
+        return Task.create('Parse TOP', async ctx => {
+            const parsed = await parseTop(a.data).runInContext(ctx);
+            if (parsed.isError) throw new Error(parsed.message);
+            return new SO.Format.Top(parsed.result);
+        });
+    }
+});
+
 type ParsePly = typeof ParsePly
 const ParsePly = PluginStateTransform.BuiltIn({
     name: 'parse-ply',

+ 77 - 3
src/mol-plugin-state/transforms/model.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2018-2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author David Sehnal <david.sehnal@gmail.com>
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
@@ -42,10 +42,20 @@ import { trajectoryFromXyz } from '../../mol-model-formats/structure/xyz';
 import { parseSdf } from '../../mol-io/reader/sdf/parser';
 import { trajectoryFromSdf } from '../../mol-model-formats/structure/sdf';
 import { assertUnreachable } from '../../mol-util/type-helpers';
+import { parseTrr } from '../../mol-io/reader/trr/parser';
+import { coordinatesFromTrr } from '../../mol-model-formats/structure/trr';
+import { parseNctraj } from '../../mol-io/reader/nctraj/parser';
+import { coordinatesFromNctraj } from '../../mol-model-formats/structure/nctraj';
+import { topologyFromPrmtop } from '../../mol-model-formats/structure/prmtop';
+import { topologyFromTop } from '../../mol-model-formats/structure/top';
 
 export { CoordinatesFromDcd };
 export { CoordinatesFromXtc };
+export { CoordinatesFromTrr };
+export { CoordinatesFromNctraj };
 export { TopologyFromPsf };
+export { TopologyFromPrmtop };
+export { TopologyFromTop };
 export { TrajectoryFromModelAndCoordinates };
 export { TrajectoryFromBlob };
 export { TrajectoryFromMmCif };
@@ -88,7 +98,7 @@ const CoordinatesFromDcd = PluginStateTransform.BuiltIn({
     }
 });
 
-type CoordinatesFromXtc = typeof CoordinatesFromDcd
+type CoordinatesFromXtc = typeof CoordinatesFromXtc
 const CoordinatesFromXtc = PluginStateTransform.BuiltIn({
     name: 'coordinates-from-xtc',
     display: { name: 'Parse XTC', description: 'Parse XTC binary data.' },
@@ -105,10 +115,44 @@ const CoordinatesFromXtc = PluginStateTransform.BuiltIn({
     }
 });
 
+type CoordinatesFromTrr = typeof CoordinatesFromTrr
+const CoordinatesFromTrr = PluginStateTransform.BuiltIn({
+    name: 'coordinates-from-trr',
+    display: { name: 'Parse TRR', description: 'Parse TRR binary data.' },
+    from: [SO.Data.Binary],
+    to: SO.Molecule.Coordinates
+})({
+    apply({ a }) {
+        return Task.create('Parse TRR', async ctx => {
+            const parsed = await parseTrr(a.data).runInContext(ctx);
+            if (parsed.isError) throw new Error(parsed.message);
+            const coordinates = await coordinatesFromTrr(parsed.result).runInContext(ctx);
+            return new SO.Molecule.Coordinates(coordinates, { label: a.label, description: 'Coordinates' });
+        });
+    }
+});
+
+type CoordinatesFromNctraj = typeof CoordinatesFromNctraj
+const CoordinatesFromNctraj = PluginStateTransform.BuiltIn({
+    name: 'coordinates-from-nctraj',
+    display: { name: 'Parse NCTRAJ', description: 'Parse NCTRAJ binary data.' },
+    from: [SO.Data.Binary],
+    to: SO.Molecule.Coordinates
+})({
+    apply({ a }) {
+        return Task.create('Parse NCTRAJ', async ctx => {
+            const parsed = await parseNctraj(a.data).runInContext(ctx);
+            if (parsed.isError) throw new Error(parsed.message);
+            const coordinates = await coordinatesFromNctraj(parsed.result).runInContext(ctx);
+            return new SO.Molecule.Coordinates(coordinates, { label: a.label, description: 'Coordinates' });
+        });
+    }
+});
+
 type TopologyFromPsf = typeof TopologyFromPsf
 const TopologyFromPsf = PluginStateTransform.BuiltIn({
     name: 'topology-from-psf',
-    display: { name: 'PSF Topology', description: 'Parse PSF string data.' },
+    display: { name: 'PSF Topology', description: 'Create topology from PSF.' },
     from: [SO.Format.Psf],
     to: SO.Molecule.Topology
 })({
@@ -120,6 +164,36 @@ const TopologyFromPsf = PluginStateTransform.BuiltIn({
     }
 });
 
+type TopologyFromPrmtop = typeof TopologyFromPrmtop
+const TopologyFromPrmtop = PluginStateTransform.BuiltIn({
+    name: 'topology-from-prmtop',
+    display: { name: 'PRMTOP Topology', description: 'Create topology from PRMTOP.' },
+    from: [SO.Format.Prmtop],
+    to: SO.Molecule.Topology
+})({
+    apply({ a }) {
+        return Task.create('Create Topology', async ctx => {
+            const topology = await topologyFromPrmtop(a.data).runInContext(ctx);
+            return new SO.Molecule.Topology(topology, { label: topology.label || a.label, description: 'Topology' });
+        });
+    }
+});
+
+type TopologyFromTop = typeof TopologyFromTop
+const TopologyFromTop = PluginStateTransform.BuiltIn({
+    name: 'topology-from-top',
+    display: { name: 'TOP Topology', description: 'Create topology from TOP.' },
+    from: [SO.Format.Top],
+    to: SO.Molecule.Topology
+})({
+    apply({ a }) {
+        return Task.create('Create Topology', async ctx => {
+            const topology = await topologyFromTop(a.data).runInContext(ctx);
+            return new SO.Molecule.Topology(topology, { label: topology.label || a.label, description: 'Topology' });
+        });
+    }
+});
+
 async function getTrajectory(ctx: RuntimeContext, obj: StateObject, coordinates: Coordinates) {
     if (obj.type === SO.Molecule.Topology.type) {
         const topology = obj.data as Topology;
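
Combined with the topology transforms, the new coordinate transforms slot into the existing ``TrajectoryFromModelAndCoordinates`` flow; a sketch assuming ``topologyRef`` (an ``SO.Molecule.Topology`` cell) and ``trrDataRef`` (an ``SO.Data.Binary`` cell) already exist in the state tree:

```ts
// Parse the TRR binary data node into coordinates.
const coords = await plugin.state.data.build()
    .to(trrDataRef)
    .apply(StateTransforms.Model.CoordinatesFromTrr)
    .commit();

// Combine topology and coordinates into a trajectory.
const traj = await plugin.state.data.build().toRoot()
    .apply(StateTransforms.Model.TrajectoryFromModelAndCoordinates, {
        modelRef: topologyRef,
        coordinatesRef: coords.ref
    }, { dependsOn: [topologyRef, coords.ref] })
    .commit();
```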

+ 17 - 1
src/mol-plugin-ui/left-panel.tsx

@@ -1,7 +1,8 @@
 /**
- * Copyright (c) 2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2019-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author David Sehnal <david.sehnal@gmail.com>
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */
 
 import * as React from 'react';
@@ -19,6 +20,20 @@ import { StateTree } from './state/tree';
 import { HelpContent } from './viewport/help';
 import { HomeOutlinedSvg, AccountTreeOutlinedSvg, TuneSvg, HelpOutlineSvg, SaveOutlinedSvg, DeleteOutlinedSvg } from './controls/icons';
 
+export class CustomImportControls extends PluginUIComponent<{ initiallyCollapsed?: boolean }> {
+    componentDidMount() {
+        this.subscribe(this.plugin.state.behaviors.events.changed, () => this.forceUpdate());
+    }
+
+    render() {
+        const controls: JSX.Element[] = [];
+        this.plugin.customImportControls.forEach((Controls, key) => {
+            controls.push(<Controls initiallyCollapsed={this.props.initiallyCollapsed} key={key} />);
+        });
+        return controls.length > 0 ? <>{controls}</> : null;
+    }
+}
+
 export class LeftPanelControls extends PluginUIComponent<{}, { tab: LeftPanelTabName }> {
     state = { tab: this.plugin.behaviors.layout.leftPanelTabName.value };
 
@@ -54,6 +69,7 @@ export class LeftPanelControls extends PluginUIComponent<{}, { tab: LeftPanelTab
         'root': <>
             <SectionHeader icon={HomeOutlinedSvg} title='Home' />
             <StateObjectActions state={this.plugin.state.data} nodeRef={StateTransform.RootRef} hideHeader={true} initiallyCollapsed={true} alwaysExpandFirst={true} />
+            <CustomImportControls />
             {this.plugin.spec.components?.remoteState !== 'none' && <RemoteStateSnapshots listOnly /> }
         </>,
         'data': <>
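
``CustomImportControls`` only renders what extensions have registered on the plugin context, so the Zenodo UI (or any other importer) plugs in via the new ``customImportControls`` map; a sketch with a hypothetical ``MyImportUI`` component and key:

```ts
// In an extension's register hook:
plugin.customImportControls.set('my-import', MyImportUI as any);
// ...and in its unregister hook:
plugin.customImportControls.delete('my-import');
```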

+ 2 - 1
src/mol-plugin/context.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2018-2021 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author David Sehnal <david.sehnal@gmail.com>
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
@@ -176,6 +176,7 @@ export class PluginContext {
     readonly customStructureProperties = new CustomProperty.Registry<Structure>();
 
     readonly customStructureControls = new Map<string, { new(): any /* constructible react components with <action.customControl /> */ }>();
+    readonly customImportControls = new Map<string, { new(): any /* constructible react components with <action.customControl /> */ }>();
     readonly genericRepresentationControls = new Map<string, (selection: StructureHierarchyManager['selection']) => [StructureHierarchyRef[], string]>();
 
     /**

+ 1 - 1
src/mol-plugin/spec.ts

@@ -64,10 +64,10 @@ namespace PluginSpec {
 export const DefaultPluginSpec = (): PluginSpec => ({
     actions: [
         PluginSpec.Action(StateActions.Structure.DownloadStructure),
-        PluginSpec.Action(StateActions.Structure.AddTrajectory),
         PluginSpec.Action(StateActions.Volume.DownloadDensity),
         PluginSpec.Action(StateActions.DataFormat.DownloadFile),
         PluginSpec.Action(StateActions.DataFormat.OpenFiles),
+        PluginSpec.Action(StateActions.Structure.LoadTrajectory),
         PluginSpec.Action(StateActions.Structure.EnableModelCustomProps),
         PluginSpec.Action(StateActions.Structure.EnableStructureCustomProps),
 

+ 3 - 1
src/mol-util/data-source.ts

@@ -130,13 +130,15 @@ function getCompression(name: string) {
             DataCompressionMethod.None;
 }
 
+const reFilterPath = /^(__MACOSX|.DS_Store)/;
+
 async function decompress(ctx: RuntimeContext, data: Uint8Array, compression: DataCompressionMethod): Promise<Uint8Array> {
     switch (compression) {
         case DataCompressionMethod.None: return data;
         case DataCompressionMethod.Gzip: return ungzip(ctx, data);
         case DataCompressionMethod.Zip:
             const parsed = await unzip(ctx, data.buffer);
-            const names = Object.keys(parsed);
+            const names = Object.keys(parsed).filter(n => !reFilterPath.test(n));
             if (names.length !== 1) throw new Error('can only decompress zip files with a single entry');
             return parsed[names[0]] as Uint8Array;
     }

+ 2 - 13
src/mol-util/file-info.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2018-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  */
@@ -11,17 +11,12 @@ export type FileInput = File | Blob | string
 // TODO store globally with decompression plugins?
 const compressedExtList = ['gz', 'zip'];
 
-// TODO store globally with parser plugins?
-const binaryExtList = ['bcif', 'ccp4', 'dcd'];
-
 export interface FileInfo {
     path: string
     name: string
     ext: string
     base: string
     dir: string
-    compressed: string | boolean
-    binary: boolean
     protocol: string
     query: string
     src: FileInput
@@ -29,7 +24,6 @@ export interface FileInfo {
 
 export function getFileInfo(file: FileInput): FileInfo {
     let path: string;
-    let compressed: string|false;
     let protocol = '';
 
     if (file instanceof File) {
@@ -58,16 +52,11 @@ export function getFileInfo(file: FileInput): FileInfo {
     const dir = path.substring(0, path.lastIndexOf('/') + 1);
 
     if (compressedExtList.includes(ext)) {
-        compressed = ext;
         const n = path.length - ext.length - 1;
         ext = (path.substr(0, n).split('.').pop() || '').toLowerCase();
         const m = base.length - ext.length - 1;
         base = base.substr(0, m);
-    } else {
-        compressed = false;
     }
 
-    const binary = binaryExtList.includes(ext);
-
-    return { path, name, ext, base, dir, compressed, binary, protocol, query, src: file };
+    return { path, name, ext, base, dir, protocol, query, src: file };
 }

+ 6 - 0
src/mol-util/index.ts

@@ -202,4 +202,10 @@ export function formatProgress(p: Progress) {
     if (tp.isIndeterminate) return tp.message;
     const x = (100 * tp.current / tp.max).toFixed(2);
     return `${tp.message} ${x}%`;
+}
+
+export function formatBytes(count: number) {
+    const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
+    const i = Math.floor(Math.log(count) / Math.log(1024));
+    return `${(count / Math.pow(1024, i)).toFixed(2)} ${units[i]}`;
 }
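
``formatBytes`` picks the unit from powers of 1024 and rounds to two decimals; worked examples (computed by hand, not taken from tests):

```ts
formatBytes(1536);     // '1.50 KB'
formatBytes(10000000); // '9.54 MB'
```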

+ 5 - 1
src/mol-util/zip/zip.ts

@@ -1,5 +1,5 @@
 /**
- * Copyright (c) 2020 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ * Copyright (c) 2020-2022 mol* contributors, licensed under MIT, See LICENSE file for more info.
  *
  * @author Alexander Rose <alexander.rose@weirdbyte.de>
  *
@@ -15,6 +15,10 @@ import { _inflate } from './inflate';
 import { _deflateRaw } from './deflate';
 import { RuntimeContext, Task } from '../../mol-task';
 
+export function Unzip(buf: ArrayBuffer, onlyNames = false) {
+    return Task.create('Unzip', ctx => unzip(ctx, buf, onlyNames));
+}
+
 export async function unzip(runtime: RuntimeContext, buf: ArrayBuffer, onlyNames = false) {
     const out: { [k: string]: Uint8Array | { size: number, csize: number } } = Object.create(null);
     const data = new Uint8Array(buf);