
Added file selection for PLY files and created an initial PLY parser (work in progress)

MarcoSchaeferT 6 years ago
parent
commit
515aaaeb7c

+ 21 - 7
package-lock.json

@@ -4270,12 +4270,14 @@
         "balanced-match": {
           "version": "1.0.0",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "brace-expansion": {
           "version": "1.1.11",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "balanced-match": "^1.0.0",
             "concat-map": "0.0.1"
@@ -4290,17 +4292,20 @@
         "code-point-at": {
           "version": "1.1.0",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "concat-map": {
           "version": "0.0.1",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "console-control-strings": {
           "version": "1.1.0",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "core-util-is": {
           "version": "1.0.2",
@@ -4417,7 +4422,8 @@
         "inherits": {
           "version": "2.0.3",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "ini": {
           "version": "1.3.5",
@@ -4429,6 +4435,7 @@
           "version": "1.0.0",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "number-is-nan": "^1.0.0"
           }
@@ -4443,6 +4450,7 @@
           "version": "3.0.4",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "brace-expansion": "^1.1.7"
           }
@@ -4450,12 +4458,14 @@
         "minimist": {
           "version": "0.0.8",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "minipass": {
           "version": "2.2.4",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "safe-buffer": "^5.1.1",
             "yallist": "^3.0.0"
@@ -4474,6 +4484,7 @@
           "version": "0.5.1",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "minimist": "0.0.8"
           }
@@ -4554,7 +4565,8 @@
         "number-is-nan": {
           "version": "1.0.1",
           "bundled": true,
-          "dev": true
+          "dev": true,
+          "optional": true
         },
         "object-assign": {
           "version": "4.1.1",
@@ -4566,6 +4578,7 @@
           "version": "1.4.0",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "wrappy": "1"
           }
@@ -4687,6 +4700,7 @@
           "version": "1.0.2",
           "bundled": true,
           "dev": true,
+          "optional": true,
           "requires": {
             "code-point-at": "^1.0.0",
             "is-fullwidth-code-point": "^1.0.0",

+ 1 - 0
src/apps/structure-info/volume.ts

@@ -35,6 +35,7 @@ function print(data: Volume) {
     console.log(data.volume.cell);
     console.log(data.volume.dataStats);
     console.log(data.volume.fractionalBox);
+    console.log("\n\n Hello 12156421231 \n\n");
 }
 
 async function doMesh(data: Volume, filename: string) {

+ 90 - 0
src/mol-io/common/ascii.ts

@@ -0,0 +1,90 @@
+/**
+ * Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * Adapted from https://github.com/rcsb/mmtf-javascript
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+// NOT IN USE ELSEWHERE!
+export function asciiWrite(data: Uint8Array, offset: number, str: string) {
+    for (let i = 0, l = str.length; i < l; i++) {
+        let codePoint = str.charCodeAt(i);
+
+        // One byte of UTF-8
+        if (codePoint < 0x80) {
+            data[offset++] = codePoint >>> 0 & 0x7f | 0x00;
+            continue;
+        }
+
+        // Two bytes of UTF-8
+        if (codePoint < 0x800) {
+            data[offset++] = codePoint >>> 6 & 0x1f | 0xc0;
+            data[offset++] = codePoint >>> 0 & 0x3f | 0x80;
+            continue;
+        }
+
+        // Three bytes of UTF-8.
+        if (codePoint < 0x10000) {
+            data[offset++] = codePoint >>> 12 & 0x0f | 0xe0;
+            data[offset++] = codePoint >>> 6 & 0x3f | 0x80;
+            data[offset++] = codePoint >>> 0 & 0x3f | 0x80;
+            continue;
+        }
+
+        // Four bytes of UTF-8
+        if (codePoint < 0x110000) {
+            data[offset++] = codePoint >>> 18 & 0x07 | 0xf0;
+            data[offset++] = codePoint >>> 12 & 0x3f | 0x80;
+            data[offset++] = codePoint >>> 6 & 0x3f | 0x80;
+            data[offset++] = codePoint >>> 0 & 0x3f | 0x80;
+            continue;
+        }
+        throw new Error('bad codepoint ' + codePoint);
+    }
+}
+
+const __chars = function () {
+    let data: string[] = [];
+    for (let i = 0; i < 1024; i++) data[i] = String.fromCharCode(i);
+    return data;
+}();
+
+function throwError(err: string) {
+    throw new Error(err);
+}
+
+export function asciiRead(data: number, offset: number, length: number) {
+    let chars = __chars;
+    let str: string | undefined = void 0;
+
+    let byte = data;
+    // One byte character
+    if ((byte & 0x80) !== 0x00) throwError('Invalid byte ' + byte.toString(16));
+    str = chars[byte];
+    return str;
+}
+
+export function asciiByteCount(str: string) {
+    let count = 0;
+    for (let i = 0, l = str.length; i < l; i++) {
+        let codePoint = str.charCodeAt(i);
+        if (codePoint < 0x80) {
+            count += 1;
+            continue;
+        }
+        if (codePoint < 0x800) {
+            count += 2;
+            continue;
+        }
+        if (codePoint < 0x10000) {
+            count += 3;
+            continue;
+        }
+        if (codePoint < 0x110000) {
+            count += 4;
+            continue;
+        }
+        throwError('bad codepoint ' + codePoint);
+    }
+    return count;
+}
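
A minimal usage sketch for these helpers (not part of this commit; the 'mol-io/common/ascii' import path is assumed from the new file location above):

    import { asciiWrite, asciiRead, asciiByteCount } from 'mol-io/common/ascii';

    const str = 'ply';
    const buffer = new Uint8Array(asciiByteCount(str)); // 3 bytes, since the input is pure ASCII
    asciiWrite(buffer, 0, str);                          // buffer now holds 0x70 0x6c 0x79
    const first = asciiRead(buffer[0], 0, 1);            // 'p' (asciiRead decodes a single byte value)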

+ 1 - 0
src/mol-io/reader/cif/text/parser.ts

@@ -60,6 +60,7 @@ interface TokenizerState {
  * Eat everything until a whitespace/newline occurs.
  */
 function eatValue(state: TokenizerState) {
+    console.log("hello");
     while (state.position < state.length) {
         switch (state.data.charCodeAt(state.position)) {
             case 9:  // \t

+ 1 - 0
src/mol-io/reader/common/text/tokenizer.ts

@@ -85,6 +85,7 @@ export namespace Tokenizer {
     /** Sets the current token start to current position and moves to the next line. */
     export function markLine(state: Tokenizer) {
         state.tokenStart = state.position;
+        console.log("hello");
         eatLine(state);
     }
 

+ 39 - 0
src/mol-io/reader/ply/parse_data/data-model.ts

@@ -0,0 +1,39 @@
+/**
+ * Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import { CifField as CsvColumn } from '../../cif/data-model'
+
+export { CsvColumn }
+
+export interface PlyFile {
+    readonly name?: string,
+    readonly PLY_File: ply_form
+}
+
+export function CsvFile(PLY_File: ply_form, name?: string): PlyFile {
+    return { name, PLY_File };
+}
+
+export interface ply_form {
+    readonly rowCount: number,
+    readonly vertexCount: number,
+    readonly faceCount: number,
+    readonly propertyCount: number,
+    readonly initialHead: ReadonlyArray<string>,
+    getColumn(name: string): CsvColumn | undefined
+}
+
+export function CsvTable(rowCount: number, vertexCount: number, faceCount: number, propertyCount: number, initialHead: string[], columns: CsvColumns): ply_form {
+    return { rowCount, vertexCount, faceCount, propertyCount, initialHead: [...initialHead], getColumn(name) { return columns[name]; } };
+}
+
+export type CsvColumns = { [name: string]: CsvColumn }
+
+// export namespace CsvTable {
+//     export function empty(name: string): Table {
+//         return { rowCount: 0, name, fieldNames: [], getColumn(name: string) { return void 0; } };
+//     };
+// }
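
A minimal sketch of consuming this data model directly (not part of this commit; columns are normally produced by the parser below and are left empty here for illustration):

    import { CsvFile, CsvTable, CsvColumns, PlyFile } from 'mol-io/reader/ply/parse_data/data-model';

    const columns: CsvColumns = {};
    const table = CsvTable(0, 0, 0, 0, ['ply', 'format', 'ascii'], columns);
    const file: PlyFile = CsvFile(table, 'example.ply');
    console.log(file.PLY_File.getColumn('x')); // undefined until real columns are parsed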

+ 9 - 0
src/mol-io/reader/ply/parse_data/field.ts

@@ -0,0 +1,9 @@
+/**
+ * Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+import Field from '../../cif/text/field'
+
+export default Field

+ 263 - 0
src/mol-io/reader/ply/parse_data/ply_parser.ts

@@ -0,0 +1,263 @@
+/**
+ * Copyright (c) 2017 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author Alexander Rose <alexander.rose@weirdbyte.de>
+ */
+
+// import { Column } from 'mol-data/db'
+import { Tokens, TokenBuilder, Tokenizer } from '../../common/text/tokenizer'
+import * as Data from './data-model'
+import Field from './field'
+import Result from '../../result'
+import { Task, RuntimeContext, chunkedSubtask, } from 'mol-task'
+
+
+const enum PlyTokenType {
+    Value = 0,
+    Comment = 1,
+    End = 2,
+    property = 3
+}
+
+interface State {
+    data: string;
+    tokenizer: Tokenizer,
+
+    tokenType: PlyTokenType;
+    runtimeCtx: RuntimeContext,
+    tokens: Tokens[],
+
+    fieldCount: number,
+    recordCount: number,
+
+    columnCount: number,
+    initialHead: string[],
+    propertyNames: string[],
+
+    commentCharCode: number,
+    propertyCharCode: number
+}
+
+function State(data: string, runtimeCtx: RuntimeContext, opts: PlyOptions): State {
+
+    const tokenizer = Tokenizer(data)
+    return {
+        data,
+        tokenizer,
+
+        tokenType: PlyTokenType.End,
+        runtimeCtx,
+        tokens: [],
+
+        fieldCount: 0,
+        recordCount: 0,
+
+        columnCount: 0,
+        initialHead: [],
+        propertyNames: [],
+
+        commentCharCode: opts.comment.charCodeAt(0),
+        propertyCharCode: opts.property.charCodeAt(0)
+    };
+}
+
+/**
+ * Eat everything until a delimiter (whitespace) or newline occurs.
+ * Ignores whitespace at the end of the value, i.e. trim right.
+ * Returns true when a newline occurs after the value.
+ */
+function eatValue(state: Tokenizer) {
+    while (state.position < state.length) {
+        const c = state.data.charCodeAt(state.position);
+        ++state.position
+        switch (c) {
+            case 10:  // \n
+            case 13:  // \r
+                return true;
+            case 32: // ' ' the PLY delimiter is a space (char code 32)
+                return;
+            case 9:  // \t
+            case 32:  // ' '
+                break;
+            default:
+                ++state.tokenEnd;
+                break;
+        }
+    }
+}
+
+
+
+function skipWhitespace(state: Tokenizer) {
+    let prev = -1;
+    while (state.position < state.length) {
+        const c = state.data.charCodeAt(state.position);
+        switch (c) {
+            case 9:  // '\t'
+            //case 32:  // ' '
+                prev = c;
+                ++state.position;
+                break;
+            case 10:  // \n
+                // handle \r\n
+                if (prev !== 13) {
+                    ++state.lineNumber;
+                }
+                prev = c;
+                ++state.position;
+                break;
+            case 13:  // \r
+                prev = c;
+                ++state.position;
+                ++state.lineNumber;
+                break;
+            default:
+                return;
+        }
+    }
+}
+
+function skipLine(state: Tokenizer) {
+    while (state.position < state.length) {
+        const c = state.data.charCodeAt(state.position);
+        if (c === 10 || c === 13) return  // \n or \r
+        ++state.position
+    }
+}
+
+/**
+ * Move to the next token.
+ * Returns true when the current char is a newline, i.e. indicating a full record.
+ */
+function moveNextInternal(state: State) {
+    const tokenizer = state.tokenizer
+    //skipWhitespace(tokenizer);
+
+    if (tokenizer.position >= tokenizer.length) {
+        state.tokenType = PlyTokenType.End;
+        return true;
+    }
+
+    tokenizer.tokenStart = tokenizer.position;
+    tokenizer.tokenEnd = tokenizer.position;
+    const c = state.data.charCodeAt(tokenizer.position);
+    switch (c) {
+        case state.commentCharCode:
+            state.tokenType = PlyTokenType.Comment;
+            skipLine(tokenizer);
+            break;
+        case state.propertyCharCode:
+            state.tokenType = PlyTokenType.property;
+            //return eatProperty(tokenizer);
+        default:
+            state.tokenType = PlyTokenType.Value;
+            return eatValue(tokenizer);
+    }
+}
+
+/**
+ * Moves to the next non-comment token/line.
+ * Returns true when the current char is a newline, i.e. indicating a full record.
+ */
+function moveNext(state: State) {
+    let newRecord = moveNextInternal(state);
+    while (state.tokenType === PlyTokenType.Comment) { // skip comment lines (marco)
+        newRecord = moveNextInternal(state);
+    }
+    return newRecord
+}
+
+function readRecordsChunk(chunkSize: number, state: State) {
+    if (state.tokenType === PlyTokenType.End) return 0
+
+    let newRecord = moveNext(state);
+    if (newRecord) ++state.recordCount
+
+    const { tokens, tokenizer } = state;
+    let counter = 0;
+    while (state.tokenType === PlyTokenType.Value && counter < chunkSize) {
+        TokenBuilder.add(tokens[state.fieldCount % state.columnCount], tokenizer.tokenStart, tokenizer.tokenEnd);
+        ++state.fieldCount
+        newRecord = moveNext(state);
+        if (newRecord) ++state.recordCount
+        ++counter;
+    }
+    return counter;
+}
+
+function readRecordsChunks(state: State) {
+    return chunkedSubtask(state.runtimeCtx, 100000, state, readRecordsChunk,
+        (ctx, state) => ctx.update({ message: 'Parsing...', current: state.tokenizer.position, max: state.data.length }));
+}
+
+function addColumn (state: State) {
+    state.initialHead.push(Tokenizer.getTokenString(state.tokenizer))
+    state.tokens.push(TokenBuilder.create(state.tokenizer, state.data.length / 80))
+}
+
+function init(state: State) { // only for first line to get the columns! (marco)
+    let newRecord = moveNext(state)
+    while (!newRecord) {  // newRecord is only true when a newline occurs (marco)
+        addColumn(state)
+        newRecord = moveNext(state);
+    }
+    addColumn(state)
+    newRecord = moveNext(state);
+    while (!newRecord) {
+        addColumn(state)
+        newRecord = moveNext(state);
+    }
+    addColumn(state)
+    if(state.initialHead[0] !== 'ply'){
+        console.log("ERROR: this is not a .ply file!")
+        throw new Error("this is not a .ply file!");
+        return 0;
+    }
+    if(state.initialHead[2] !== 'ascii'){
+        console.log("ERROR: only ASCII-DECODING is supported!");
+        throw new Error("only ASCII-DECODING is supported!");
+        return 0;
+    }
+    state.columnCount = state.initialHead.length
+    return 1;
+}
+
+async function handleRecords(state: State): Promise<Data.ply_form> {
+    if(!init(state)){
+        console.log("ERROR: parsing file (PLY) failed!")
+        throw new Error("parsing file (PLY) failed!");
+    }
+    await readRecordsChunks(state)
+
+    const columns: Data.CsvColumns = Object.create(null);
+    for (let i = 0; i < state.columnCount; ++i) {
+        columns[state.initialHead[i]] = Field(state.tokens[i], state.recordCount);
+    }
+
+
+    return Data.CsvTable(state.recordCount, 0, 0, 0, state.initialHead, columns)
+}
+
+async function parseInternal(data: string, ctx: RuntimeContext, opts: PlyOptions): Promise<Result<Data.PlyFile>> {
+    const state = State(data, ctx, opts);
+
+    ctx.update({ message: 'Parsing...', current: 0, max: data.length });
+    const table = await handleRecords(state)
+    const result = Data.CsvFile(table)
+    console.log(result);
+    return Result.success(result);
+}
+
+interface PlyOptions {
+    comment: string;
+    property: string;
+}
+
+export function parse(data: string, opts?: Partial<PlyOptions>) {
+    const completeOpts = Object.assign({}, { comment: 'c', property: 'p' }, opts)
+    return Task.create<Result<Data.PlyFile>>('Parse PLY', async ctx => {
+        return await parseInternal(data, ctx, completeOpts);
+    });
+}
+
+export default parse;
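
A minimal sketch of exercising the new parser entry point (not part of this commit; it assumes an ASCII PLY string and mol-task's Task.run(), as used in render-shape.ts below):

    import parsePly from 'mol-io/reader/ply/parse_data/ply_parser';

    async function readPlyHeader(plyText: string) {
        // comment/property prefixes default to 'c' and 'p' (see parse() above)
        const result = await parsePly(plyText).run();
        if (result.isError) throw new Error(result.message);
        const table = result.result.PLY_File;
        return { rows: table.rowCount, head: table.initialHead };
    }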

+ 58 - 0
src/mol-io/reader/ply/read_data/data-source.ts

@@ -0,0 +1,58 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ *
+ * Adapted from LiteMol
+ */
+
+import { Task, RuntimeContext } from 'mol-task';
+
+export function readFromFile(file: File) {
+    return <Task<number | string>>readFromFileInternal(file);
+}
+
+
+async function processFile(ctx: RuntimeContext, e: any) {
+    const data = (e.target as FileReader).result;
+    return  data as string;
+}
+
+function readData(ctx: RuntimeContext, action: string, data: XMLHttpRequest | FileReader): Promise<any> {
+    return new Promise<any>((resolve, reject) => {
+        data.onerror = (e: any) => {
+            const error = (<FileReader>e.target).error;
+            reject(error ? error : 'Failed.');
+        };
+
+        data.onabort = () => reject(Task.Aborted(''));
+
+        data.onprogress = (e: ProgressEvent) => {
+            if (e.lengthComputable) {
+                ctx.update({ message: action, isIndeterminate: false, current: e.loaded, max: e.total });
+            } else {
+                ctx.update({ message: `${action} ${(e.loaded / 1024 / 1024).toFixed(2)} MB`, isIndeterminate: true });
+            }
+        }
+        data.onload = (e: any) => resolve(e);
+    });
+}
+
+function readFromFileInternal(file: File): Task<string | number> {
+    let reader: FileReader | undefined = void 0;
+    return Task.create('Read File', async ctx => {
+        try {
+            reader = new FileReader();
+            reader.readAsBinaryString(file);
+
+            ctx.update({ message: 'Opening file...', canAbort: true });
+            const e = await readData(ctx, 'Reading...', reader);
+            const result = processFile(ctx, e);
+            return result;
+        } finally {
+            reader = void 0;
+        }
+    }, () => {
+        if (reader) reader.abort();
+    });
+}
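
A minimal sketch of using this reader outside the ReadFile_ascii transform below (not part of this commit; assumes a browser File object and Task.run()):

    import { readFromFile } from 'mol-io/reader/ply/read_data/data-source';

    async function loadPlyText(file: File) {
        // resolves to the file contents read as a (binary) string
        const text = await readFromFile(file).run();
        return text as string;
    }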

+ 60 - 0
src/mol-io/reader/ply/read_data/data.ts

@@ -0,0 +1,60 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import { PluginStateTransform } from '../../../../mol-plugin/state/objects';
+import { PluginStateObject as SO } from '../../../../mol-plugin/state/objects';
+import { Task } from 'mol-task';
+import PLY from 'mol-io/reader/ply/parse_data/ply_parser'
+import { ParamDefinition as PD } from 'mol-util/param-definition';
+import { Transformer } from 'mol-state';
+import { readFromFile } from './data-source';
+
+export { ReadFile_ascii }
+type ReadFile_ascii = typeof ReadFile_ascii
+const ReadFile_ascii = PluginStateTransform.BuiltIn({
+    name: 'ReadFile_ascii',
+    display: { name: 'ReadFile_ascii', description: 'Read string data from the specified file' },
+    from: SO.Root,
+    to: [SO.Data.String],
+    params: {
+        file: PD.File(),
+        label: PD.makeOptional(PD.Text('')),
+        isBinary: PD.makeOptional(PD.Boolean(false, { description: 'If true, open file as binary (string otherwise)' }))
+    }
+})({
+    apply({ params: p }) {
+        return Task.create('Open File', async ctx => {
+            const data = await readFromFile(p.file).runInContext(ctx);
+            return  new SO.Data.String(data as string, { label: p.label ? p.label : p.file.name });
+        });
+    },
+    update({ oldParams, newParams, b }) {
+        if (oldParams.label !== newParams.label) {
+            (b.label as string) = newParams.label || oldParams.file.name;
+            return Transformer.UpdateResult.Updated;
+        }
+        return Transformer.UpdateResult.Unchanged;
+    },
+    isSerializable: () => ({ isSerializable: false, reason: 'Cannot serialize user loaded files.' })
+});
+
+
+export { ParsePLY }
+type ParsePLY = typeof ParsePLY
+const ParsePLY = PluginStateTransform.BuiltIn({
+    name: 'parse-ply',
+    display: { name: 'Parse PLY', description: 'Parse PLY from String' },
+    from: [SO.Data.String],
+    to: SO.Format.Ply
+})({
+    apply({ a }) {
+        return Task.create('Parse PLY', async ctx => {
+            const parsed = await (PLY(a.data).runInContext(ctx));
+            if (parsed.isError) throw new Error(parsed.message);
+            return new SO.Format.Ply(parsed.result);
+        });
+    }
+});

+ 2 - 1
src/mol-plugin/index.ts

@@ -11,7 +11,7 @@ import * as React from 'react';
 import * as ReactDOM from 'react-dom';
 import { PluginCommands } from './command';
 import { PluginSpec } from './spec';
-import { DownloadStructure, CreateComplexRepresentation, OpenStructure } from './state/actions/basic';
+import { DownloadStructure, CreateComplexRepresentation, OpenStructure, PLYtest } from './state/actions/basic';
 import { StateTransforms } from './state/transforms';
 import { PluginBehaviors } from './behavior';
 
@@ -24,6 +24,7 @@ const DefaultSpec: PluginSpec = {
     actions: [
         PluginSpec.Action(DownloadStructure),
         PluginSpec.Action(OpenStructure),
+        PluginSpec.Action(PLYtest),
         PluginSpec.Action(CreateComplexRepresentation),
         PluginSpec.Action(StateTransforms.Data.Download),
         PluginSpec.Action(StateTransforms.Data.ParseCif),

+ 19 - 1
src/mol-plugin/state/actions/basic.ts

@@ -84,13 +84,31 @@ export const OpenStructure = StateAction.build({
     const data = b.toRoot().apply(StateTransforms.Data.ReadFile, { file: params.file, isBinary: /\.bcif$/i.test(params.file.name) });
     return state.update(createStructureTree(ctx, data, false));
 });
+import * as data_functions from '../../../mol-io/reader/ply/read_data/data'
+export const PLYtest = StateAction.build({
+    display: { name: 'PLY Test', description: 'nothing ply' },
+    from: PluginStateObject.Root,
+    params: { file: PD.File({ accept: '.ply' }) }
+})(({ params, state }, ctx: PluginContext) => {
+    const b = state.build();
+    const data = b.toRoot().apply(data_functions.ReadFile_ascii, { file: params.file, isBinary: false });
+    return state.update(getPLYdata(ctx, data));
+});
+
+function getPLYdata(ctx: PluginContext, b: StateTreeBuilder.To<PluginStateObject.Data.String>, ): StateTree {
+    let root = b
+        .apply(data_functions.ParsePLY);
+    console.log(data_functions.ParsePLY);
+
+    return root.getTree();
+}
+
 
 function createStructureTree(ctx: PluginContext, b: StateTreeBuilder.To<PluginStateObject.Data.Binary | PluginStateObject.Data.String>, supportProps: boolean): StateTree {
     let root = b
         .apply(StateTransforms.Data.ParseCif)
         .apply(StateTransforms.Model.TrajectoryFromMmCif)
         .apply(StateTransforms.Model.ModelFromTrajectory, { modelIndex: 0 });
-
     if (supportProps) {
         root = root.apply(StateTransforms.Model.CustomModelProperties);
     }

+ 2 - 0
src/mol-plugin/state/objects.ts

@@ -5,6 +5,7 @@
  */
 
 import { CifFile } from 'mol-io/reader/cif';
+import { PlyFile } from 'mol-io/reader/ply/parse_data/data-model';
 import { Model as _Model, Structure as _Structure } from 'mol-model/structure';
 import { VolumeData } from 'mol-model/volume';
 import { PluginBehavior } from 'mol-plugin/behavior/behavior';
@@ -56,6 +57,7 @@ export namespace PluginStateObject {
     export namespace Format {
         export class Json extends Create<any>({ name: 'JSON Data', typeClass: 'Data' }) { }
         export class Cif extends Create<CifFile>({ name: 'CIF File', typeClass: 'Data' }) { }
+        export class Ply extends Create<PlyFile>({ name: 'PLY File', typeClass: 'Data' }) { }
     }
 
     export namespace Molecule {

+ 1 - 1
src/mol-plugin/state/transforms/data.ts

@@ -90,4 +90,4 @@ const ParseCif = PluginStateTransform.BuiltIn({
             return new SO.Format.Cif(parsed.result);
         });
     }
-});
+});

+ 1 - 0
src/tests/browser/index.html

@@ -34,5 +34,6 @@
                 document.body.appendChild(script)
             }
         </script>
+        <script type="text/javascript" src="./render-shape.js"></script>
     </body>
 </html>

+ 7 - 5
src/tests/browser/render-shape.ts

@@ -16,6 +16,8 @@ import { Mesh } from 'mol-geo/geometry/mesh/mesh';
 import { labelFirst } from 'mol-theme/label';
 import { RuntimeContext, Progress } from 'mol-task';
 
+
+
 const parent = document.getElementById('app')!
 parent.style.width = '100%'
 parent.style.height = '100%'
@@ -56,7 +58,7 @@ async function getSphereMesh(ctx: RuntimeContext, centers: number[], mesh?: Mesh
     const builderState = MeshBuilder.createState(centers.length * 128, centers.length * 128 / 2, mesh)
     const t = Mat4.identity()
     const v = Vec3.zero()
-    const sphere = Sphere(2)
+    const sphere = Sphere(4)
     builderState.currentGroup = 0
     for (let i = 0, il = centers.length / 3; i < il; ++i) {
         // for production, calls to update should be guarded by `if (ctx.shouldUpdate)`
@@ -69,8 +71,8 @@ async function getSphereMesh(ctx: RuntimeContext, centers: number[], mesh?: Mesh
 }
 
 const myData = {
-    centers: [0, 0, 0, 0, 3, 0],
-    colors: [ColorNames.tomato, ColorNames.springgreen],
+    centers: [0, 0, 0, 0, 3, 0, 1, 0, 4],
+    colors: [ColorNames.tomato, ColorNames.springgreen, ColorNames.springgreen],
     labels: ['Sphere 0, Instance A', 'Sphere 1, Instance A', 'Sphere 0, Instance B', 'Sphere 1, Instance B'],
     transforms: [Mat4.identity(), Mat4.fromTranslation(Mat4.zero(), Vec3.create(3, 0, 0))]
 }
@@ -96,7 +98,7 @@ async function getShape(ctx: RuntimeContext, data: MyData, props: {}, shape?: Sh
 // Init ShapeRepresentation container
 const repr = ShapeRepresentation(getShape, Mesh.Utils)
 
-async function init() {
+export async function init() {
     // Create shape from myData and add to canvas3d
     await repr.createOrUpdate({}, myData).run((p: Progress) => console.log(Progress.format(p)))
     console.log(repr)
@@ -110,4 +112,4 @@ async function init() {
         await repr.createOrUpdate({}, myData).run()
     }, 1000)
 }
-init()
+export default init();