Просмотр исходного кода

Merge branch 'master' into gl-lines

Alexander Rose 6 лет назад
Родитель
Commit
e09e25b750

+ 2 - 0
package.json

@@ -21,6 +21,8 @@
     "watch-viewer": "webpack build/node_modules/apps/viewer/index.js -w --mode development -o build/viewer/index.js",
     "build-canvas": "webpack build/node_modules/apps/canvas/index.js --mode development -o build/canvas/index.js",
     "watch-canvas": "webpack build/node_modules/apps/canvas/index.js -w --mode development -o build/canvas/index.js",
+    "build-ms-query": "webpack build/node_modules/apps/model-server-query/index.js --mode development -o build/model-server-query/index.js",
+    "watch-ms-query": "webpack build/node_modules/apps/model-server-query/index.js -w --mode development -o build/model-server-query/index.js",
     "model-server": "node build/node_modules/servers/model/server.js",
     "model-server-watch": "nodemon --watch build/node_modules build/node_modules/servers/model/server.js"
   },

+ 12 - 0
src/apps/model-server-query/index.html

@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<html lang="en">
+    <head>
+        <meta charset="utf-8" />
+        <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
+        <title>Mol* ModelServer Query Builder</title>
+    </head>
+    <body>
+        <div id="app"></div>
+        <script type="text/javascript" src="./index.js"></script>
+    </body>
+</html>

+ 134 - 0
src/apps/model-server-query/index.tsx

@@ -0,0 +1,134 @@
+/**
+ * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
+ *
+ * @author David Sehnal <david.sehnal@gmail.com>
+ */
+
+import * as React from 'react'
+import * as ReactDOM from 'react-dom'
+import * as Rx from 'rxjs'
+
+import { QueryDefinition, QueryList } from '../../servers/model/server/api'
+
+import './index.html'
+
+interface State {
+    query: Rx.BehaviorSubject<QueryDefinition>,
+    id: Rx.BehaviorSubject<string>,
+    params: Rx.BehaviorSubject<any>,
+    isBinary: Rx.BehaviorSubject<boolean>,
+    models: Rx.BehaviorSubject<number[]>,
+    url: Rx.Subject<string>
+}
+
+class Root extends React.Component<{ state: State }, {  }> {
+    render() {
+        return <div>
+            <div>
+                Query: <QuerySelect state={this.props.state} />
+            </div>
+            <div>
+                ID: <input type='text' onChange={t => this.props.state.id.next(t.currentTarget.value)} />
+            </div>
+            <div>
+                Params:<br/>
+                <QueryParams state={this.props.state} />
+            </div>
+            <div>
+                Model numbers (empty for all): <ModelNums state={this.props.state} />
+            </div>
+            <div>
+                <input type='checkbox' onChange={t => this.props.state.isBinary.next(!!t.currentTarget.checked)} /> Binary
+            </div>
+            <div>
+                Query string:
+                <QueryUrl state={this.props.state} />
+            </div>
+        </div>
+    }
+}
+
+class QuerySelect extends React.Component<{ state: State }> {
+    render() {
+        return <select onChange={s => this.props.state.query.next(QueryList[+s.currentTarget.value].definition)}>
+            { QueryList.map((q, i) => <option value={i} key={i} selected={i === 1}>{q.definition.niceName}</option>) }
+        </select>
+    }
+}
+
+class QueryParams extends React.Component<{ state: State }, { prms: string }> {
+    state = { prms: '' };
+
+    parseParams(str: string) {
+        this.setState({ prms: str });
+        try {
+            const params = JSON.parse(str);
+            this.props.state.params.next(params);
+        } catch {
+            this.props.state.params.next({});
+        }
+    }
+
+    componentDidMount() {
+        this.props.state.query.subscribe(q => this.setState({ prms: formatParams(q) }))
+    }
+
+    render() {
+        return <textarea style={{height: '300px'}} value={this.state.prms} cols={80} onChange={t => this.parseParams(t.currentTarget.value)} />;
+    }
+}
+
+class QueryUrl extends React.Component<{ state: State }, { queryString: string }> {
+    state = { queryString: '' };
+
+    componentDidMount() {
+        this.props.state.url.subscribe(url => this.setState({ queryString: url }))
+    }
+
+    render() {
+        return <input type='text' value={this.state.queryString} style={{ width: '800px' }} />
+    }
+}
+
+class ModelNums extends React.Component<{ state: State }> {
+    render() {
+        return <input type='text' defaultValue='1' style={{ width: '300px' }} onChange={t =>
+            this.props.state.models.next(t.currentTarget.value.split(',')
+                .map(v => v.trim())
+                .filter(v => !!v)
+                .map(v => +v)
+                )} />
+    }
+}
+
+const state: State = {
+    query: new Rx.BehaviorSubject(QueryList[1].definition),
+    id: new Rx.BehaviorSubject('1cbs'),
+    params: new Rx.BehaviorSubject({ }),
+    isBinary: new Rx.BehaviorSubject(false),
+    models: new Rx.BehaviorSubject<number[]>([]),
+    url: new Rx.Subject()
+}
+
+function formatParams(def: QueryDefinition) {
+    const prms = Object.create(null);
+    for (const p of def.params) {
+        prms[p.name] = p.exampleValues ? p.exampleValues[0] : void 0;
+    }
+    return JSON.stringify(prms, void 0, 2);
+}
+
+function formatUrl() {
+    const json = JSON.stringify({
+        name: state.query.value.name,
+        id: state.id.value,
+        modelNums: state.models.value.length ? state.models.value : void 0,
+        binary: state.isBinary.value,
+        params: state.params.value
+    });
+    state.url.next(encodeURIComponent(json));
+}
+
+Rx.merge(state.query, state.id, state.params, state.isBinary, state.models).subscribe(s => formatUrl());
+
+ReactDOM.render(<Root state={state} />, document.getElementById('app'));

+ 9 - 6
src/mol-model-props/pdbe/structure-quality-report.ts

@@ -5,7 +5,7 @@
  */
 
 import { CifWriter } from 'mol-io/writer/cif';
-import { Model, ModelPropertyDescriptor, ResidueIndex, Unit, ResidueCustomProperty } from 'mol-model/structure';
+import { Model, ModelPropertyDescriptor, ResidueIndex, Unit, ResidueCustomProperty, StructureProperties as P } from 'mol-model/structure';
 import { residueIdFields } from 'mol-model/structure/export/categories/atom_site';
 import CifField = CifWriter.Field;
 import { mmCIF_residueId_schema } from 'mol-io/reader/cif/schema/mmcif-extras';
@@ -21,7 +21,7 @@ import Type from 'mol-script/language/type';
 type IssueMap = ResidueCustomProperty<string[]>
 
 const _Descriptor = ModelPropertyDescriptor({
-    isStatic: true,
+    isStatic: false,
     name: 'structure_quality_report',
     cifExport: {
         prefix: 'pdbe',
@@ -50,6 +50,7 @@ type ExportCtx = ResidueCustomProperty.ExportCtx<string[]>
 const _structure_quality_report_issues_fields: CifField<number, ExportCtx>[] = [
     CifField.index('id'),
     ...residueIdFields<number, ExportCtx>((i, d) => d.elements[i]),
+    CifField.int<number, ExportCtx>('pdbx_PDB_model_num', (i, d) => P.unit.model_num(d.elements[i])),
     CifField.str<number, ExportCtx>('issues', (i, d) => d.property(i).join(','))
 ];
 
@@ -83,9 +84,10 @@ function createIssueMapFromJson(modelData: Model, data: any): IssueMap | undefin
 
 function createIssueMapFromCif(modelData: Model, data: Table<typeof StructureQualityReport.Schema.pdbe_structure_quality_report_issues>): IssueMap | undefined {
     const ret = new Map<ResidueIndex, string[]>();
-    const { label_entity_id, label_asym_id, auth_seq_id, pdbx_PDB_ins_code, issues, _rowCount } = data;
+    const { label_entity_id, label_asym_id, auth_seq_id, pdbx_PDB_ins_code, issues, pdbx_PDB_model_num, _rowCount } = data;
 
     for (let i = 0; i < _rowCount; i++) {
+        if (pdbx_PDB_model_num.value(i) !== modelData.modelNum) continue;
         const idx = modelData.atomicHierarchy.index.findResidue(label_entity_id.value(i), label_asym_id.value(i), auth_seq_id.value(i), pdbx_PDB_ins_code.value(i));
         ret.set(idx, issues.value(i));
     }
@@ -103,6 +105,7 @@ export namespace StructureQualityReport {
         pdbe_structure_quality_report_issues: {
             id: Column.Schema.int,
             ...mmCIF_residueId_schema,
+            pdbx_PDB_model_num: Column.Schema.int,
             issues: Column.Schema.List(',', x => x)
         }
     }
@@ -111,7 +114,7 @@ export namespace StructureQualityReport {
         // provide JSON from api
         PDBe_apiSourceJson?: (model: Model) => Promise<any>
     }) {
-        if (model.customProperties.has(Descriptor)) return true;
+        if (get(model)) return true;
 
         let issueMap;
 
@@ -129,12 +132,12 @@ export namespace StructureQualityReport {
         }
 
         model.customProperties.add(Descriptor);
-        model._staticPropertyData.__StructureQualityReport__ = issueMap;
+        model._dynamicPropertyData.__StructureQualityReport__ = issueMap;
         return true;
     }
 
     export function get(model: Model): IssueMap | undefined {
-        return model._staticPropertyData.__StructureQualityReport__;
+        return model._dynamicPropertyData.__StructureQualityReport__;
     }
 
     const _emptyArray: string[] = [];

+ 9 - 8
src/mol-model/structure/export/mmcif.ts

@@ -21,8 +21,9 @@ export interface CifExportContext {
 }
 
 export namespace CifExportContext {
-    export function create(structure: Structure, model: Model): CifExportContext {
-        return { structure, model, cache: Object.create(null) };
+    export function create(structures: Structure | Structure[]): CifExportContext[] {
+        if (Array.isArray(structures)) return structures.map(structure => ({ structure, model: structure.models[0], cache: Object.create(null) }));
+        return [{ structure: structures, model: structures.models[0], cache: Object.create(null) }];
     }
 }
 
@@ -99,20 +100,20 @@ export const mmCIF_Export_Filters = {
 }
 
 /** Doesn't start a data block */
-export function encode_mmCIF_categories(encoder: CifWriter.Encoder, structure: Structure, params?: { skipCategoryNames?: Set<string>, exportCtx?: CifExportContext }) {
-    const models = structure.models;
+export function encode_mmCIF_categories(encoder: CifWriter.Encoder, structures: Structure | Structure[], params?: { skipCategoryNames?: Set<string>, exportCtx?: CifExportContext[] }) {
+    const first = Array.isArray(structures) ? structures[0] : (structures as Structure);
+    const models = first.models;
     if (models.length !== 1) throw 'Can\'t export stucture composed from multiple models.';
-    const model = models[0];
 
     const _params = params || { };
-
-    const ctx: CifExportContext[] = [_params.exportCtx ? _params.exportCtx : CifExportContext.create(structure, model)];
+    const ctx: CifExportContext[] = params && params.exportCtx ? params.exportCtx : CifExportContext.create(structures);
 
     for (const cat of Categories) {
         if (_params.skipCategoryNames && _params.skipCategoryNames.has(cat.name)) continue;
         encoder.writeCategory(cat, ctx);
     }
-    for (const customProp of model.customProperties.all) {
+
+    for (const customProp of models[0].customProperties.all) {
         if (!customProp.cifExport || customProp.cifExport.categories.length === 0) continue;
 
         const prefix = customProp.cifExport.prefix;

+ 2 - 1
src/mol-model/structure/model/formats/mmcif.ts

@@ -146,7 +146,8 @@ function createStandardModel(format: mmCIF_Format, atom_site: AtomSite, entities
             ...previous,
             id: UUID.create(),
             modelNum: atom_site.pdbx_PDB_model_num.value(0),
-            atomicConformation: atomic.conformation
+            atomicConformation: atomic.conformation,
+            _dynamicPropertyData: Object.create(null)
         };
     }
 

+ 27 - 19
src/servers/model/preprocess/master.ts

@@ -7,8 +7,7 @@
 import * as fs from 'fs'
 import * as path from 'path'
 import * as argparse from 'argparse'
-import { preprocessFile } from './preprocess';
-import { ParallelPreprocessConfig, runMaster } from './parallel';
+import { runMaster, PreprocessEntry } from './parallel';
 
 const cmdParser = new argparse.ArgumentParser({
     addHelp: true,
@@ -17,14 +16,16 @@ const cmdParser = new argparse.ArgumentParser({
 cmdParser.addArgument(['--input', '-i'], { help: 'Input filename', required: false });
 cmdParser.addArgument(['--outCIF', '-oc'], { help: 'Output CIF filename', required: false });
 cmdParser.addArgument(['--outBCIF', '-ob'], { help: 'Output BinaryCIF filename', required: false });
-cmdParser.addArgument(['--bulk', '-b'], { help: 'Bulk JSON ({ numProcesses?: number, entries: { source: string, cif?: string, bcif?: string }[] })', required: false });
-cmdParser.addArgument(['--folderIn', '-f'], { help: 'Convert folder', required: false });
+// TODO: add back? cmdParser.addArgument(['--bulk', '-b'], { help: 'Bulk JSON ({ numProcesses?: number, entries: { source: string, cif?: string, bcif?: string }[] })', required: false });
+cmdParser.addArgument(['--cfg', '-c'], { help: 'Config file path', required: false });
+cmdParser.addArgument(['--folderIn', '-fin'], { help: 'Convert folder', required: false });
 cmdParser.addArgument(['--folderOutCIF', '-foc'], { help: 'Convert folder text output', required: false });
 cmdParser.addArgument(['--folderOutBCIF', '-fob'], { help: 'Convert folder binary output', required: false });
 cmdParser.addArgument(['--folderNumProcesses', '-fp'], { help: 'Convert folder num processes', required: false });
 
 interface CmdArgs {
-    bulk?: string,
+    // bulk?: string,
+    cfg?: string,
     input?: string,
     outCIF?: string,
     outBCIF?: string,
@@ -34,31 +35,38 @@ interface CmdArgs {
     folderNumProcesses?: string
 }
 
+export interface PreprocessConfig {
+    numProcesses?: number,
+    customPropertyProviders?: string[]
+}
+
 const cmdArgs = cmdParser.parseArgs() as CmdArgs;
 
-if (cmdArgs.input) preprocessFile(cmdArgs.input, cmdArgs.outCIF, cmdArgs.outBCIF);
-else if (cmdArgs.bulk) runBulk(cmdArgs.bulk);
-else if (cmdArgs.folderIn) runFolder(cmdArgs);
+let entries: PreprocessEntry[] = []
+let config: PreprocessConfig = { numProcesses: 1, customPropertyProviders: [] }
 
-function runBulk(input: string) {
-    const config = JSON.parse(fs.readFileSync(input, 'utf8')) as ParallelPreprocessConfig;
-    runMaster(config);
+if (cmdArgs.input) entries.push({ source: cmdArgs.input, cif: cmdArgs.outCIF, bcif: cmdArgs.outBCIF });
+// else if (cmdArgs.bulk) runBulk(cmdArgs.bulk);
+else if (cmdArgs.folderIn) findEntries();
+
+if (cmdArgs.cfg) {
+    config = JSON.parse(fs.readFileSync(cmdArgs.cfg, 'utf8')) as PreprocessConfig;
 }
 
-function runFolder(args: CmdArgs) {
-    const files = fs.readdirSync(args.folderIn!);
-    const config: ParallelPreprocessConfig = { numProcesses: +args.folderNumProcesses! || 1, entries: [] };
+runMaster(config, entries);
+
+function findEntries() {
+    const files = fs.readdirSync(cmdArgs.folderIn!);
     const cifTest = /\.cif$/;
     for (const f of files) {
         if (!cifTest.test(f)) continue;
 
-        config.entries.push({
-            source: path.join(args.folderIn!, f),
-            cif: cmdArgs.folderOutCIF ? path.join(args.folderOutCIF!, f) : void 0,
-            bcif: cmdArgs.folderOutBCIF ? path.join(args.folderOutBCIF!, path.parse(f).name + '.bcif') : void 0,
+        entries.push({
+            source: path.join(cmdArgs.folderIn!, f),
+            cif: cmdArgs.folderOutCIF ? path.join(cmdArgs.folderOutCIF!, f) : void 0,
+            bcif: cmdArgs.folderOutBCIF ? path.join(cmdArgs.folderOutBCIF!, path.parse(f).name + '.bcif') : void 0,
         });
     }
-    runMaster(config);
 }
 
 // example:

+ 30 - 19
src/servers/model/preprocess/parallel.ts

@@ -9,6 +9,9 @@ import * as cluster from 'cluster'
 import { now } from 'mol-task';
 import { PerformanceMonitor } from 'mol-util/performance-monitor';
 import { preprocessFile } from './preprocess';
+import { createModelPropertiesProviderFromSources } from '../property-provider';
+
+type PreprocessConfig = import('./master').PreprocessConfig
 
 export interface PreprocessEntry {
     source: string,
@@ -16,43 +19,41 @@ export interface PreprocessEntry {
     bcif?: string
 }
 
-export interface ParallelPreprocessConfig {
-    numProcesses?: number,
-    entries: PreprocessEntry[]
-}
-
-export function runMaster(config: ParallelPreprocessConfig) {
-    const parts = partitionArray(config.entries, config.numProcesses || 1);
-    // const numForks = Math.min(parts.length, config.numProcesses);
-
+export function runMaster(config: PreprocessConfig, entries: PreprocessEntry[]) {
     const started = now();
     let progress = 0;
     const onMessage = (msg: any) => {
         if (msg.type === 'tick') {
             progress++;
             const elapsed = now() - started;
-            console.log(`[${progress}/${config.entries.length}] in ${PerformanceMonitor.format(elapsed)} (avg ${PerformanceMonitor.format(elapsed / progress)}).`);
+            console.log(`[${progress}/${entries.length}] in ${PerformanceMonitor.format(elapsed)} (avg ${PerformanceMonitor.format(elapsed / progress)}).`);
         } else if (msg.type === 'error') {
             console.error(`${msg.id}: ${msg.error}`)
         }
     }
 
-    for (const _ of parts) {
-        const worker = cluster.fork();
-        worker.on('message', onMessage);
-    }
+    if (entries.length === 1) {
+        runSingle(entries[0], config, onMessage);
+    } else {
+        const parts = partitionArray(entries, config.numProcesses || 1);
+        for (const _ of parts) {
+            const worker = cluster.fork();
+            worker.on('message', onMessage);
+        }
 
-    let i = 0;
-    for (const id in cluster.workers) {
-        cluster.workers[id]!.send(parts[i++]);
+        let i = 0;
+        for (const id in cluster.workers) {
+            cluster.workers[id]!.send({ entries: parts[i++], config });
+        }
     }
 }
 
 export function runChild() {
-    process.on('message', async (entries: PreprocessEntry[]) => {
+    process.on('message', async ({ entries, config }: { entries: PreprocessEntry[], config: PreprocessConfig }) => {
+        const props = createModelPropertiesProviderFromSources(config.customPropertyProviders || []);
         for (const entry of entries) {
             try {
-                await preprocessFile(entry.source, entry.cif, entry.bcif);
+                await preprocessFile(entry.source, props, entry.cif, entry.bcif);
             } catch (e) {
                 process.send!({ type: 'error', id: path.parse(entry.source).name, error: '' + e });
             }
@@ -62,6 +63,16 @@ export function runChild() {
     });
 }
 
+async function runSingle(entry: PreprocessEntry, config: PreprocessConfig, onMessage: (msg: any) => void) {
+    const props = createModelPropertiesProviderFromSources(config.customPropertyProviders || []);
+    try {
+        await preprocessFile(entry.source, props, entry.cif, entry.bcif);
+    } catch (e) {
+        onMessage({ type: 'error', id: path.parse(entry.source).name, error: '' + e });
+    }
+    onMessage({ type: 'tick' });
+}
+
 function partitionArray<T>(xs: T[], count: number): T[][] {
     const ret: T[][] = [];
     const s = Math.ceil(xs.length / count);

+ 17 - 37
src/servers/model/preprocess/preprocess.ts

@@ -4,65 +4,45 @@
  * @author David Sehnal <david.sehnal@gmail.com>
  */
 
-import { readStructure } from '../server/structure-wrapper';
+import { readStructureWrapper, resolveStructures } from '../server/structure-wrapper';
 import { classifyCif } from './converter';
-// import { ConsoleLogger } from 'mol-util/console-logger';
 import { Structure } from 'mol-model/structure';
 import { CifWriter } from 'mol-io/writer/cif';
 import Writer from 'mol-io/writer/writer';
 import { wrapFileToWriter } from '../server/api-local';
-import { Task/*, now*/ } from 'mol-task';
-import { /*showProgress, clearLine */ } from './util';
 import { encode_mmCIF_categories, CifExportContext } from 'mol-model/structure/export/mmcif';
+import { ModelPropertiesProvider } from '../property-provider';
 
 // TODO: error handling
-// let linearId = 0;
 
-export async function preprocessFile(filename: string, outputCif?: string, outputBcif?: string) {
-    // linearId++;
-
-    //const started = now();
-    //ConsoleLogger.log(`${linearId}`, `Reading '${filename}'...`);
-    // TODO: support the custom prop provider list here.
-    const input = await readStructure('entry', '_local_', filename, void 0);
-    //ConsoleLogger.log(`${linearId}`, `Classifying CIF categories...`);
+export async function preprocessFile(filename: string, propertyProvider?: ModelPropertiesProvider, outputCif?: string, outputBcif?: string) {
+    const input = await readStructureWrapper('entry', '_local_', filename, propertyProvider);
     const categories = await classifyCif(input.cifFrame);
-    //clearLine();
-
-    const exportCtx = CifExportContext.create(input.structure, input.structure.models[0]);
+    const inputStructures = (await resolveStructures(input))!;
+    const exportCtx = CifExportContext.create(inputStructures);
 
     if (outputCif) {
-        //ConsoleLogger.log(`${linearId}`, `Encoding CIF...`);
         const writer = wrapFileToWriter(outputCif);
         const encoder = CifWriter.createEncoder({ binary: false });
-        await encode(input.structure, input.cifFrame.header, categories, encoder, exportCtx, writer);
-        // clearLine();
+        encode(inputStructures[0], input.cifFrame.header, categories, encoder, exportCtx, writer);
         writer.end();
     }
 
     if (outputBcif) {
-        // ConsoleLogger.log(`${linearId}`, `Encoding BinaryCIF...`);
         const writer = wrapFileToWriter(outputBcif);
         const encoder = CifWriter.createEncoder({ binary: true, binaryAutoClassifyEncoding: true });
-        await encode(input.structure, input.cifFrame.header, categories, encoder, exportCtx, writer);
-        //clearLine();
+        encode(inputStructures[0], input.cifFrame.header, categories, encoder, exportCtx, writer);
         writer.end();
     }
-    // ConsoleLogger.log(`${linearId}`, `Finished '${filename}' in ${Math.round(now() - started)}ms`);
 }
 
-function encode(structure: Structure, header: string, categories: CifWriter.Category[], encoder: CifWriter.Encoder, exportCtx: CifExportContext, writer: Writer) {
-    return Task.create('Encode', async ctx => {
-        const skipCategoryNames = new Set<string>(categories.map(c => c.name));
-        encoder.startDataBlock(header);
-        // let current = 0;
-        for (const cat of categories){
-            encoder.writeCategory(cat);
-            // current++;
-            // if (ctx.shouldUpdate) await ctx.update({ message: 'Encoding...', current, max: categories.length });
-        }
-        encode_mmCIF_categories(encoder, structure, { skipCategoryNames, exportCtx });
-        encoder.encode();
-        encoder.writeTo(writer);
-    }).run();
+function encode(structure: Structure, header: string, categories: CifWriter.Category[], encoder: CifWriter.Encoder, exportCtx: CifExportContext[], writer: Writer) {
+    const skipCategoryNames = new Set<string>(categories.map(c => c.name));
+    encoder.startDataBlock(header);
+    for (const cat of categories) {
+        encoder.writeCategory(cat);
+    }
+    encode_mmCIF_categories(encoder, structure, { skipCategoryNames, exportCtx });
+    encoder.encode();
+    encoder.writeTo(writer);
 }

+ 2 - 2
src/servers/model/properties/pdbe.ts

@@ -8,10 +8,10 @@
 import { Model } from 'mol-model/structure';
 import { PDBe_structureQualityReport } from './providers/pdbe';
 
-export function attachModelProperties(model: Model): Promise<any>[] {
+export function attachModelProperties(model: Model, cache: object): Promise<any>[] {
     // return a list of promises that start attaching the props in parallel
     // (if there are downloads etc.)
     return [
-        PDBe_structureQualityReport(model)
+        PDBe_structureQualityReport(model, cache)
     ];
 }

+ 7 - 2
src/servers/model/properties/providers/pdbe.ts

@@ -7,12 +7,17 @@
  import { Model } from 'mol-model/structure';
 import { StructureQualityReport } from 'mol-model-props/pdbe/structure-quality-report';
 import { fetchRetry } from '../../utils/fetch-retry';
+import { UUID } from 'mol-util';
 
-export function PDBe_structureQualityReport(model: Model) {
+const cacheKey = UUID.create();
+export function PDBe_structureQualityReport(model: Model, cache: any) {
     return StructureQualityReport.attachFromCifOrApi(model, {
         PDBe_apiSourceJson: async model => {
+            if (cache[cacheKey]) return cache[cacheKey];
             const rawData = await fetchRetry(`https://www.ebi.ac.uk/pdbe/api/validation/residuewise_outlier_summary/entry/${model.label.toLowerCase()}`, 1500, 5);
-            return await rawData.json();
+            const json = await rawData.json();
+            cache[cacheKey] = json;
+            return json;
         }
     });
 }

+ 9 - 6
src/servers/model/provider.ts → src/servers/model/property-provider.ts

@@ -7,22 +7,25 @@
 import { Model } from 'mol-model/structure';
 import Config from './config';
 
-export type ModelPropertiesProvider = (model: Model) => Promise<any>[]
+export type ModelPropertiesProvider = (model: Model, cache: object) => Promise<any>[]
 
 export function createModelPropertiesProviderFromConfig(): ModelPropertiesProvider {
-    if (!Config.customPropertyProviders || Config.customPropertyProviders.length === 0) return () => [];
+    return createModelPropertiesProviderFromSources(Config.customPropertyProviders);
+}
+
+export function createModelPropertiesProviderFromSources(sources: string[]): ModelPropertiesProvider {
+    if (!sources || sources.length === 0) return () => [];
 
     const ps: ModelPropertiesProvider[] = [];
-    for (const p of Config.customPropertyProviders) {
+    for (const p of sources) {
         ps.push(require(p).attachModelProperties);
     }
 
-    return model => {
+    return (model, cache) => {
         const ret: Promise<any>[] = [];
         for (const p of ps) {
-            for (const e of p(model)) ret.push(e);
+            for (const e of p(model, cache)) ret.push(e);
         }
         return ret;
     }
 }
-

+ 2 - 1
src/servers/model/server/api-local.ts

@@ -17,6 +17,7 @@ export type LocalInput = {
     input: string,
     output: string,
     query: string,
+    modelNums?: number[],
     params?: any
 }[];
 
@@ -27,7 +28,7 @@ export async function runLocal(input: LocalInput) {
     }
 
     for (const job of input) {
-        JobManager.add('_local_', job.input, job.query, job.params || { }, job.output);
+        JobManager.add('_local_', job.input, job.query, job.params || { }, job.modelNums, job.output);
     }
     JobManager.sort();
 

+ 1 - 1
src/servers/model/server/api-web.ts

@@ -103,7 +103,7 @@ export function initWebApi(app: express.Express) {
         const name = args.name;
         const entryId = args.id;
         const params = args.params || { };
-        const jobId = JobManager.add('pdb', entryId, name, params);
+        const jobId = JobManager.add('pdb', entryId, name, params, args.modelNums);
         responseMap.set(jobId, res);
         if (JobManager.size === 1) processNextJob();
     });

+ 6 - 5
src/servers/model/server/api.ts

@@ -20,7 +20,7 @@ export interface QueryParamInfo {
     description?: string,
     required?: boolean,
     defaultValue?: any,
-    exampleValues?: string[],
+    exampleValues?: any[],
     validation?: (v: any) => void
 }
 
@@ -50,15 +50,15 @@ export interface QueryDefinition {
 const AtomSiteTestParams: QueryParamInfo = {
     name: 'atom_site',
     type: QueryParamType.JSON,
-    description: 'Object or array of objects describing atom properties. Name are same as in wwPDB mmCIF dictionary of the atom_site category.',
-    exampleValues: [`{ label_comp_id: 'ALA' }`, `{ label_seq_id: 123, label_asym_id: 'A' }`]
+    description: 'Object or array of objects describing atom properties. Names are same as in wwPDB mmCIF dictionary of the atom_site category.',
+    exampleValues: [{ label_comp_id: 'ALA' }, { label_seq_id: 123, label_asym_id: 'A' }]
 };
 
 const RadiusParam: QueryParamInfo = {
     name: 'radius',
     type: QueryParamType.Float,
     defaultValue: 5,
-    exampleValues: ['5'],
+    exampleValues: [5],
     description: 'Value in Angstroms.',
     validation(v: any) {
         if (v < 1 || v > 10) {
@@ -82,13 +82,14 @@ const QueryMap: { [id: string]: Partial<QueryDefinition> } = {
         structureTransform(p, s) {
             return StructureSymmetry.builderSymmetryMates(s, p.radius).run();
         },
+        params: [ RadiusParam ]
     },
     'assembly': {
         niceName: 'Assembly',
         description: 'Computes structural assembly.',
         query: () => Queries.generators.all,
         structureTransform(p, s) {
-            return StructureSymmetry.buildAssembly(s, '' + p.name).run();
+            return StructureSymmetry.buildAssembly(s, '' + (p.name || '1')).run();
         },
         params: [{
             name: 'name',

+ 5 - 3
src/servers/model/server/jobs.ts

@@ -23,11 +23,12 @@ export interface Job {
     queryDefinition: QueryDefinition,
     normalizedParams: any,
     responseFormat: ResponseFormat,
+    modelNums?: number[],
 
     outputFilename?: string
 }
 
-export function createJob(sourceId: '_local_' | string, entryId: string, queryName: string, params: any, outputFilename?: string): Job {
+export function createJob(sourceId: '_local_' | string, entryId: string, queryName: string, params: any, modelNums?: number[], outputFilename?: string): Job {
     const queryDefinition = getQueryByName(queryName);
     if (!queryDefinition) throw new Error(`Query '${queryName}' is not supported.`);
 
@@ -42,6 +43,7 @@ export function createJob(sourceId: '_local_' | string, entryId: string, queryNa
         queryDefinition,
         normalizedParams,
         responseFormat: { isBinary: !!params.binary },
+        modelNums,
         outputFilename
     };
 }
@@ -53,8 +55,8 @@ class _JobQueue {
         return this.list.count;
     }
 
-    add(sourceId: '_local_' | string, entryId: string, queryName: string, params: any, outputFilename?: string) {
-        const job = createJob(sourceId, entryId, queryName, params, outputFilename);
+    add(sourceId: '_local_' | string, entryId: string, queryName: string, params: any, modelNums?: number[], outputFilename?: string) {
+        const job = createJob(sourceId, entryId, queryName, params, modelNums, outputFilename);
         this.list.addLast(job);
         return job.id;
     }

+ 29 - 12
src/servers/model/server/query.ts

@@ -6,7 +6,7 @@
 
 import { Column } from 'mol-data/db';
 import { CifWriter } from 'mol-io/writer/cif';
-import { StructureQuery, StructureSelection } from 'mol-model/structure';
+import { StructureQuery, StructureSelection, Structure } from 'mol-model/structure';
 import { encode_mmCIF_categories } from 'mol-model/structure/export/mmcif';
 import { now, Progress } from 'mol-task';
 import { ConsoleLogger } from 'mol-util/console-logger';
@@ -14,9 +14,9 @@ import { PerformanceMonitor } from 'mol-util/performance-monitor';
 import Config from '../config';
 import Version from '../version';
 import { Job } from './jobs';
-import { getStructure, StructureWrapper } from './structure-wrapper';
+import { createStructureWrapperFromJob, StructureWrapper, resolveStructures } from './structure-wrapper';
 import CifField = CifWriter.Field
-import { createModelPropertiesProviderFromConfig } from '../provider';
+import { createModelPropertiesProviderFromConfig, ModelPropertiesProvider } from '../property-provider';
 
 export interface Stats {
     structure: StructureWrapper,
@@ -26,20 +26,37 @@ export interface Stats {
 
 const perf = new PerformanceMonitor();
 
-const propertyProvider = createModelPropertiesProviderFromConfig();
+let _propertyProvider: ModelPropertiesProvider;
+function propertyProvider() {
+    if (_propertyProvider) return _propertyProvider;
+    _propertyProvider = createModelPropertiesProviderFromConfig();
+    return _propertyProvider;
+}
 
 export async function resolveJob(job: Job): Promise<CifWriter.Encoder<any>> {
     ConsoleLogger.logId(job.id, 'Query', 'Starting.');
 
-    const wrappedStructure = await getStructure(job, propertyProvider);
+    const wrappedStructure = await createStructureWrapperFromJob(job, propertyProvider());
 
     try {
         perf.start('query');
-        const structure = job.queryDefinition.structureTransform
-            ? await job.queryDefinition.structureTransform(job.normalizedParams, wrappedStructure.structure)
-            : wrappedStructure.structure;
-        const query = job.queryDefinition.query(job.normalizedParams, structure);
-        const result = await StructureSelection.unionStructure(StructureQuery.run(query, structure, Config.maxQueryTimeInMs));
+        const sourceStructures = await resolveStructures(wrappedStructure, job.modelNums);
+        if (!sourceStructures.length) throw new Error('Model not available');
+
+        let structures: Structure[] = sourceStructures;
+
+        if (job.queryDefinition.structureTransform) {
+            structures = [];
+            for (const s of sourceStructures) {
+                structures.push(await job.queryDefinition.structureTransform(job.normalizedParams, s));
+            }
+        }
+
+        const queries = structures.map(s => job.queryDefinition.query(job.normalizedParams, s));
+        const result: Structure[] = [];
+        for (let i = 0; i < structures.length; i++) {
+            result.push(await StructureSelection.unionStructure(StructureQuery.run(queries[i], structures[i], Config.maxQueryTimeInMs)));
+        }
         perf.end('query');
 
         const encoder = CifWriter.createEncoder({
@@ -52,7 +69,7 @@ export async function resolveJob(job: Job): Promise<CifWriter.Encoder<any>> {
         ConsoleLogger.logId(job.id, 'Query', 'Query finished.');
 
         perf.start('encode');
-        encoder.startDataBlock(structure.units[0].model.label.toUpperCase());
+        encoder.startDataBlock(sourceStructures[0].models[0].label.toUpperCase());
         encoder.writeCategory(_model_server_result, [job]);
         encoder.writeCategory(_model_server_params, [job]);
 
@@ -130,7 +147,7 @@ const _model_server_error_fields: CifField<number, string>[] = [
 const _model_server_stats_fields: CifField<number, Stats>[] = [
     int32<Stats>('io_time_ms', ctx => ctx.structure.info.readTime | 0),
     int32<Stats>('parse_time_ms', ctx => ctx.structure.info.parseTime | 0),
-    int32<Stats>('attach_props_time_ms', ctx => ctx.structure.info.attachPropsTime | 0),
+    // int32<Stats>('attach_props_time_ms', ctx => ctx.structure.info.attachPropsTime | 0),
     int32<Stats>('create_model_time_ms', ctx => ctx.structure.info.createModelTime | 0),
     int32<Stats>('query_time_ms', ctx => ctx.queryTimeMs | 0),
     int32<Stats>('encode_time_ms', ctx => ctx.encodeTimeMs | 0)

+ 47 - 19
src/servers/model/server/structure-wrapper.ts

@@ -14,7 +14,7 @@ import * as fs from 'fs'
 import * as zlib from 'zlib'
 import { Job } from './jobs';
 import { ConsoleLogger } from 'mol-util/console-logger';
-import { ModelPropertiesProvider } from '../provider';
+import { ModelPropertiesProvider } from '../property-provider';
 
 require('util.promisify').shim();
 
@@ -36,20 +36,23 @@ export interface StructureInfo {
 
 export interface StructureWrapper {
     info: StructureInfo,
-
     isBinary: boolean,
     key: string,
     approximateSize: number,
-    structure: Structure,
-    cifFrame: CifFrame
+    models: ArrayLike<Model>,
+    modelMap: Map<number, Model>,
+    structureModelMap: Map<number, Structure>,
+    propertyProvider: ModelPropertiesProvider | undefined,
+    cifFrame: CifFrame,
+    cache: object
 }
 
-export async function getStructure(job: Job, propertyProvider: ModelPropertiesProvider | undefined, allowCache = true): Promise<StructureWrapper> {
+export async function createStructureWrapperFromJob(job: Job, propertyProvider: ModelPropertiesProvider | undefined, allowCache = true): Promise<StructureWrapper> {
     if (allowCache && Config.cacheParams.useCache) {
         const ret = StructureCache.get(job.key);
         if (ret) return ret;
     }
-    const ret = await readStructure(job.key, job.sourceId, job.entryId, propertyProvider);
+    const ret = await readStructureWrapper(job.key, job.sourceId, job.entryId, propertyProvider);
     if (allowCache && Config.cacheParams.useCache) {
         StructureCache.add(ret);
     }
@@ -86,7 +89,7 @@ async function parseCif(data: string|Uint8Array) {
     return parsed.result;
 }
 
-export async function readStructure(key: string, sourceId: string | '_local_', entryId: string, propertyProvider: ModelPropertiesProvider | undefined) {
+export async function readStructureWrapper(key: string, sourceId: string | '_local_', entryId: string, propertyProvider: ModelPropertiesProvider | undefined) {
     const filename = sourceId === '_local_' ? entryId : Config.mapFile(sourceId, entryId);
     if (!filename) throw new Error(`Could not map '${key}' to a valid filename.`);
     if (!fs.existsSync(filename)) throw new Error(`Could not find source file for '${key}'.`);
@@ -108,16 +111,10 @@ export async function readStructure(key: string, sourceId: string | '_local_', e
     const models = await Model.create(Format.mmCIF(frame)).run();
     perf.end('createModel');
 
-    perf.start('attachProps');
-    if (propertyProvider) {
-        const modelProps = propertyProvider(models[0]);
-        for (const p of modelProps) {
-            await tryAttach(key, p);
-        }
+    const modelMap = new Map<number, Model>();
+    for (const m of models) {
+        modelMap.set(m.modelNum, m);
     }
-    perf.end('attachProps');
-
-    const structure = Structure.ofModel(models[0]);
 
     const ret: StructureWrapper = {
         info: {
@@ -125,20 +122,51 @@ export async function readStructure(key: string, sourceId: string | '_local_', e
             readTime: perf.time('read'),
             parseTime: perf.time('parse'),
             createModelTime: perf.time('createModel'),
-            attachPropsTime: perf.time('attachProps'),
+            attachPropsTime: 0, // perf.time('attachProps'),
             sourceId,
             entryId
         },
         isBinary: /\.bcif/.test(filename),
         key,
         approximateSize: typeof data === 'string' ? 2 * data.length : data.length,
-        structure,
-        cifFrame: frame
+        models,
+        modelMap,
+        structureModelMap: new Map(),
+        cifFrame: frame,
+        propertyProvider,
+        cache: Object.create(null)
     };
 
     return ret;
 }
 
+export async function resolveStructure(wrapper: StructureWrapper, modelNum?: number) {
+    if (typeof modelNum === 'undefined') modelNum = wrapper.models[0].modelNum;
+    if (wrapper.structureModelMap.has(modelNum)) return wrapper.structureModelMap.get(modelNum)!;
+    if (!wrapper.modelMap.has(modelNum)) {
+        return void 0;
+    }
+
+    const model = wrapper.modelMap.get(modelNum)!;
+    const structure = Structure.ofModel(model);
+    if (wrapper.propertyProvider) {
+        const modelProps = wrapper.propertyProvider(model, wrapper.cache);
+        for (const p of modelProps) {
+            await tryAttach(wrapper.key, p);
+        }
+    }
+    return structure;
+}
+
+export async function resolveStructures(wrapper: StructureWrapper, modelNums?: number[]) {
+    const ret: Structure[] = [];
+    for (const n of modelNums || (wrapper.models as Model[]).map(m => m.modelNum)) {
+        const s = await resolveStructure(wrapper, n);
+        if (s) ret.push(s);
+    }
+    return ret;
+}
+
 async function tryAttach(key: string, promise: Promise<any>) {
     try {
         await promise;