
build mol-server/preprocess as a separate app (damn shaders: need to fix), use Buffer.from/alloc instead of new Buffer

David Sehnal, 6 years ago
parent commit 17f534186d

+ 1 - 1
src/perf-tests/cif-encoder.ts

@@ -60,7 +60,7 @@ function testBinary() {
     enc.writeCategory(getCat('cat2'), [{ rowCount: 1, fields: category2fields }]);
     enc.encode();
     const data = enc.getData() as Uint8Array;
-    fs.writeFileSync('e:/test/mol-star/test.bcif', new Buffer(data));
+    fs.writeFileSync('e:/test/mol-star/test.bcif', Buffer.from(data));
     console.log('written binary');
 }
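
The `new Buffer(...)` constructor has been deprecated since Node 6 because its behaviour depends on the argument type (a number gives uninitialized memory, an array-like is copied). For a `Uint8Array` argument, `Buffer.from(view)` copies the view's bytes, which is exactly what the old call did here. A minimal standalone sketch of the replacement (the output path is illustrative, not the project's):

    import * as fs from 'fs';

    const data = new Uint8Array([0x42, 0x43, 0x49, 0x46]);

    // Buffer.from(view) copies the view's contents, just like the old
    // `new Buffer(data)` did, but without the deprecated constructor and
    // its unsafe number-argument overload.
    fs.writeFileSync('/tmp/test.bcif', Buffer.from(data));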
 

+ 37 - 2
src/servers/model/preprocess/preprocess.ts

@@ -4,7 +4,7 @@
  * @author David Sehnal <david.sehnal@gmail.com>
  */
 
-import { readStructureWrapper, resolveStructures } from '../server/structure-wrapper';
+import { readStructureWrapper, resolveStructures, readDataAndFrame } from '../server/structure-wrapper';
 import { classifyCif } from './converter';
 import { Structure } from 'mol-model/structure';
 import { CifWriter } from 'mol-io/writer/cif';
@@ -15,7 +15,14 @@ import { ModelPropertiesProvider } from '../property-provider';
 
 // TODO: error handling
 
-export async function preprocessFile(filename: string, propertyProvider?: ModelPropertiesProvider, outputCif?: string, outputBcif?: string) {
+export function preprocessFile(filename: string, propertyProvider?: ModelPropertiesProvider, outputCif?: string, outputBcif?: string) {
+    return propertyProvider
+        ? preprocess(filename, propertyProvider, outputCif, outputBcif)
+        : convert(filename, outputCif, outputBcif);
+}
+
+
+async function preprocess(filename: string, propertyProvider?: ModelPropertiesProvider, outputCif?: string, outputBcif?: string) {
     const input = await readStructureWrapper('entry', '_local_', filename, propertyProvider);
     const categories = await classifyCif(input.cifFrame);
     const inputStructures = (await resolveStructures(input))!;
@@ -36,6 +43,34 @@ export async function preprocessFile(filename: string, propertyProvider?: ModelP
     }
 }
 
+async function convert(filename: string, outputCif?: string, outputBcif?: string) {
+    const { frame } = await readDataAndFrame(filename);
+    const categories = await classifyCif(frame);
+
+    if (outputCif) {
+        const writer = wrapFileToWriter(outputCif);
+        const encoder = CifWriter.createEncoder({ binary: false });
+        encodeConvert(frame.header, categories, encoder, writer);
+        writer.end();
+    }
+
+    if (outputBcif) {
+        const writer = wrapFileToWriter(outputBcif);
+        const encoder = CifWriter.createEncoder({ binary: true, binaryAutoClassifyEncoding: true });
+        encodeConvert(frame.header, categories, encoder, writer);
+        writer.end();
+    }
+}
+
+function encodeConvert(header: string, categories: CifWriter.Category[], encoder: CifWriter.Encoder, writer: Writer) {
+    encoder.startDataBlock(header);
+    for (const cat of categories) {
+        encoder.writeCategory(cat);
+    }
+    encoder.encode();
+    encoder.writeTo(writer);
+}
+
 function encode(structure: Structure, header: string, categories: CifWriter.Category[], encoder: CifWriter.Encoder, exportCtx: CifExportContext, writer: Writer) {
     const skipCategoryNames = new Set<string>(categories.map(c => c.name));
     encoder.startDataBlock(header);
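
Taken together, `preprocessFile` stays the single entry point: without a property provider the new `convert` branch re-encodes the file directly, while a configured provider takes the original `preprocess` branch that builds structures and attaches properties first. A rough usage sketch; the relative import paths are assumptions based on the directory layout in this diff, and the file names are illustrative:

    import { preprocessFile } from './preprocess';
    import { createModelPropertiesProviderFromConfig } from '../property-provider';

    async function run() {
        // No provider: the `convert` branch re-encodes the CIF as CIF/BinaryCIF
        // without creating a Structure or attaching any properties.
        await preprocessFile('in/1abc.cif', void 0, 'out/1abc.cif', 'out/1abc.bcif');

        // With a provider the full `preprocess` branch runs; since the factory
        // can now return undefined, an empty configuration silently falls back
        // to the cheap `convert` branch.
        const provider = createModelPropertiesProviderFromConfig();
        await preprocessFile('in/1abc.cif', provider, 'out/1abc_p.cif', 'out/1abc_p.bcif');
    }

    run();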

+ 3 - 3
src/servers/model/property-provider.ts

@@ -18,11 +18,11 @@ export type AttachModelProperty = (args: { model: Model, params: any, cache: any
 export type AttachModelProperties = (args: { model: Model, params: any, cache: any }) => Promise<any>[]
 export type ModelPropertiesProvider = (model: Model, cache: any) => Promise<any>[]
 
-export function createModelPropertiesProviderFromConfig(): ModelPropertiesProvider {
+export function createModelPropertiesProviderFromConfig() {
     return createModelPropertiesProvider(Config.customProperties);
 }
 
-export function createModelPropertiesProvider(configOrPath: ModelPropertyProviderConfig | string | undefined): ModelPropertiesProvider {
+export function createModelPropertiesProvider(configOrPath: ModelPropertyProviderConfig | string | undefined): ModelPropertiesProvider | undefined {
     let config: ModelPropertyProviderConfig;
     if (typeof configOrPath === 'string') {
         try {
@@ -35,7 +35,7 @@ export function createModelPropertiesProvider(configOrPath: ModelPropertyProvide
         config = configOrPath!;
     }
 
-    if (!config || !config.sources || config.sources.length === 0) return () => [];
+    if (!config || !config.sources || config.sources.length === 0) return void 0;
 
     const ps: AttachModelProperties[] = [];
     for (const p of config.sources) {
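
Returning `undefined` instead of a no-op `() => []` lets callers detect that no custom properties are configured at all and skip the attachment step (this is what `preprocessFile` keys its dispatch on). A small sketch of the caller side; `model` and `cache` are stand-ins for values obtained elsewhere, and the import paths are assumptions:

    import { Model } from 'mol-model/structure';
    import { createModelPropertiesProviderFromConfig } from './property-provider';

    declare const model: Model; // e.g. obtained from trajectoryFromMmCIF(frame).run()
    declare const cache: any;

    async function attachCustomProperties() {
        const provider = createModelPropertiesProviderFromConfig();
        if (!provider) return; // nothing configured: skip property attachment entirely
        // Each configured source contributes one or more attachment promises.
        await Promise.all(provider(model, cache));
    }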

+ 1 - 1
src/servers/model/server/api-local.ts

@@ -84,7 +84,7 @@ export function wrapFileToWriter(fn: string) {
         },
         writeBinary(this: any, data: Uint8Array) {
             this.open();
-            fs.writeSync(this.file, new Buffer(data.buffer));
+            fs.writeSync(this.file, Buffer.from(data.buffer));
             return true;
         },
         writeString(this: any, data: string) {
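
`Buffer.from(data.buffer)` wraps the underlying `ArrayBuffer` without copying, matching what `new Buffer(data.buffer)` did. One general Node.js caveat (not something this commit needs to change): the wrapped buffer covers the whole `ArrayBuffer`, so a `Uint8Array` that is only a partial view would need the explicit offset/length form. A small illustration:

    const backing = new ArrayBuffer(16);
    const view = new Uint8Array(backing, 4, 8); // partial view, for illustration

    // Shares memory with `backing`; covers all 16 bytes, ignoring the
    // view's offset and length.
    const whole = Buffer.from(view.buffer);

    // Matches the view exactly (8 bytes starting at offset 4).
    const exact = Buffer.from(view.buffer, view.byteOffset, view.byteLength);

    console.log(whole.length, exact.length); // 16 8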

+ 1 - 1
src/servers/model/server/api-web.ts

@@ -39,7 +39,7 @@ function wrapResponse(fn: string, res: express.Response) {
         },
         writeBinary(this: any, data: Uint8Array) {
             if (!this.headerWritten) this.writeHeader(true);
-            return res.write(new Buffer(data.buffer));
+            return res.write(Buffer.from(data.buffer));
         },
         writeString(this: any, data: string) {
             if (!this.headerWritten) this.writeHeader(false);

+ 13 - 7
src/servers/model/server/structure-wrapper.ts

@@ -90,24 +90,30 @@ async function parseCif(data: string|Uint8Array) {
     return parsed.result;
 }
 
-export async function readStructureWrapper(key: string, sourceId: string | '_local_', entryId: string, propertyProvider: ModelPropertiesProvider | undefined) {
-    const filename = sourceId === '_local_' ? entryId : Config.mapFile(sourceId, entryId);
-    if (!filename) throw new Error(`Cound not map '${key}' to a valid filename.`);
-    if (!fs.existsSync(filename)) throw new Error(`Could not find source file for '${key}'.`);
-
+export async function readDataAndFrame(filename: string, key?: string) {
     perf.start('read');
     let data;
     try {
         data = await readFile(filename);
     } catch (e) {
-        ConsoleLogger.error(key, '' + e);
-        throw new Error(`Could not read the file for '${key}' from disk.`);
+        ConsoleLogger.error(key || filename, '' + e);
+        throw new Error(`Could not read the file for '${key || filename}' from disk.`);
     }
 
     perf.end('read');
     perf.start('parse');
     const frame = (await parseCif(data)).blocks[0];
     perf.end('parse');
+
+    return { data, frame };
+}
+
+export async function readStructureWrapper(key: string, sourceId: string | '_local_', entryId: string, propertyProvider: ModelPropertiesProvider | undefined) {
+    const filename = sourceId === '_local_' ? entryId : Config.mapFile(sourceId, entryId);
+    if (!filename) throw new Error(`Cound not map '${key}' to a valid filename.`);
+    if (!fs.existsSync(filename)) throw new Error(`Could not find source file for '${key}'.`);
+
+    const { data, frame } = await readDataAndFrame(filename, key);
     perf.start('createModel');
     const models = await trajectoryFromMmCIF(frame).run();
     perf.end('createModel');
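
Splitting `readDataAndFrame` out of `readStructureWrapper` lets the preprocessor's `convert` path parse a CIF/BinaryCIF file and reach its first data block without ever creating a `Structure` or `Model`. A minimal sketch (the relative import path assumes a caller under `src/servers/model/preprocess`, as in this commit):

    import { readDataAndFrame } from '../server/structure-wrapper';

    async function firstBlockHeader(filename: string) {
        // `frame` is the first data block of the parsed CIF/BinaryCIF file;
        // no Structure or Model is created here.
        const { frame } = await readDataAndFrame(filename);
        return frame.header;
    }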

+ 1 - 1
src/servers/model/test.ts

@@ -13,7 +13,7 @@ function wrapFile(fn: string) {
         },
         writeBinary(this: any, data: Uint8Array) {
             this.open();
-            fs.writeSync(this.file, new Buffer(data));
+            fs.writeSync(this.file, Buffer.from(data));
             return true;
         },
         writeString(this: any, data: string) {

+ 1 - 1
src/servers/volume/common/binary-schema.ts

@@ -97,7 +97,7 @@ function writeElement(e: Element, buffer: Buffer, src: any, offset: number) {
 
 function write(element: Element, src: any) {
     const size = byteCount(element, src);
-    const buffer = new Buffer(size);
+    const buffer = Buffer.alloc(size);
     writeElement(element, buffer, src, 0);
     return buffer;
 }
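
`Buffer.alloc(size)` zero-fills the allocation, whereas the old `new Buffer(size)` handed back uninitialized memory (today's `Buffer.allocUnsafe`). For a serializer like `write`, where `writeElement` is expected to fill every byte anyway, the zero-fill costs a little extra work but removes any chance of leaking stale memory contents. The general distinction:

    const safe = Buffer.alloc(64);        // every byte is 0
    const fast = Buffer.allocUnsafe(64);  // contents are arbitrary until overwritten
    fast.fill(0);                         // explicit fill restores the safe behaviour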

+ 1 - 1
src/servers/volume/common/file.ts

@@ -57,7 +57,7 @@ export function createFile(filename: string) {
     });
 }
 
-const smallBuffer = SimpleBuffer.fromBuffer(new Buffer(8));
+const smallBuffer = SimpleBuffer.fromBuffer(Buffer.alloc(8));
 export async function writeInt(file: FileHandle, value: number, position: number) {
     smallBuffer.writeInt32LE(value, 0);
     await file.writeBuffer(position, smallBuffer, 4);

+ 1 - 1
src/servers/volume/pack/main.ts

@@ -40,7 +40,7 @@ function updateAllocationProgress(progress: Data.Progress, progressDone: number)
  */
 async function allocateFile(ctx: Data.Context) {
     const { totalByteSize, file } = ctx;
-    const buffer = new Buffer(Math.min(totalByteSize, 8 * 1024 * 1024));
+    const buffer = Buffer.alloc(Math.min(totalByteSize, 8 * 1024 * 1024));
     const progress: Data.Progress = { current: 0, max: Math.ceil(totalByteSize / buffer.byteLength) };
     let written = 0;
     while (written < totalByteSize) {
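
With `Buffer.alloc`, the reusable chunk is guaranteed to be zero-filled, so the pre-allocated file really does contain zeros. A generic sketch of the same chunked pre-allocation pattern (this is not the project's `allocateFile`, which additionally reports progress through its context):

    import * as fs from 'fs';

    function preallocate(fd: number, totalByteSize: number, chunkSize = 8 * 1024 * 1024) {
        // One reusable, zero-filled chunk is written repeatedly until the
        // target size is reached.
        const buffer = Buffer.alloc(Math.min(totalByteSize, chunkSize));
        let written = 0;
        while (written < totalByteSize) {
            const count = Math.min(buffer.length, totalByteSize - written);
            written += fs.writeSync(fd, buffer, 0, count);
        }
    }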

+ 3 - 3
src/servers/volume/pack/sampling.ts

@@ -20,11 +20,11 @@ export async function createContext(filename: string, channels: Format.Context[]
     const { extent, valueType, grid, origin } = channels[0].data.header;
 
     const samplingCounts = getSamplingCounts(extent, blockSize);
-    const cubeBuffer = new Buffer(new ArrayBuffer(channels.length * blockSize * blockSize * blockSize * getElementByteSize(valueType)));
+    const cubeBuffer = Buffer.from(new ArrayBuffer(channels.length * blockSize * blockSize * blockSize * getElementByteSize(valueType)));
 
     const litteEndianCubeBuffer = SimpleBuffer.IsNativeEndianLittle
         ? cubeBuffer
-        : new Buffer(new ArrayBuffer(channels.length * blockSize * blockSize * blockSize * getElementByteSize(valueType)));
+        : Buffer.from(new ArrayBuffer(channels.length * blockSize * blockSize * blockSize * getElementByteSize(valueType)));
 
     // The data can be periodic iff the extent is the same as the grid and origin is 0.
     if (grid.some((v, i) => v !== extent[i]) || origin.some(v => v !== 0)) {
@@ -100,7 +100,7 @@ function createBlockBuffer(sampleCount: number[], blockSize: number, valueType:
     for (let i = 0; i < numChannels; i++) values[i] = createTypedArray(valueType, sampleCount[0] * sampleCount[1] * blockSize);
     return {
         values,
-        buffers: values.map(xs => new Buffer(xs.buffer)),
+        buffers: values.map(xs => Buffer.from(xs.buffer)),
         slicesWritten: 0
     };
 }
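
`Buffer.from(xs.buffer)` creates a zero-copy view over each typed array's backing store, so values written through the typed arrays are immediately visible in the buffers that get written to disk; this is also why the code keeps a separate little-endian cube buffer for big-endian hosts. A small illustration of the write-through behaviour (assuming a little-endian machine):

    const values = new Float32Array(4);
    const bytes = Buffer.from(values.buffer); // shares memory, no copy

    values[0] = 1.5;
    // On a little-endian host the raw bytes line up with readFloatLE.
    console.log(bytes.readFloatLE(0)); // 1.5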

+ 1 - 1
src/servers/volume/server/local-api.ts

@@ -121,7 +121,7 @@ function wrapFile(fn: string) {
         },
         writeBinary(this: any, data: Uint8Array) {
             this.open();
-            fs.writeSync(this.file, new Buffer(data));
+            fs.writeSync(this.file, Buffer.from(data));
             return true;
         },
         writeString(this: any, data: string) {

+ 1 - 1
src/servers/volume/server/web-api.ts

@@ -60,7 +60,7 @@ function wrapResponse(fn: string, res: express.Response) {
         },
         writeBinary(this: any, data: Uint8Array) {
             if (!this.headerWritten) this.writeHeader(true);
-            return res.write(new Buffer(data.buffer));
+            return res.write(Buffer.from(data.buffer));
         },
         writeString(this: any, data: string) {
             if (!this.headerWritten) this.writeHeader(false);

+ 1 - 0
webpack.config.js

@@ -88,6 +88,7 @@ module.exports = [
     createApp('viewer'),
     createApp('basic-wrapper'),
     createNodeApp('state-docs'),
+    createNodeEntryPoint('preprocess', 'servers/model', 'model-server'),
     createApp('model-server-query'),
 
     createBrowserTest('font-atlas'),
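
The added `createNodeEntryPoint('preprocess', 'servers/model', 'model-server')` is what turns the preprocessor into a separately built Node app, per the commit message. The helper itself is defined elsewhere in webpack.config.js and is not shown in this diff; purely as a hypothetical sketch, a helper like this typically expands to a Node-targeted config along these lines (all paths and options here are illustrative guesses, not the project's actual values):

    // Hypothetical sketch only; the real createNodeEntryPoint may differ.
    const path = require('path');

    function createNodeEntryPoint(name, dir, out) {
        return {
            target: 'node', // build for Node, not the browser
            entry: path.resolve(__dirname, `build/src/${dir}/${name}.js`), // illustrative
            output: {
                filename: `${name}.js`,
                path: path.resolve(__dirname, `build/${out}`)
            }
            // Node-specific settings (externals, __dirname handling, resolve
            // options shared with createApp, ...) would also go here.
        };
    }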