// main.ts
  1. /**
  2. * Copyright (c) 2018 mol* contributors, licensed under MIT, See LICENSE file for more info.
  3. *
  4. * Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
  5. *
  6. * @author David Sehnal <david.sehnal@gmail.com>
  7. */
  8. import * as CCP4 from './ccp4'
  9. import * as File from '../common/file'
  10. import * as Data from './data-model'
  11. import * as Sampling from './sampling'
  12. import * as DataFormat from '../common/data-format'
  13. import * as fs from 'fs'
  14. export default async function pack(input: { name: string, filename: string }[], blockSize: number, isPeriodic: boolean, outputFilename: string) {
  15. try {
  16. await create(outputFilename, input, blockSize, isPeriodic);
  17. } catch (e) {
  18. console.error('[Error] ' + e);
  19. }
  20. }
  21. function getTime() {
  22. let t = process.hrtime();
  23. return t[0] * 1000 + t[1] / 1000000;
  24. }
  25. function updateAllocationProgress(progress: Data.Progress, progressDone: number) {
  26. let old = (100 * progress.current / progress.max).toFixed(0);
  27. progress.current += progressDone;
  28. let $new = (100 * progress.current / progress.max).toFixed(0);
  29. if (old !== $new) {
  30. process.stdout.write(`\rAllocating... ${$new}%`);
  31. }
  32. }
  33. /**
  34. * Pre allocate the disk space to be able to do "random" writes into the entire file.
  35. */
  36. async function allocateFile(ctx: Data.Context) {
  37. const { totalByteSize, file } = ctx;
  38. const buffer = new Buffer(Math.min(totalByteSize, 8 * 1024 * 1024));
  39. const progress: Data.Progress = { current: 0, max: Math.ceil(totalByteSize / buffer.byteLength) };
  40. let written = 0;
  41. while (written < totalByteSize) {
  42. written += fs.writeSync(file, buffer, 0, Math.min(totalByteSize - written, buffer.byteLength));
  43. updateAllocationProgress(progress, 1);
  44. }
  45. }
  46. function determineBlockSize(data: CCP4.Data, blockSize: number) {
  47. const { extent } = data.header;
  48. const maxLayerSize = 1024 * 1024 * 1024;
  49. const valueCount = extent[0] * extent[1];
  50. if (valueCount * blockSize <= maxLayerSize) return blockSize;
  51. while (blockSize > 0) {
  52. blockSize -= 4;
  53. if (valueCount * blockSize <= maxLayerSize) return blockSize;
  54. }
  55. throw new Error('Could not determine a valid block size.');
  56. }
  57. async function writeHeader(ctx: Data.Context) {
  58. const header = DataFormat.encodeHeader(Data.createHeader(ctx));
  59. await File.writeInt(ctx.file, header.byteLength, 0);
  60. await File.writeBuffer(ctx.file, 4, header);
  61. }
  62. async function create(filename: string, sourceDensities: { name: string, filename: string }[], sourceBlockSize: number, isPeriodic: boolean) {
  63. const startedTime = getTime();
  64. if (sourceBlockSize % 4 !== 0 || sourceBlockSize < 4) {
  65. throw Error('Block size must be a positive number divisible by 4.');
  66. }
  67. if (!sourceDensities.length) {
  68. throw Error('Specify at least one source density.');
  69. }
  70. process.stdout.write('Initializing... ');
  71. const files: number[] = [];
  72. try {
  73. // Step 1a: Read the CCP4 headers
  74. const channels: CCP4.Data[] = [];
  75. for (const s of sourceDensities) channels.push(await CCP4.open(s.name, s.filename));
  76. // Step 1b: Check if the CCP4 headers are compatible.
  77. const isOk = channels.reduce((ok, s) => ok && CCP4.compareHeaders(channels[0].header, s.header), true);
  78. if (!isOk) {
  79. throw new Error('Input file headers are not compatible (different grid, etc.).');
  80. }
  81. const blockSize = determineBlockSize(channels[0], sourceBlockSize);
  82. for (const ch of channels) CCP4.assignSliceBuffer(ch, blockSize);
  83. // Step 1c: Create data context.
  84. const context = await Sampling.createContext(filename, channels, blockSize, isPeriodic);
  85. for (const s of channels) files.push(s.file);
  86. files.push(context.file);
  87. process.stdout.write(' done.\n');
  88. console.log(`Block size: ${blockSize}`);
  89. // Step 2: Allocate disk space.
  90. process.stdout.write('Allocating... 0%');
  91. await allocateFile(context);
  92. process.stdout.write('\rAllocating... done.\n');
  93. // Step 3: Process and write the data
  94. process.stdout.write('Writing data... 0%');
  95. await Sampling.processData(context);
  96. process.stdout.write('\rWriting data... done.\n');
  97. // Step 4: Write the header at the start of the file.
  98. // The header is written last because the sigma/min/max values are computed
  99. // during step 3.
  100. process.stdout.write('Writing header... ');
  101. await writeHeader(context);
  102. process.stdout.write('done.\n');
  103. // Step 5: Report the time, d'ph.
  104. const time = getTime() - startedTime;
  105. console.log(`[Done] ${time.toFixed(0)}ms.`);
  106. } finally {
  107. for (let f of files) File.close(f);
  108. // const ff = await File.openRead(filename);
  109. // const hh = await DataFormat.readHeader(ff);
  110. // File.close(ff);
  111. // console.log(hh.header);
  112. }
  113. }