
postprocessing init

AronKovacs 4 years ago
Commit
b45cf206fd

+ 4 - 3
src/apps/docking-viewer/viewport.tsx

@@ -46,9 +46,10 @@ function occlusionStyle(plugin: PluginContext) {
         postprocessing: {
             ...plugin.canvas3d!.props.postprocessing,
             occlusion: { name: 'on', params: {
-                kernelSize: 8,
-                bias: 0.8,
-                radius: 64
+                samples: 64,
+                radius: 8,
+                bias: 0.025,
+                kernelSize: 13
             } },
             outline: { name: 'on', params: {
                 scale: 1.0,

+ 2 - 2
src/examples/lighting/index.ts

@@ -24,7 +24,7 @@ const Canvas3DPresets = {
             mode: 'temporal' as Canvas3DProps['multiSample']['mode']
         },
         postprocessing: {
-            occlusion: { name: 'on', params: { bias: 0.8, kernelSize: 6, radius: 64 } },
+            occlusion: { name: 'on', params: { samples: 64, radius: 8, bias: 0.025, kernelSize: 13 } },
             outline: { name: 'on', params: { scale: 1, threshold: 0.8 } }
         },
         renderer: {
@@ -37,7 +37,7 @@ const Canvas3DPresets = {
             mode: 'temporal' as Canvas3DProps['multiSample']['mode']
         },
         postprocessing: {
-            occlusion: { name: 'on', params: { bias: 0.8, kernelSize: 6, radius: 64 } },
+            occlusion: { name: 'on', params: { samples: 64, radius: 8, bias: 0.025, kernelSize: 13 } },
             outline: { name: 'off', params: { } }
         },
         renderer: {
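
The occlusion settings move from the old screen-space kernel (kernelSize / bias / radius, measured in pixels) to hemisphere SSAO controls: samples (hemisphere samples per fragment), radius (sampling radius), bias (depth offset against self-occlusion) and kernelSize (width of the separable Gaussian blur applied to the occlusion buffer). Both the docking-viewer preset and the lighting example adopt the same defaults. A minimal sketch of switching the new parameters on at runtime, assuming canvas3d.setProps merges a full postprocessing object the same way occlusionStyle does above (the import path assumes the published molstar/lib layout):

    import { PluginContext } from 'molstar/lib/mol-plugin/context';

    function enableSsao(plugin: PluginContext) {
        const canvas3d = plugin.canvas3d;
        if (!canvas3d) return;
        canvas3d.setProps({
            postprocessing: {
                ...canvas3d.props.postprocessing,
                occlusion: {
                    name: 'on',
                    params: {
                        samples: 64,    // hemisphere samples per fragment
                        radius: 8,      // sampling radius
                        bias: 0.025,    // depth offset against self-occlusion
                        kernelSize: 13  // Gaussian blur width for the occlusion buffer
                    }
                }
            }
        });
    }
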

+ 2 - 4
src/mol-canvas3d/canvas3d.ts

@@ -24,7 +24,7 @@ import { ParamDefinition as PD } from '../mol-util/param-definition';
 import { DebugHelperParams } from './helper/bounding-sphere-helper';
 import { SetUtils } from '../mol-util/set';
 import { Canvas3dInteractionHelper } from './helper/interaction-events';
-import { PostprocessingParams, PostprocessingPass } from './passes/postprocessing';
+import { PostprocessingParams } from './passes/postprocessing';
 import { MultiSampleHelper, MultiSampleParams, MultiSamplePass } from './passes/multi-sample';
 import { PickData } from './passes/pick';
 import { PickHelper } from './passes/pick';
@@ -301,9 +301,7 @@ namespace Canvas3D {
                 if (MultiSamplePass.isEnabled(p.multiSample)) {
                     multiSampleHelper.render(renderer, cam, scene, helper, true, p.transparentBackground, p);
                 } else {
-                    const toDrawingBuffer = !PostprocessingPass.isEnabled(p.postprocessing) && scene.volumes.renderables.length === 0 && !passes.draw.wboitEnabled;
-                    passes.draw.render(renderer, cam, scene, helper, toDrawingBuffer, p.transparentBackground);
-                    if (!toDrawingBuffer) passes.postprocessing.render(cam, true, p.postprocessing);
+                    passes.draw.render(renderer, cam, scene, helper, true, p.renderer.backgroundColor, p.transparentBackground, p.postprocessing);
                 }
                 pickHelper.dirty = true;
                 didRender = true;

+ 99 - 18
src/mol-canvas3d/passes/draw.ts

@@ -22,8 +22,11 @@ import { Helper } from '../helper/helper';
 
 import quad_vert from '../../mol-gl/shader/quad.vert';
 import depthMerge_frag from '../../mol-gl/shader/depth-merge.frag';
+import copyFbo_frag from '../../mol-gl/shader/copy-fbo.frag';
 import { StereoCamera } from '../camera/stereo';
 import { WboitPass } from './wboit';
+import { FxaaPass, PostprocessingPass, PostprocessingProps } from './postprocessing';
+import { Color } from '../../mol-util/color';
 
 const DepthMergeSchema = {
     ...QuadSchema,
@@ -50,6 +53,29 @@ function getDepthMergeRenderable(ctx: WebGLContext, depthTexturePrimitives: Text
     return createComputeRenderable(renderItem, values);
 }
 
+const CopyFboSchema = {
+    ...QuadSchema,
+    tColor: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
+    tDepth: TextureSpec('texture', 'depth', 'ushort', 'nearest'),
+    uTexSize: UniformSpec('v2'),
+};
+const  CopyFboShaderCode = ShaderCode('copy-fbo', quad_vert, copyFbo_frag);
+type  CopyFboRenderable = ComputeRenderable<Values<typeof CopyFboSchema>>
+
+function getCopyFboRenderable(ctx: WebGLContext, colorTexture: Texture, depthTexture: Texture): CopyFboRenderable {
+    const values: Values<typeof CopyFboSchema> = {
+        ...QuadValues,
+        tColor: ValueCell.create(colorTexture),
+        tDepth: ValueCell.create(depthTexture),
+        uTexSize: ValueCell.create(Vec2.create(colorTexture.getWidth(), colorTexture.getHeight())),
+    };
+
+    const schema = { ...CopyFboSchema };
+    const renderItem = createComputeRenderItem(ctx, 'triangles', CopyFboShaderCode, schema, values);
+
+    return createComputeRenderable(renderItem, values);
+}
+
 export class DrawPass {
     private readonly drawTarget: RenderTarget
 
@@ -57,14 +83,20 @@ export class DrawPass {
     readonly depthTexture: Texture
     readonly depthTexturePrimitives: Texture
 
-    private readonly packedDepth: boolean
+    readonly packedDepth: boolean
+
     private depthTarget: RenderTarget
     private depthTargetPrimitives: RenderTarget | null
     private depthTargetVolumes: RenderTarget | null
     private depthTextureVolumes: Texture
     private depthMerge: DepthMergeRenderable
 
+    private copyFboTarget: CopyFboRenderable
+    private copyFboPostprocessing: CopyFboRenderable
+
     private wboit: WboitPass | undefined
+    readonly postprocessing: PostprocessingPass
+    private readonly fxaa: FxaaPass
 
     get wboitEnabled() {
         return !!this.wboit?.enabled;
@@ -93,6 +125,11 @@ export class DrawPass {
         this.depthMerge = getDepthMergeRenderable(webgl, this.depthTexturePrimitives, this.depthTextureVolumes, this.packedDepth);
 
         this.wboit = enableWboit ? new WboitPass(webgl, width, height) : undefined;
+        this.postprocessing = new PostprocessingPass(webgl, this);
+        this.fxaa = new FxaaPass(webgl, this);
+
+        this.copyFboTarget = getCopyFboRenderable(webgl, this.colorTarget.texture, this.depthTarget.texture);
+        this.copyFboPostprocessing = getCopyFboRenderable(webgl, this.postprocessing.target.texture, this.depthTarget.texture);
     }
 
     setSize(width: number, height: number) {
@@ -117,9 +154,15 @@ export class DrawPass {
 
             ValueCell.update(this.depthMerge.values.uTexSize, Vec2.set(this.depthMerge.values.uTexSize.ref.value, width, height));
 
+            ValueCell.update(this.copyFboTarget.values.uTexSize, Vec2.set(this.copyFboTarget.values.uTexSize.ref.value, width, height));
+            ValueCell.update(this.copyFboPostprocessing.values.uTexSize, Vec2.set(this.copyFboPostprocessing.values.uTexSize.ref.value, width, height));
+
             if (this.wboit?.enabled) {
                 this.wboit.setSize(width, height);
             }
+
+            this.postprocessing.setSize(width, height);
+            this.fxaa.setSize(width, height);
         }
     }
 
@@ -137,39 +180,48 @@ export class DrawPass {
         this.depthMerge.render();
     }
 
-    private _renderWboit(renderer: Renderer, camera: ICamera, scene: Scene, toDrawingBuffer: boolean) {
+    private _renderWboit(renderer: Renderer, camera: ICamera, scene: Scene, backgroundColor: Color, postprocessingProps: PostprocessingProps) {
         if (!this.wboit?.enabled) throw new Error('expected wboit to be enabled');
 
-        const renderTarget = toDrawingBuffer ? this.drawTarget : this.colorTarget;
-        renderTarget.bind();
+        this.colorTarget.bind();
         renderer.clear(true);
 
         // render opaque primitives
-        this.depthTexturePrimitives.attachFramebuffer(renderTarget.framebuffer, 'depth');
-        renderTarget.bind();
+        this.depthTexturePrimitives.attachFramebuffer(this.colorTarget.framebuffer, 'depth');
+        this.colorTarget.bind();
+        renderer.clearDepth();
         renderer.renderWboitOpaque(scene.primitives, camera, null);
 
         // render opaque volumes
-        this.depthTextureVolumes.attachFramebuffer(renderTarget.framebuffer, 'depth');
-        renderTarget.bind();
+        this.depthTextureVolumes.attachFramebuffer(this.colorTarget.framebuffer, 'depth');
+        this.colorTarget.bind();
         renderer.clearDepth();
         renderer.renderWboitOpaque(scene.volumes, camera, this.depthTexturePrimitives);
 
         // merge depth of opaque primitives and volumes
         this._depthMerge();
 
+        if (PostprocessingPass.isEnabled(postprocessingProps)) {
+            this.postprocessing.render(camera, false, backgroundColor, postprocessingProps);
+        }
+
         // render transparent primitives and volumes
         this.wboit.bind();
         renderer.renderWboitTransparent(scene.primitives, camera, this.depthTexture);
         renderer.renderWboitTransparent(scene.volumes, camera, this.depthTexture);
 
         // evaluate wboit
-        this.depthTexturePrimitives.attachFramebuffer(renderTarget.framebuffer, 'depth');
-        renderTarget.bind();
+        if (PostprocessingPass.isEnabled(postprocessingProps)) {
+            this.depthTexturePrimitives.attachFramebuffer(this.postprocessing.target.framebuffer, 'depth');
+            this.postprocessing.target.bind();
+        } else {
+            this.depthTexturePrimitives.attachFramebuffer(this.colorTarget.framebuffer, 'depth');
+            this.colorTarget.bind();
+        }
         this.wboit.render();
     }
 
-    private _renderBlended(renderer: Renderer, camera: ICamera, scene: Scene, toDrawingBuffer: boolean) {
+    private _renderBlended(renderer: Renderer, camera: ICamera, scene: Scene, toDrawingBuffer: boolean, postprocessingProps: PostprocessingProps) {
         if (toDrawingBuffer) {
             this.webgl.unbindFramebuffer();
         } else {
@@ -221,15 +273,23 @@ export class DrawPass {
         }
     }
 
-    private _render(renderer: Renderer, camera: ICamera, scene: Scene, helper: Helper, toDrawingBuffer: boolean) {
+    private _render(renderer: Renderer, camera: ICamera, scene: Scene, helper: Helper, toDrawingBuffer: boolean, backgroundColor: Color, postprocessingProps: PostprocessingProps) {
+        const antialiasingEnabled = FxaaPass.isEnabled(postprocessingProps);
+
         const { x, y, width, height } = camera.viewport;
         renderer.setViewport(x, y, width, height);
         renderer.update(camera);
 
         if (this.wboitEnabled) {
-            this._renderWboit(renderer, camera, scene, toDrawingBuffer);
+            this._renderWboit(renderer, camera, scene, backgroundColor, postprocessingProps);
         } else {
-            this._renderBlended(renderer, camera, scene, toDrawingBuffer);
+            this._renderBlended(renderer, camera, scene, !antialiasingEnabled && toDrawingBuffer, postprocessingProps);
+        }
+
+        if (PostprocessingPass.isEnabled(postprocessingProps)) {
+            this.postprocessing.target.bind();
+        } else {
+            this.colorTarget.bind();
         }
 
         if (helper.debug.isEnabled) {
@@ -245,18 +305,39 @@ export class DrawPass {
             renderer.renderBlended(helper.camera.scene, helper.camera.camera, null);
         }
 
+        if (antialiasingEnabled) {
+            this.fxaa.render(camera, toDrawingBuffer, postprocessingProps);
+        } else if (toDrawingBuffer) {
+            this.drawTarget.bind();
+
+            if (PostprocessingPass.isEnabled(postprocessingProps)) {
+                this.copyFboPostprocessing.render();
+            } else {
+                this.copyFboTarget.render();
+            }
+        }
+
         this.webgl.gl.flush();
     }
 
-    render(renderer: Renderer, camera: Camera | StereoCamera, scene: Scene, helper: Helper, toDrawingBuffer: boolean, transparentBackground: boolean) {
+    render(renderer: Renderer, camera: Camera | StereoCamera, scene: Scene, helper: Helper, toDrawingBuffer: boolean, backgroundColor: Color, transparentBackground: boolean, postprocessingProps: PostprocessingProps) {
         renderer.setTransparentBackground(transparentBackground);
         renderer.setDrawingBufferSize(this.colorTarget.getWidth(), this.colorTarget.getHeight());
 
         if (StereoCamera.is(camera)) {
-            this._render(renderer, camera.left, scene, helper, toDrawingBuffer);
-            this._render(renderer, camera.right, scene, helper, toDrawingBuffer);
+            this._render(renderer, camera.left, scene, helper, toDrawingBuffer, backgroundColor, postprocessingProps);
+            this._render(renderer, camera.right, scene, helper, toDrawingBuffer, backgroundColor, postprocessingProps);
         } else {
-            this._render(renderer, camera, scene, helper, toDrawingBuffer);
+            this._render(renderer, camera, scene, helper, toDrawingBuffer, backgroundColor, postprocessingProps);
+        }
+    }
+
+    getColorTarget(postprocessingProps: PostprocessingProps): RenderTarget {
+        if (FxaaPass.isEnabled(postprocessingProps)) {
+            return this.fxaa.target;
+        } else if (PostprocessingPass.isEnabled(postprocessingProps)) {
+            return this.postprocessing.target;
         }
+        return this.colorTarget;
     }
 }
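
With postprocessing and FXAA owned by DrawPass, callers no longer branch on PostprocessingPass.isEnabled themselves: they render and then ask the pass where the finished frame ended up. A minimal sketch of that consumer pattern, mirroring the ImagePass and MultiSamplePass changes below; the instance values are placeholders and the relative import paths assume the snippet lives next to the sibling passes:

    import { DrawPass } from './draw';
    import { PostprocessingProps } from './postprocessing';
    import { Color } from '../../mol-util/color';
    import { Camera } from '../camera';
    import { Helper } from '../helper/helper';
    import Renderer from '../../mol-gl/renderer';
    import Scene from '../../mol-gl/scene';

    declare const drawPass: DrawPass, renderer: Renderer, camera: Camera, scene: Scene, helper: Helper;
    declare const postprocessingProps: PostprocessingProps;

    // Render into the pass-owned targets; background color and postprocessing props are new arguments.
    drawPass.render(renderer, camera, scene, helper, false, Color(0xffffff), false, postprocessingProps);

    // FXAA target if antialiasing ran, postprocessing target if SSAO/outline ran, plain color target otherwise.
    const finalColor = drawPass.getColorTarget(postprocessingProps).texture; // e.g. fed to a compose pass or read back
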

+ 5 - 12
src/mol-canvas3d/passes/image.ts

@@ -10,13 +10,14 @@ import Renderer from '../../mol-gl/renderer';
 import Scene from '../../mol-gl/scene';
 import { ParamDefinition as PD } from '../../mol-util/param-definition';
 import { DrawPass } from './draw';
-import { PostprocessingPass, PostprocessingParams } from './postprocessing';
+import { PostprocessingParams } from './postprocessing';
 import { MultiSamplePass, MultiSampleParams, MultiSampleHelper } from './multi-sample';
 import { Camera } from '../camera';
 import { Viewport } from '../camera/util';
 import { PixelData } from '../../mol-util/image';
 import { Helper } from '../helper/helper';
 import { CameraHelper, CameraHelperParams } from '../helper/camera-helper';
+import { Color } from '../../mol-util/color';
 
 export const ImageParams = {
     transparentBackground: PD.Boolean(false),
@@ -38,7 +39,6 @@ export class ImagePass {
     get colorTarget() { return this._colorTarget; }
 
     private readonly drawPass: DrawPass
-    private readonly postprocessingPass: PostprocessingPass
     private readonly multiSamplePass: MultiSamplePass
     private readonly multiSampleHelper: MultiSampleHelper
     private readonly helper: Helper
@@ -50,8 +50,7 @@ export class ImagePass {
         this.props = { ...PD.getDefaultValues(ImageParams), ...props };
 
         this.drawPass = new DrawPass(webgl, 128, 128, enableWboit);
-        this.postprocessingPass = new PostprocessingPass(webgl, this.drawPass);
-        this.multiSamplePass = new MultiSamplePass(webgl, this.drawPass, this.postprocessingPass);
+        this.multiSamplePass = new MultiSamplePass(webgl, this.drawPass);
         this.multiSampleHelper = new MultiSampleHelper(this.multiSamplePass);
 
         this.helper = {
@@ -70,7 +69,6 @@ export class ImagePass {
         this._height = height;
 
         this.drawPass.setSize(width, height);
-        this.postprocessingPass.syncSize();
         this.multiSamplePass.syncSize();
     }
 
@@ -88,13 +86,8 @@ export class ImagePass {
             this.multiSampleHelper.render(this.renderer, this._camera, this.scene, this.helper, false, this.props.transparentBackground, this.props);
             this._colorTarget = this.multiSamplePass.colorTarget;
         } else {
-            this.drawPass.render(this.renderer, this._camera, this.scene, this.helper, false, this.props.transparentBackground);
-            if (PostprocessingPass.isEnabled(this.props.postprocessing)) {
-                this.postprocessingPass.render(this._camera, false, this.props.postprocessing);
-                this._colorTarget = this.postprocessingPass.target;
-            } else {
-                this._colorTarget = this.drawPass.colorTarget;
-            }
+            this.drawPass.render(this.renderer, this._camera, this.scene, this.helper, false, Color(0xffffff), this.props.transparentBackground, this.props.postprocessing);
+            this._colorTarget = this.drawPass.getColorTarget(this.props.postprocessing);
         }
     }
 

+ 13 - 18
src/mol-canvas3d/passes/multi-sample.ts

@@ -16,7 +16,7 @@ import { createComputeRenderable, ComputeRenderable } from '../../mol-gl/rendera
 import { ParamDefinition as PD } from '../../mol-util/param-definition';
 import { RenderTarget } from '../../mol-gl/webgl/render-target';
 import { Camera } from '../../mol-canvas3d/camera';
-import { PostprocessingPass, PostprocessingProps } from './postprocessing';
+import { PostprocessingProps } from './postprocessing';
 import { DrawPass } from './draw';
 import Renderer from '../../mol-gl/renderer';
 import Scene from '../../mol-gl/scene';
@@ -25,6 +25,7 @@ import { StereoCamera } from '../camera/stereo';
 
 import quad_vert from '../../mol-gl/shader/quad.vert';
 import compose_frag from '../../mol-gl/shader/compose.frag';
+import { Color } from '../../mol-util/color';
 
 const ComposeSchema = {
     ...QuadSchema,
@@ -68,7 +69,7 @@ export class MultiSamplePass {
     private holdTarget: RenderTarget
     private compose: ComposeRenderable
 
-    constructor(private webgl: WebGLContext, private drawPass: DrawPass, private postprocessing: PostprocessingPass) {
+    constructor(private webgl: WebGLContext, private drawPass: DrawPass) {
         const { colorBufferFloat, textureFloat } = webgl.extensions;
         const width = drawPass.colorTarget.getWidth();
         const height = drawPass.colorTarget.getHeight();
@@ -109,7 +110,7 @@ export class MultiSamplePass {
     }
 
     private renderMultiSample(renderer: Renderer, camera: Camera | StereoCamera, scene: Scene, helper: Helper, toDrawingBuffer: boolean, transparentBackground: boolean, props: Props) {
-        const { compose, composeTarget, drawPass, postprocessing, webgl } = this;
+        const { compose, composeTarget, drawPass, webgl } = this;
         const { gl, state } = webgl;
 
         // based on the Multisample Anti-Aliasing Render Pass
@@ -123,10 +124,8 @@ export class MultiSamplePass {
         const baseSampleWeight = 1.0 / offsetList.length;
         const roundingRange = 1 / 32;
 
-        const postprocessingEnabled = PostprocessingPass.isEnabled(props.postprocessing);
-
         camera.viewOffset.enabled = true;
-        ValueCell.update(compose.values.tColor, postprocessingEnabled ? postprocessing.target.texture : drawPass.colorTarget.texture);
+        ValueCell.update(compose.values.tColor, drawPass.getColorTarget(props.postprocessing).texture);
         compose.update();
 
         // render the scene multiple times, each slightly jitter offset
@@ -143,9 +142,8 @@ export class MultiSamplePass {
             const sampleWeight = baseSampleWeight + roundingRange * uniformCenteredDistribution;
             ValueCell.update(compose.values.uWeight, sampleWeight);
 
-            // render scene and optionally postprocess
-            drawPass.render(renderer, camera, scene, helper, false, transparentBackground);
-            if (postprocessingEnabled) postprocessing.render(camera, false, props.postprocessing);
+            // render scene
+            drawPass.render(renderer, camera, scene, helper, false, Color(0xffffff), transparentBackground, props.postprocessing);
 
             // compose rendered scene with compose target
             composeTarget.bind();
@@ -179,7 +177,7 @@ export class MultiSamplePass {
     }
 
     private renderTemporalMultiSample(sampleIndex: number, renderer: Renderer, camera: Camera | StereoCamera, scene: Scene, helper: Helper, toDrawingBuffer: boolean, transparentBackground: boolean, props: Props) {
-        const { compose, composeTarget, holdTarget, postprocessing, drawPass, webgl } = this;
+        const { compose, composeTarget, holdTarget, drawPass, webgl } = this;
         const { gl, state } = webgl;
 
         // based on the Multisample Anti-Aliasing Render Pass
@@ -193,13 +191,11 @@ export class MultiSamplePass {
 
         const { x, y, width, height } = camera.viewport;
         const sampleWeight = 1.0 / offsetList.length;
-        const postprocessingEnabled = PostprocessingPass.isEnabled(props.postprocessing) || props.postprocessing.antialiasing.name === 'on';
 
         if (sampleIndex === -1) {
-            drawPass.render(renderer, camera, scene, helper, false, transparentBackground);
-            if (postprocessingEnabled) postprocessing.render(camera, false, props.postprocessing);
+            drawPass.render(renderer, camera, scene, helper, false, Color(0xffffff), transparentBackground, props.postprocessing);
             ValueCell.update(compose.values.uWeight, 1.0);
-            ValueCell.update(compose.values.tColor, postprocessingEnabled ? postprocessing.target.texture : drawPass.colorTarget.texture);
+            ValueCell.update(compose.values.tColor, drawPass.getColorTarget(props.postprocessing).texture);
             compose.update();
 
             holdTarget.bind();
@@ -212,7 +208,7 @@ export class MultiSamplePass {
             sampleIndex += 1;
         } else {
             camera.viewOffset.enabled = true;
-            ValueCell.update(compose.values.tColor, postprocessingEnabled ? postprocessing.target.texture : drawPass.colorTarget.texture);
+            ValueCell.update(compose.values.tColor, drawPass.getColorTarget(props.postprocessing).texture);
             ValueCell.update(compose.values.uWeight, sampleWeight);
             compose.update();
 
@@ -224,9 +220,8 @@ export class MultiSamplePass {
                 Camera.setViewOffset(camera.viewOffset, width, height, offset[0], offset[1], width, height);
                 camera.update();
 
-                // render scene and optionally postprocess
-                drawPass.render(renderer, camera, scene, helper, false, transparentBackground);
-                if (postprocessingEnabled) postprocessing.render(camera, false, props.postprocessing);
+                // render scene
+                drawPass.render(renderer, camera, scene, helper, false, Color(0xffffff), transparentBackground, props.postprocessing);
 
                 // compose rendered scene with compose target
                 composeTarget.bind();
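
Each jittered frame above is composed with weight baseSampleWeight = 1/N plus a small dither drawn from roundingRange = 1/32. Assuming uniformCenteredDistribution is the usual -0.5 + (i + 0.5) / N term from the three.js multi-sample pass this code is modelled on, the dither terms are symmetric around zero, so the weights still sum to one and the accumulated image keeps its brightness. A standalone check of that arithmetic:

    // Standalone check: the dithered per-sample weights still sum to 1.
    function sampleWeights(n: number): number[] {
        const baseSampleWeight = 1.0 / n;
        const roundingRange = 1 / 32;
        const weights: number[] = [];
        for (let i = 0; i < n; i++) {
            // assumed form of uniformCenteredDistribution (not shown in this hunk)
            const uniformCenteredDistribution = -0.5 + (i + 0.5) / n;
            weights.push(baseSampleWeight + roundingRange * uniformCenteredDistribution);
        }
        return weights;
    }

    console.log(sampleWeights(8).reduce((a, b) => a + b, 0)); // 1 (the dither terms cancel)
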

+ 1 - 5
src/mol-canvas3d/passes/passes.ts

@@ -6,29 +6,25 @@
 
 import { DrawPass } from './draw';
 import { PickPass } from './pick';
-import { PostprocessingPass } from './postprocessing';
 import { MultiSamplePass } from './multi-sample';
 import { WebGLContext } from '../../mol-gl/webgl/context';
 
 export class Passes {
     readonly draw: DrawPass
     readonly pick: PickPass
-    readonly postprocessing: PostprocessingPass
     readonly multiSample: MultiSamplePass
 
     constructor(private webgl: WebGLContext, attribs: Partial<{ pickScale: number, enableWboit: boolean }> = {}) {
         const { gl } = webgl;
         this.draw = new DrawPass(webgl, gl.drawingBufferWidth, gl.drawingBufferHeight, attribs.enableWboit || false);
         this.pick = new PickPass(webgl, this.draw, attribs.pickScale || 0.25);
-        this.postprocessing = new PostprocessingPass(webgl, this.draw);
-        this.multiSample = new MultiSamplePass(webgl, this.draw, this.postprocessing);
+        this.multiSample = new MultiSamplePass(webgl, this.draw);
     }
 
     updateSize() {
         const { gl } = this.webgl;
         this.draw.setSize(gl.drawingBufferWidth, gl.drawingBufferHeight);
         this.pick.syncSize();
-        this.postprocessing.syncSize();
         this.multiSample.syncSize();
     }
 }
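
Passes now only wires the draw, pick and multi-sample passes; ambient occlusion, outlines and FXAA are reached through the draw pass itself (passes.draw.postprocessing). A minimal construction sketch, with import paths assumed to follow the published molstar/lib layout:

    import { WebGLContext } from 'molstar/lib/mol-gl/webgl/context';
    import { Passes } from 'molstar/lib/mol-canvas3d/passes/passes';

    declare const webgl: WebGLContext;

    // Postprocessing is no longer constructed here; it lives on passes.draw.postprocessing.
    const passes = new Passes(webgl, { pickScale: 0.25, enableWboit: true });
    passes.updateSize(); // resizes draw, pick and multiSample together
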

+ 436 - 76
src/mol-canvas3d/passes/postprocessing.ts

@@ -12,20 +12,191 @@ import { Texture } from '../../mol-gl/webgl/texture';
 import { ValueCell } from '../../mol-util';
 import { createComputeRenderItem } from '../../mol-gl/webgl/render-item';
 import { createComputeRenderable, ComputeRenderable } from '../../mol-gl/renderable';
-import { Vec2, Vec3 } from '../../mol-math/linear-algebra';
+import { Mat4, Vec2, Vec3 } from '../../mol-math/linear-algebra';
 import { ParamDefinition as PD } from '../../mol-util/param-definition';
 import { RenderTarget } from '../../mol-gl/webgl/render-target';
 import { DrawPass } from './draw';
-import { Camera, ICamera } from '../../mol-canvas3d/camera';
+import { ICamera } from '../../mol-canvas3d/camera';
 import quad_vert from '../../mol-gl/shader/quad.vert';
+import outlines_frag from '../../mol-gl/shader/outlines.frag';
+import ssao_frag from '../../mol-gl/shader/ssao.frag';
+import ssao_blur_frag from '../../mol-gl/shader/ssao-blur.frag';
 import postprocessing_frag from '../../mol-gl/shader/postprocessing.frag';
 import fxaa_frag from '../../mol-gl/shader/fxaa.frag';
-import { StereoCamera } from '../camera/stereo';
+import { Framebuffer } from '../../mol-gl/webgl/framebuffer';
+import { Color } from '../../mol-util/color';
+
+const OutlinesSchema = {
+    ...QuadSchema,
+    tDepth: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
+    uTexSize: UniformSpec('v2'),
+
+    dOrthographic: DefineSpec('number'),
+    uNear: UniformSpec('f'),
+    uFar: UniformSpec('f'),
+
+    uMaxPossibleViewZDiff: UniformSpec('f'),
+};
+type OutlinesRenderable = ComputeRenderable<Values<typeof OutlinesSchema>>
+
+function getOutlinesRenderable(ctx: WebGLContext, depthTexture: Texture): OutlinesRenderable {
+    const values: Values<typeof OutlinesSchema> = {
+        ...QuadValues,
+        tDepth: ValueCell.create(depthTexture),
+        uTexSize: ValueCell.create(Vec2.create(depthTexture.getWidth(), depthTexture.getHeight())),
+
+        dOrthographic: ValueCell.create(0),
+        uNear: ValueCell.create(1),
+        uFar: ValueCell.create(10000),
+
+        uMaxPossibleViewZDiff: ValueCell.create(0.5),
+    };
+
+    const schema = { ...OutlinesSchema };
+    const shaderCode = ShaderCode('outlines', quad_vert, outlines_frag);
+    const renderItem = createComputeRenderItem(ctx, 'triangles', shaderCode, schema, values);
+
+    return createComputeRenderable(renderItem, values);
+}
+
+const SsaoSchema = {
+    ...QuadSchema,
+    tDepth: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
+
+    uSamples: UniformSpec('v3[]'),
+    dNSamples: DefineSpec('number'),
+
+    uProjection: UniformSpec('m4'),
+    uInvProjection: UniformSpec('m4'),
+
+    uTexSize: UniformSpec('v2'),
+
+    uRadius: UniformSpec('f'),
+    uBias: UniformSpec('f'),
+};
+
+type SsaoRenderable = ComputeRenderable<Values<typeof SsaoSchema>>
+
+function getSsaoRenderable(ctx: WebGLContext, depthTexture: Texture, nSamples: number): SsaoRenderable {
+    const values: Values<typeof SsaoSchema> = {
+        ...QuadValues,
+        tDepth: ValueCell.create(depthTexture),
+
+        uSamples: ValueCell.create(getSamples(nSamples)),
+        dNSamples: ValueCell.create(nSamples),
+
+        uProjection: ValueCell.create(Mat4.identity()),
+        uInvProjection: ValueCell.create(Mat4.identity()),
+
+        uTexSize: ValueCell.create(Vec2.create(ctx.gl.drawingBufferWidth, ctx.gl.drawingBufferHeight)),
+
+        uRadius: ValueCell.create(8.0),
+        uBias: ValueCell.create(0.025),
+    };
+
+    const schema = { ...SsaoSchema };
+    const shaderCode = ShaderCode('ssao', quad_vert, ssao_frag);
+    const renderItem = createComputeRenderItem(ctx, 'triangles', shaderCode, schema, values);
+
+    return createComputeRenderable(renderItem, values);
+}
+
+const SsaoBlurSchema = {
+    ...QuadSchema,
+    tSsaoDepth: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
+    uTexSize: UniformSpec('v2'),
+
+    uKernel: UniformSpec('f[]'),
+    dOcclusionKernelSize: DefineSpec('number'),
+
+    uBlurDirectionX: UniformSpec('f'),
+    uBlurDirectionY: UniformSpec('f'),
+
+    uMaxPossibleViewZDiff: UniformSpec('f'),
+
+    uNear: UniformSpec('f'),
+    uFar: UniformSpec('f'),
+    dOrthographic: DefineSpec('number'),
+};
+
+type SsaoBlurRenderable = ComputeRenderable<Values<typeof SsaoBlurSchema>>
+
+function getSsaoBlurRenderable(ctx: WebGLContext, ssaoDepthTexture: Texture, blurKernelSize: number, direction: 'horizontal' | 'vertical'): SsaoBlurRenderable {
+    const values: Values<typeof SsaoBlurSchema> = {
+        ...QuadValues,
+        tSsaoDepth: ValueCell.create(ssaoDepthTexture),
+        uTexSize: ValueCell.create(Vec2.create(ssaoDepthTexture.getWidth(), ssaoDepthTexture.getHeight())),
+
+        uKernel: ValueCell.create(getBlurKernel(blurKernelSize)),
+        dOcclusionKernelSize: ValueCell.create(blurKernelSize),
+
+        uBlurDirectionX: ValueCell.create(direction === 'horizontal' ? 1 : 0),
+        uBlurDirectionY: ValueCell.create(direction === 'vertical' ? 1 : 0),
+
+        uMaxPossibleViewZDiff: ValueCell.create(0.5),
+
+        uNear: ValueCell.create(0.0),
+        uFar: ValueCell.create(10000.0),
+        dOrthographic: ValueCell.create(0),
+    };
+
+    const schema = { ...SsaoBlurSchema };
+    const shaderCode = ShaderCode('ssao_blur', quad_vert, ssao_blur_frag);
+    const renderItem = createComputeRenderItem(ctx, 'triangles', shaderCode, schema, values);
+
+    return createComputeRenderable(renderItem, values);
+}
+
+function getBlurKernel(kernelSize: number): number[] {
+    let sigma = kernelSize / 3.0;
+    let halfKernelSize = Math.floor((kernelSize + 1) / 2);
+
+    let kernel = [];
+    for (let x = 0; x < halfKernelSize; x++) {
+        kernel.push((1.0 / ((Math.sqrt(2 * Math.PI)) * sigma)) * Math.exp(-x * x / (2 * sigma * sigma)));
+    }
+
+    return kernel;
+}
+
+function getSamples(nSamples: number): number[] {
+    let vectorSamples = [];
+    for (let i = 0; i < nSamples; i++) {
+        let v = Vec3();
+
+        v[0] = Math.random() * 2.0 - 1.0;
+        v[1] = Math.random() * 2.0 - 1.0;
+        v[2] = Math.random();
+
+        Vec3.normalize(v, v);
+
+        Vec3.scale(v, v, Math.random());
+
+        let scale = (i * i) / (nSamples * nSamples);
+        scale = 0.1 + scale * (1.0 - 0.1);
+
+        Vec3.scale(v, v, scale);
+
+        vectorSamples.push(v);
+    }
+
+    let samples = [];
+    for (let i = 0; i < nSamples; i++) {
+        let v = vectorSamples[i];
+        samples.push(v[0]);
+        samples.push(v[1]);
+        samples.push(v[2]);
+    }
+
+    return samples;
+}
 
 const PostprocessingSchema = {
     ...QuadSchema,
+    tSsaoDepth: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
     tColor: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
-    tPackedDepth: TextureSpec('texture', 'depth', 'ushort', 'nearest'),
+    tDepth: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
+    tOutlines: TextureSpec('texture', 'rgba', 'ubyte', 'nearest'),
     uTexSize: UniformSpec('v2'),
 
     dOrthographic: DefineSpec('number'),
@@ -35,27 +206,26 @@ const PostprocessingSchema = {
     uFogFar: UniformSpec('f'),
     uFogColor: UniformSpec('v3'),
 
+    uMaxPossibleViewZDiff: UniformSpec('f'),
+
     dOcclusionEnable: DefineSpec('boolean'),
-    dOcclusionKernelSize: DefineSpec('number'),
-    uOcclusionBias: UniformSpec('f'),
-    uOcclusionRadius: UniformSpec('f'),
 
     dOutlineEnable: DefineSpec('boolean'),
     uOutlineScale: UniformSpec('f'),
     uOutlineThreshold: UniformSpec('f'),
+
+    dPackedDepth: DefineSpec('boolean'),
 };
-const PostprocessingShaderCode = ShaderCode('postprocessing', quad_vert, postprocessing_frag);
 type PostprocessingRenderable = ComputeRenderable<Values<typeof PostprocessingSchema>>
 
-function getPostprocessingRenderable(ctx: WebGLContext, colorTexture: Texture, depthTexture: Texture): PostprocessingRenderable {
-    const width = colorTexture.getWidth();
-    const height = colorTexture.getHeight();
-
+function getPostprocessingRenderable(ctx: WebGLContext, colorTexture: Texture, depthTexture: Texture, packedDepth: boolean, outlinesTexture: Texture, ssaoDepthTexture: Texture): PostprocessingRenderable {
     const values: Values<typeof PostprocessingSchema> = {
         ...QuadValues,
+        tSsaoDepth: ValueCell.create(ssaoDepthTexture),
         tColor: ValueCell.create(colorTexture),
-        tPackedDepth: ValueCell.create(depthTexture),
-        uTexSize: ValueCell.create(Vec2.create(width, height)),
+        tDepth: ValueCell.create(depthTexture),
+        tOutlines: ValueCell.create(outlinesTexture),
+        uTexSize: ValueCell.create(Vec2.create(colorTexture.getWidth(), colorTexture.getHeight())),
 
         dOrthographic: ValueCell.create(0),
         uNear: ValueCell.create(1),
@@ -64,18 +234,20 @@ function getPostprocessingRenderable(ctx: WebGLContext, colorTexture: Texture, d
         uFogFar: ValueCell.create(10000),
         uFogColor: ValueCell.create(Vec3.create(1, 1, 1)),
 
+        uMaxPossibleViewZDiff: ValueCell.create(0.5),
+
         dOcclusionEnable: ValueCell.create(false),
-        dOcclusionKernelSize: ValueCell.create(4),
-        uOcclusionBias: ValueCell.create(0.5),
-        uOcclusionRadius: ValueCell.create(64),
 
         dOutlineEnable: ValueCell.create(false),
-        uOutlineScale: ValueCell.create(1 * ctx.pixelRatio),
+        uOutlineScale: ValueCell.create(ctx.pixelRatio),
         uOutlineThreshold: ValueCell.create(0.8),
+
+        dPackedDepth: ValueCell.create(packedDepth),
     };
 
     const schema = { ...PostprocessingSchema };
-    const renderItem = createComputeRenderItem(ctx, 'triangles', PostprocessingShaderCode, schema, values);
+    const shaderCode = ShaderCode('postprocessing', quad_vert, postprocessing_frag);
+    const renderItem = createComputeRenderItem(ctx, 'triangles', shaderCode, schema, values);
 
     return createComputeRenderable(renderItem, values);
 }
@@ -83,16 +255,17 @@ function getPostprocessingRenderable(ctx: WebGLContext, colorTexture: Texture, d
 export const PostprocessingParams = {
     occlusion: PD.MappedStatic('off', {
         on: PD.Group({
-            kernelSize: PD.Numeric(4, { min: 1, max: 32, step: 1 }),
-            bias: PD.Numeric(0.5, { min: 0, max: 1, step: 0.01 }),
-            radius: PD.Numeric(64, { min: 0, max: 256, step: 1 }),
+            samples: PD.Numeric(64, {min: 1, max: 256, step: 1}),
+            radius: PD.Numeric(8.0, { min: 0.1, max: 64, step: 0.1 }),
+            bias: PD.Numeric(0.025, { min: 0, max: 1, step: 0.001 }),
+            kernelSize: PD.Numeric(13, { min: 1, max: 25, step: 2 }),
         }),
         off: PD.Group({})
     }, { cycle: true, description: 'Darken occluded crevices with the ambient occlusion effect' }),
     outline: PD.MappedStatic('off', {
         on: PD.Group({
-            scale: PD.Numeric(1, { min: 0, max: 10, step: 1 }),
-            threshold: PD.Numeric(0.8, { min: 0, max: 5, step: 0.01 }),
+            scale: PD.Numeric(1, { min: 0, max: 5, step: 1 }),
+            threshold: PD.Numeric(0.1, { min: 0.01, max: 1, step: 0.01 }),
         }),
         off: PD.Group({})
     }, { cycle: true, description: 'Draw outline around 3D objects' }),
@@ -115,35 +288,168 @@ export class PostprocessingPass {
 
     readonly target: RenderTarget
 
-    private readonly tmpTarget: RenderTarget
+    private readonly outlinesTarget: RenderTarget
+    private readonly outlinesRenderable: OutlinesRenderable
+
+    private readonly ssaoFramebuffer: Framebuffer
+    private readonly ssaoBlurFirstPassFramebuffer: Framebuffer
+    private readonly ssaoBlurSecondPassFramebuffer: Framebuffer
+
+    private readonly ssaoDepthTexture: Texture
+    private readonly ssaoDepthBlurProxyTexture: Texture
+
+    private readonly ssaoRenderable: SsaoRenderable
+    private readonly ssaoBlurFirstPassRenderable: SsaoBlurRenderable
+    private readonly ssaoBlurSecondPassRenderable: SsaoBlurRenderable
+
+    private nSamples: number
+    private blurKernelSize: number
+
     private readonly renderable: PostprocessingRenderable
-    private readonly fxaa: FxaaRenderable
 
-    constructor(private webgl: WebGLContext, private drawPass: DrawPass) {
-        const { colorTarget, depthTexture } = drawPass;
+    constructor(private webgl: WebGLContext, drawPass: DrawPass) {
+        const { colorTarget, depthTexture, packedDepth } = drawPass;
         const width = colorTarget.getWidth();
         const height = colorTarget.getHeight();
 
-        this.target = webgl.createRenderTarget(width, height, false);
-        this.tmpTarget = webgl.createRenderTarget(width, height, false, 'uint8', 'linear');
-        this.renderable = getPostprocessingRenderable(webgl, colorTarget.texture, depthTexture);
-        this.fxaa = getFxaaRenderable(webgl, this.tmpTarget.texture);
-    }
+        this.nSamples = 64;
+        this.blurKernelSize = 3;
+
+        this.target = webgl.createRenderTarget(width, height, false, 'uint8', 'linear');
 
-    syncSize() {
-        const width = this.drawPass.colorTarget.getWidth();
-        const height = this.drawPass.colorTarget.getHeight();
+        this.outlinesTarget = webgl.createRenderTarget(width, height, false);
+        this.outlinesRenderable = getOutlinesRenderable(webgl, depthTexture);
+
+        this.ssaoFramebuffer = webgl.resources.framebuffer();
+        this.ssaoBlurFirstPassFramebuffer = webgl.resources.framebuffer();
+        this.ssaoBlurSecondPassFramebuffer = webgl.resources.framebuffer();
+
+        this.ssaoDepthTexture = webgl.resources.texture('image-uint8', 'rgba', 'ubyte', 'nearest');
+        this.ssaoDepthTexture.define(width, height);
+        this.ssaoDepthTexture.attachFramebuffer(this.ssaoFramebuffer, 'color0');
+
+        this.ssaoDepthBlurProxyTexture = webgl.resources.texture('image-uint8', 'rgba', 'ubyte', 'nearest');
+        this.ssaoDepthBlurProxyTexture.define(width, height);
+        this.ssaoDepthBlurProxyTexture.attachFramebuffer(this.ssaoBlurFirstPassFramebuffer, 'color0');
+
+        this.ssaoDepthTexture.attachFramebuffer(this.ssaoBlurSecondPassFramebuffer, 'color0');
+
+        this.ssaoRenderable = getSsaoRenderable(webgl, depthTexture, this.nSamples);
+        this.ssaoBlurFirstPassRenderable = getSsaoBlurRenderable(webgl, this.ssaoDepthTexture, this.blurKernelSize, 'horizontal');
+        this.ssaoBlurSecondPassRenderable = getSsaoBlurRenderable(webgl, this.ssaoDepthBlurProxyTexture, this.blurKernelSize, 'vertical');
+        this.renderable = getPostprocessingRenderable(webgl, colorTarget.texture,  depthTexture, packedDepth, this.outlinesTarget.texture, this.ssaoDepthTexture);
+    }
 
+    setSize(width: number, height: number) {
         const [w, h] = this.renderable.values.uTexSize.ref.value;
         if (width !== w || height !== h) {
             this.target.setSize(width, height);
-            this.tmpTarget.setSize(width, height);
+            this.outlinesTarget.setSize(width, height);
+            this.ssaoDepthTexture.define(width, height);
+            this.ssaoDepthBlurProxyTexture.define(width, height);
+
             ValueCell.update(this.renderable.values.uTexSize, Vec2.set(this.renderable.values.uTexSize.ref.value, width, height));
-            ValueCell.update(this.fxaa.values.uTexSizeInv, Vec2.set(this.fxaa.values.uTexSizeInv.ref.value, 1 / width, 1 / height));
+            ValueCell.update(this.outlinesRenderable.values.uTexSize, Vec2.set(this.outlinesRenderable.values.uTexSize.ref.value, width, height));
+            ValueCell.update(this.ssaoRenderable.values.uTexSize, Vec2.set(this.ssaoRenderable.values.uTexSize.ref.value, width, height));
+            ValueCell.update(this.ssaoBlurFirstPassRenderable.values.uTexSize, Vec2.set(this.ssaoRenderable.values.uTexSize.ref.value, width, height));
+            ValueCell.update(this.ssaoBlurSecondPassRenderable.values.uTexSize, Vec2.set(this.ssaoRenderable.values.uTexSize.ref.value, width, height));
         }
     }
 
-    private updateState(camera: ICamera) {
+    private updateState(camera: ICamera, backgroundColor: Color, props: PostprocessingProps) {
+        let needsUpdateMain = false;
+        let needsUpdateSsao = false;
+        let needsUpdateSsaoBlur = false;
+
+        let orthographic = camera.state.mode === 'orthographic' ? 1 : 0;
+        let outlinesEnabled = props.outline.name === 'on';
+        let occlusionEnabled = props.occlusion.name === 'on';
+
+        if (props.occlusion.name === 'on') {
+            let invProjection = Mat4.identity();
+            Mat4.invert(invProjection, camera.projection);
+            ValueCell.updateIfChanged(this.ssaoRenderable.values.uProjection, camera.projection);
+            ValueCell.updateIfChanged(this.ssaoRenderable.values.uInvProjection, invProjection);
+
+            ValueCell.updateIfChanged(this.ssaoBlurFirstPassRenderable.values.uNear, camera.near);
+            ValueCell.updateIfChanged(this.ssaoBlurSecondPassRenderable.values.uNear, camera.near);
+
+            ValueCell.updateIfChanged(this.ssaoBlurFirstPassRenderable.values.uFar, camera.far);
+            ValueCell.updateIfChanged(this.ssaoBlurSecondPassRenderable.values.uFar, camera.far);
+
+            if (this.ssaoBlurFirstPassRenderable.values.dOrthographic.ref.value !== orthographic) { needsUpdateSsaoBlur = true; }
+            ValueCell.updateIfChanged(this.ssaoBlurFirstPassRenderable.values.dOrthographic, orthographic);
+            ValueCell.updateIfChanged(this.ssaoBlurSecondPassRenderable.values.dOrthographic, orthographic);
+
+            if (this.nSamples !== props.occlusion.params.samples) {
+                needsUpdateSsao = true;
+
+                this.nSamples = props.occlusion.params.samples;
+                ValueCell.updateIfChanged(this.ssaoRenderable.values.uSamples, getSamples(this.nSamples));
+                ValueCell.updateIfChanged(this.ssaoRenderable.values.dNSamples, this.nSamples);
+            }
+            ValueCell.updateIfChanged(this.ssaoRenderable.values.uRadius, props.occlusion.params.radius);
+            ValueCell.updateIfChanged(this.ssaoRenderable.values.uBias, props.occlusion.params.bias);
+
+            if (this.blurKernelSize !== props.occlusion.params.kernelSize) {
+                needsUpdateSsaoBlur = true;
+
+                this.blurKernelSize = props.occlusion.params.kernelSize;
+                let kernel = getBlurKernel(this.blurKernelSize);
+
+                ValueCell.updateIfChanged(this.ssaoBlurFirstPassRenderable.values.uKernel, kernel);
+                ValueCell.updateIfChanged(this.ssaoBlurSecondPassRenderable.values.uKernel, kernel);
+                ValueCell.updateIfChanged(this.ssaoBlurFirstPassRenderable.values.dOcclusionKernelSize, this.blurKernelSize);
+                ValueCell.updateIfChanged(this.ssaoBlurSecondPassRenderable.values.dOcclusionKernelSize, this.blurKernelSize);
+            }
+
+        }
+
+        if (props.outline.name === 'on') {
+            let maxPossibleViewZDiff = props.outline.params.threshold * (camera.fogFar - camera.near);
+
+            ValueCell.updateIfChanged(this.outlinesRenderable.values.uNear, camera.near);
+            ValueCell.updateIfChanged(this.outlinesRenderable.values.uFar, camera.far);
+            ValueCell.updateIfChanged(this.outlinesRenderable.values.uMaxPossibleViewZDiff, maxPossibleViewZDiff);
+
+            ValueCell.updateIfChanged(this.renderable.values.uMaxPossibleViewZDiff, maxPossibleViewZDiff);
+            ValueCell.updateIfChanged(this.renderable.values.uFogColor, Color.toVec3Normalized(this.renderable.values.uFogColor.ref.value, backgroundColor));
+            ValueCell.updateIfChanged(this.renderable.values.uOutlineScale, props.outline.params.scale);
+            ValueCell.updateIfChanged(this.renderable.values.uOutlineThreshold, props.outline.params.threshold);
+        }
+
+        ValueCell.updateIfChanged(this.renderable.values.uFar, camera.far);
+        ValueCell.updateIfChanged(this.renderable.values.uNear, camera.near);
+        ValueCell.updateIfChanged(this.renderable.values.uFogFar, camera.fogFar);
+        ValueCell.updateIfChanged(this.renderable.values.uFogNear, camera.fogNear);
+        if (this.renderable.values.dOrthographic.ref.value !== orthographic) { needsUpdateMain = true; }
+        ValueCell.updateIfChanged(this.renderable.values.dOrthographic, orthographic);
+        if (this.renderable.values.dOutlineEnable.ref.value !== outlinesEnabled) { needsUpdateMain = true; }
+        ValueCell.updateIfChanged(this.renderable.values.dOutlineEnable, outlinesEnabled);
+        if (this.renderable.values.dOcclusionEnable.ref.value !== occlusionEnabled) { needsUpdateMain = true; }
+        ValueCell.updateIfChanged(this.renderable.values.dOcclusionEnable, occlusionEnabled);
+
+        if (needsUpdateSsao) {
+            this.ssaoRenderable.update();
+        }
+
+        if (needsUpdateSsaoBlur) {
+            this.ssaoBlurFirstPassRenderable.update();
+            this.ssaoBlurSecondPassRenderable.update();
+        }
+
+        if (needsUpdateMain) {
+            this.renderable.update();
+        }
+
+        const { gl, state } = this.webgl;
+
+        state.disable(gl.SCISSOR_TEST);
+        state.disable(gl.BLEND);
+        state.disable(gl.DEPTH_TEST);
+        state.depthMask(false);
+
+        /*
         const { gl, state } = this.webgl;
 
         state.disable(gl.SCISSOR_TEST);
@@ -155,11 +461,43 @@ export class PostprocessingPass {
         gl.viewport(x, y, width, height);
         gl.scissor(x, y, width, height);
 
+        state.clearColor(0, 0, 0, 1);
+        gl.clear(gl.COLOR_BUFFER_BIT);*/
+    }
+
+    render(camera: ICamera, toDrawingBuffer: boolean, backgroundColor: Color, props: PostprocessingProps) {
+        this.updateState(camera, backgroundColor, props);
+
+        if (props.outline.name === 'on') {
+            this.outlinesTarget.bind();
+            this.outlinesRenderable.render();
+        }
+
+        if (props.occlusion.name === 'on') {
+            this.ssaoFramebuffer.bind();
+            this.ssaoRenderable.render();
+
+            this.ssaoBlurFirstPassFramebuffer.bind();
+            this.ssaoBlurFirstPassRenderable.render();
+
+            this.ssaoBlurSecondPassFramebuffer.bind();
+            this.ssaoBlurSecondPassRenderable.render();
+        }
+
+        if (toDrawingBuffer) {
+            this.webgl.unbindFramebuffer();
+        } else {
+            this.target.bind();
+        }
+
+        const { gl, state } = this.webgl;
         state.clearColor(0, 0, 0, 1);
         gl.clear(gl.COLOR_BUFFER_BIT);
+
+        this.renderable.render();
     }
 
-    private _renderPostprocessing(camera: ICamera, toDrawingBuffer: boolean, props: PostprocessingProps) {
+    _render(camera: ICamera, toDrawingBuffer: boolean, props: PostprocessingProps) {
         const { values } = this.renderable;
 
         ValueCell.updateIfChanged(values.uFar, camera.far);
@@ -177,11 +515,11 @@ export class PostprocessingPass {
         if (values.dOcclusionEnable.ref.value !== occlusion) needsUpdate = true;
         ValueCell.updateIfChanged(this.renderable.values.dOcclusionEnable, occlusion);
         if (props.occlusion.name === 'on') {
-            const { kernelSize } = props.occlusion.params;
-            if (values.dOcclusionKernelSize.ref.value !== kernelSize) needsUpdate = true;
-            ValueCell.updateIfChanged(values.dOcclusionKernelSize, kernelSize);
-            ValueCell.updateIfChanged(values.uOcclusionBias, props.occlusion.params.bias);
-            ValueCell.updateIfChanged(values.uOcclusionRadius, props.occlusion.params.radius);
+            // const { kernelSize } = props.occlusion.params;
+            // if (values.dOcclusionKernelSize.ref.value !== kernelSize) needsUpdate = true;
+            // ValueCell.updateIfChanged(values.dOcclusionKernelSize, kernelSize);
+            // ValueCell.updateIfChanged(values.uOcclusionBias, props.occlusion.params.bias);
+            // ValueCell.updateIfChanged(values.uOcclusionRadius, props.occlusion.params.radius);
         }
 
         const outline = props.outline.name === 'on';
@@ -196,29 +534,70 @@ export class PostprocessingPass {
             this.renderable.update();
         }
 
-        if (props.antialiasing.name === 'on') {
-            this.tmpTarget.bind();
-        } else if (toDrawingBuffer) {
+        if (toDrawingBuffer) {
             this.webgl.unbindFramebuffer();
         } else {
             this.target.bind();
         }
 
-        this.updateState(camera);
+        // this.updateState(camera);
         this.renderable.render();
     }
+}
+
+export class FxaaPass {
+    static isEnabled(props: PostprocessingProps) {
+        return props.antialiasing.name === 'on';
+    }
+
+    readonly target: RenderTarget
+    private readonly renderable: FxaaRenderable
 
-    private _renderFxaa(camera: ICamera, toDrawingBuffer: boolean, props: PostprocessingProps) {
+    constructor(private webgl: WebGLContext, private drawPass: DrawPass) {
+        const { colorTarget } = drawPass;
+        const width = colorTarget.getWidth();
+        const height = colorTarget.getHeight();
+
+        this.target = webgl.createRenderTarget(width, height, false);
+        this.renderable = getFxaaRenderable(webgl, drawPass.colorTarget.texture);
+    }
+
+    setSize(width: number, height: number) {
+        const [w, h] = [this.target.texture.getWidth(), this.target.texture.getHeight()];
+        if (width !== w || height !== h) {
+            this.target.setSize(width, height);
+            ValueCell.update(this.renderable.values.uTexSizeInv, Vec2.set(this.renderable.values.uTexSizeInv.ref.value, 1 / width, 1 / height));
+        }
+    }
+
+    private updateState(camera: ICamera) {
+        const { gl, state } = this.webgl;
+
+        state.disable(gl.SCISSOR_TEST);
+        state.disable(gl.BLEND);
+        state.disable(gl.DEPTH_TEST);
+        state.depthMask(false);
+
+        const { x, y, width, height } = camera.viewport;
+        gl.viewport(x, y, width, height);
+        gl.scissor(x, y, width, height);
+
+        state.clearColor(0, 0, 0, 1);
+        gl.clear(gl.COLOR_BUFFER_BIT);
+    }
+
+    render(camera: ICamera, toDrawingBuffer: boolean, props: PostprocessingProps) {
         if (props.antialiasing.name === 'off') return;
 
-        const { values } = this.fxaa;
+        const { values } = this.renderable;
 
         let needsUpdate = false;
 
-        const input = (props.occlusion.name === 'on' || props.outline.name === 'on')
-            ? this.tmpTarget.texture : this.drawPass.colorTarget.texture;
-        if (values.tColor.ref.value !== input) {
-            ValueCell.update(this.fxaa.values.tColor, input);
+        if (PostprocessingPass.isEnabled(props)) {
+            ValueCell.update(this.renderable.values.tColor, this.drawPass.postprocessing.target.texture);
+            needsUpdate = true;
+        } else {
+            ValueCell.update(this.renderable.values.tColor, this.drawPass.colorTarget.texture);
             needsUpdate = true;
         }
 
@@ -233,7 +612,7 @@ export class PostprocessingPass {
         ValueCell.updateIfChanged(values.dSubpixelQuality, subpixelQuality);
 
         if (needsUpdate) {
-            this.fxaa.update();
+            this.renderable.update();
         }
 
         if (toDrawingBuffer) {
@@ -243,26 +622,7 @@ export class PostprocessingPass {
         }
 
         this.updateState(camera);
-        this.fxaa.render();
-    }
-
-    private _render(camera: ICamera, toDrawingBuffer: boolean, props: PostprocessingProps) {
-        if (props.occlusion.name === 'on' || props.outline.name === 'on' || props.antialiasing.name === 'off') {
-            this._renderPostprocessing(camera, toDrawingBuffer, props);
-        }
-
-        if (props.antialiasing.name === 'on') {
-            this._renderFxaa(camera, toDrawingBuffer, props);
-        }
-    }
-
-    render(camera: Camera | StereoCamera, toDrawingBuffer: boolean, props: PostprocessingProps) {
-        if (StereoCamera.is(camera)) {
-            this._render(camera.left, toDrawingBuffer, props);
-            this._render(camera.right, toDrawingBuffer, props);
-        } else {
-            this._render(camera, toDrawingBuffer, props);
-        }
+        this.renderable.render();
     }
 }
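
The occlusion buffer is smoothed with a separable Gaussian blur whose half-kernel comes from getBlurKernel: sigma = kernelSize / 3 and only floor((kernelSize + 1) / 2) taps are generated, with the negative offsets presumably mirrored inside ssao-blur.frag (that shader is not part of this excerpt). A standalone reproduction of the computation for the new default kernelSize of 13; the weights are raw Gaussian values, not normalized here:

    // Reproduces getBlurKernel above, outside the pass, to make the numbers visible.
    function gaussianHalfKernel(kernelSize: number): number[] {
        const sigma = kernelSize / 3.0;
        const halfKernelSize = Math.floor((kernelSize + 1) / 2);
        const kernel: number[] = [];
        for (let x = 0; x < halfKernelSize; x++) {
            kernel.push((1.0 / (Math.sqrt(2 * Math.PI) * sigma)) * Math.exp(-x * x / (2 * sigma * sigma)));
        }
        return kernel;
    }

    // kernelSize = 13 -> sigma ≈ 4.33, 7 taps, weights ≈ 0.092 (center) down to ≈ 0.035 (offset 6)
    console.log(gaussianHalfKernel(13).map(v => v.toFixed(3)));
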
 

+ 19 - 0
src/mol-gl/shader/chunks/common.glsl.ts

@@ -49,6 +49,25 @@ float decodeFloatRGB(const in vec3 rgb) {
     return (rgb.r * 256.0 * 256.0 * 255.0 + rgb.g * 256.0 * 255.0 + rgb.b * 255.0) - 1.0;
 }
 
+vec2 packUnitIntervalToRG(const in float v) {
+    vec2 enc;
+    enc.xy = vec2(fract(v * 256.0), v);
+	enc.y -= enc.x * (1.0 / 256.0);
+    enc.xy *=  256.0 / 255.0;
+
+    return enc;
+}
+
+float unpackRGToUnitInterval(const in vec2 enc) {
+    return dot(enc, vec2(255.0 / (256.0 * 256.0), 255.0 / 256.0));
+}
+
+vec3 screenSpaceToViewSpace(const in vec3 ssPos, const in mat4 invProjection) {
+    vec4 p = vec4(ssPos * 2.0 - 1.0, 1.0);
+    p = invProjection * p;
+    return p.xyz / p.w;
+}
+
 const float PackUpscale = 256.0 / 255.0; // fraction -> 0..1 (including 1)
 const float UnpackDownscale = 255.0 / 256.0; // 0..1 -> fraction (excluding 1)
 const vec3 PackFactors = vec3(256.0 * 256.0 * 256.0, 256.0 * 256.0,  256.0);
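
packUnitIntervalToRG stores a value in [0, 1) as two 8-bit channels: the high part floor(v * 256) / 256 goes to the second channel, the fractional remainder to the first, and both are rescaled by 256/255 before storage; unpackRGToUnitInterval is the matching dot-product inverse, giving roughly 16-bit precision. A CPU round-trip of the same arithmetic, with channels quantized to 8 bits to mimic storage in a ubyte texture:

    // CPU round-trip of the RG packing used for the SSAO and outline depth values above.
    function packUnitIntervalToRG(v: number): [number, number] {
        const fract = (x: number) => x - Math.floor(x);
        let x = fract(v * 256.0);
        let y = v - x / 256.0; // == floor(v * 256) / 256
        x *= 256.0 / 255.0;
        y *= 256.0 / 255.0;
        // quantize to 8 bits, as a ubyte render target would
        const q = (c: number) => Math.round(Math.min(c, 1) * 255) / 255;
        return [q(x), q(y)];
    }

    function unpackRGToUnitInterval(enc: [number, number]): number {
        return enc[0] * (255.0 / (256.0 * 256.0)) + enc[1] * (255.0 / 256.0);
    }

    const v = 0.123456;
    const roundTripError = Math.abs(unpackRGToUnitInterval(packUnitIntervalToRG(v)) - v);
    console.log(roundTripError); // on the order of 1 / 65536
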

+ 20 - 0
src/mol-gl/shader/copy-fbo.frag.ts

@@ -0,0 +1,20 @@
+export default `
+precision highp float;
+precision highp sampler2D;
+
+uniform sampler2D tColor;
+uniform sampler2D tDepth;
+uniform vec2 uTexSize;
+
+#include common
+
+float getDepth(const in vec2 coords) {
+    return unpackRGBAToDepth(texture2D(tDepth, coords));
+}
+
+void main() {
+    vec2 coords = gl_FragCoord.xy / uTexSize;
+    gl_FragColor = texture2D(tColor, coords);
+    gl_FragDepthEXT = getDepth(coords);
+}
+`;

+ 67 - 0
src/mol-gl/shader/outlines.frag.ts

@@ -0,0 +1,67 @@
+export default `
+precision highp float;
+precision highp int;
+precision highp sampler2D;
+
+uniform sampler2D tDepth;
+uniform vec2 uTexSize;
+
+uniform float uNear;
+uniform float uFar;
+
+uniform float uMaxPossibleViewZDiff;
+
+#include common
+
+float perspectiveDepthToViewZ(const in float invClipZ, const in float near, const in float far) {
+	return (near * far) / ((far - near) * invClipZ - far);
+}
+
+float orthographicDepthToViewZ(const in float linearClipZ, const in float near, const in float far) {
+	return linearClipZ * (near - far) - near;
+}
+
+float getViewZ(const in float depth) {
+	#if dOrthographic == 1
+		return orthographicDepthToViewZ(depth, uNear, uFar);
+	#else
+		return perspectiveDepthToViewZ(depth, uNear, uFar);
+	#endif
+}
+
+float getDepth(const in vec2 coords) {
+	return unpackRGBAToDepth(texture2D(tDepth, coords));
+}
+
+bool isBackground(const in float depth) {
+    return depth >= 0.99;
+}
+
+void main(void) {
+    float backgroundViewZ = uFar + 3.0 * uMaxPossibleViewZDiff;
+	
+	vec2 coords = gl_FragCoord.xy / uTexSize;
+    vec2 invTexSize = 1.0 / uTexSize;
+
+	float selfDepth = getDepth(coords);
+	float selfViewZ = isBackground(selfDepth) ? backgroundViewZ : getViewZ(getDepth(coords));
+
+	float outline = 1.0;
+	float bestDepth = 1.0;
+
+	for (int y = -1; y <= 1; y++) {
+		for (int x = -1; x <= 1; x++) {
+			vec2 sampleCoords = coords + vec2(float(x), float(y)) * invTexSize;
+			float sampleDepth = getDepth(sampleCoords);
+			float sampleViewZ = isBackground(sampleDepth) ? backgroundViewZ : getViewZ(sampleDepth);
+
+			if (abs(selfViewZ - sampleViewZ) > uMaxPossibleViewZDiff && selfDepth > sampleDepth && sampleDepth <= bestDepth) {
+				outline = 0.0;
+				bestDepth = sampleDepth;
+			}
+		}
+	}
+
+	gl_FragColor = vec4(outline, packUnitIntervalToRG(bestDepth), 0.0);
+}
+`;

+ 80 - 77
src/mol-gl/shader/postprocessing.frag.ts

@@ -3,8 +3,10 @@ precision highp float;
 precision highp int;
 precision highp sampler2D;
 
+uniform sampler2D tSsaoDepth;
 uniform sampler2D tColor;
-uniform sampler2D tPackedDepth;
+uniform sampler2D tDepth;
+uniform sampler2D tOutlines;
 uniform vec2 uTexSize;
 
 uniform float uNear;
@@ -19,103 +21,104 @@ uniform float uOcclusionRadius;
 uniform float uOutlineScale;
 uniform float uOutlineThreshold;
 
-const float noiseAmount = 0.0002;
+uniform float uMaxPossibleViewZDiff;
+
 const vec4 occlusionColor = vec4(0.0, 0.0, 0.0, 1.0);
 
 #include common
 
-float noise(const in vec2 coords) {
-    float a = 12.9898;
-    float b = 78.233;
-    float c = 43758.5453;
-    float dt = dot(coords, vec2(a,b));
-    float sn = mod(dt, 3.14159);
-
-    return fract(sin(sn) * c);
-}
-
 float perspectiveDepthToViewZ(const in float invClipZ, const in float near, const in float far) {
-    return (near * far) / ((far - near) * invClipZ - far);
+	return (near * far) / ((far - near) * invClipZ - far);
 }
 
 float orthographicDepthToViewZ(const in float linearClipZ, const in float near, const in float far) {
-    return linearClipZ * (near - far) - near;
+	return linearClipZ * (near - far) - near;
 }
 
 float getViewZ(const in float depth) {
-    #if dOrthographic == 1
-        return orthographicDepthToViewZ(depth, uNear, uFar);
-    #else
-        return perspectiveDepthToViewZ(depth, uNear, uFar);
-    #endif
+	#if dOrthographic == 1
+		return orthographicDepthToViewZ(depth, uNear, uFar);
+	#else
+		return perspectiveDepthToViewZ(depth, uNear, uFar);
+	#endif
 }
 
 float getDepth(const in vec2 coords) {
-    return unpackRGBAToDepth(texture2D(tPackedDepth, coords));
+	return unpackRGBAToDepth(texture2D(tDepth, coords));
 }
 
-float calcSSAO(const in vec2 coords, const in float depth) {
-    float occlusionFactor = 0.0;
-
-    for (int i = -dOcclusionKernelSize; i <= dOcclusionKernelSize; i++) {
-        for (int j = -dOcclusionKernelSize; j <= dOcclusionKernelSize; j++) {
-            vec2 coordsDelta = coords + uOcclusionRadius / float(dOcclusionKernelSize) * vec2(float(i) / uTexSize.x, float(j) / uTexSize.y);
-            coordsDelta += noiseAmount * (noise(coordsDelta) - 0.5) / uTexSize;
-            coordsDelta = clamp(coordsDelta, 0.5 / uTexSize, 1.0 - 1.0 / uTexSize);
-            if (getDepth(coordsDelta) < depth) occlusionFactor += 1.0;
-        }
-    }
-
-    return occlusionFactor / float((2 * dOcclusionKernelSize + 1) * (2 * dOcclusionKernelSize + 1));
+bool isBackground(const in float depth) {
+    return depth >= 0.99;
 }
 
-vec2 calcEdgeDepth(const in vec2 coords) {
-    vec2 invTexSize = 1.0 / uTexSize;
-    float halfScaleFloor = floor(uOutlineScale * 0.5);
-    float halfScaleCeil = ceil(uOutlineScale * 0.5);
-
-    vec2 bottomLeftUV = coords - invTexSize * halfScaleFloor;
-    vec2 topRightUV = coords + invTexSize * halfScaleCeil;
-    vec2 bottomRightUV = coords + vec2(invTexSize.x * halfScaleCeil, -invTexSize.y * halfScaleFloor);
-    vec2 topLeftUV = coords + vec2(-invTexSize.x * halfScaleFloor, invTexSize.y * halfScaleCeil);
-
-    float depth0 = getDepth(bottomLeftUV);
-    float depth1 = getDepth(topRightUV);
-    float depth2 = getDepth(bottomRightUV);
-    float depth3 = getDepth(topLeftUV);
-
-    float depthFiniteDifference0 = depth1 - depth0;
-    float depthFiniteDifference1 = depth3 - depth2;
+float getOutline(const in vec2 coords, out float closestTexel) {
+	float backgroundViewZ = uFar + 3.0 * uMaxPossibleViewZDiff;
+	vec2 invTexSize = 1.0 / uTexSize;
+
+	float selfDepth = getDepth(coords);
+	float selfViewZ = isBackground(selfDepth) ? backgroundViewZ : getViewZ(selfDepth);
+
+	float outline = 1.0;
+	closestTexel = 1.0;
+	for (float y = -uOutlineScale; y <= uOutlineScale; y++) {
+		for (float x = -uOutlineScale; x <= uOutlineScale; x++) {
+			if (x * x + y * y > uOutlineScale * uOutlineScale) {
+				continue;
+			}
+
+			vec2 sampleCoords = coords + vec2(x, y) * invTexSize;
+
+			vec4 sampleOutlineCombined = texture2D(tOutlines, sampleCoords);
+			float sampleOutline = sampleOutlineCombined.r;
+			float sampleOutlineDepth = unpackRGToUnitInterval(sampleOutlineCombined.gb);
+
+			if (sampleOutline == 0.0 && sampleOutlineDepth < closestTexel && abs(selfViewZ - sampleOutlineDepth) > uMaxPossibleViewZDiff) {
+				outline = 0.0;
+				closestTexel = sampleOutlineDepth;
+			}
+		}
+	}
+	return outline;
+}
 
-    return vec2(
-        sqrt(pow(depthFiniteDifference0, 2.0) + pow(depthFiniteDifference1, 2.0)) * 100.0,
-        min(depth0, min(depth1, min(depth2, depth3)))
-    );
+float getSsao(vec2 coords) {
+	float rawSsao = unpackRGToUnitInterval(texture2D(tSsaoDepth, coords).xy);
+	if (rawSsao > 0.999) {
+		return 1.0;
+	} else if (rawSsao > 0.001) {
+		return rawSsao;
+	}
+	return 0.0;
 }
 
 void main(void) {
-    vec2 coords = gl_FragCoord.xy / uTexSize;
-    vec4 color = texture2D(tColor, coords);
-
-    #ifdef dOutlineEnable
-        vec2 edgeDepth = calcEdgeDepth(coords);
-        float edgeFlag = step(edgeDepth.x, uOutlineThreshold);
-        color.rgb *= edgeFlag;
-
-        float viewDist = abs(getViewZ(edgeDepth.y));
-        float fogFactor = smoothstep(uFogNear, uFogFar, viewDist) * (1.0 - edgeFlag);
-        color.rgb = mix(color.rgb, uFogColor, fogFactor);
-    #endif
-
-    // occlusion needs to be handled after outline to darken them properly
-    #ifdef dOcclusionEnable
-        float depth = getDepth(coords);
-        if (depth <= 0.99) {
-            float occlusionFactor = calcSSAO(coords, depth);
-            color = mix(color, occlusionColor, uOcclusionBias * occlusionFactor);
-        }
-    #endif
-
-    gl_FragColor = color;
+	vec2 coords = gl_FragCoord.xy / uTexSize;
+	vec4 color = texture2D(tColor, coords);
+
+	#ifdef dOutlineEnable
+		float closestTexel;
+		float outline = getOutline(coords, closestTexel);
+
+		if (outline == 0.0) {
+			color.rgb *= outline;
+			float viewDist = abs(getViewZ(closestTexel));
+			float fogFactor = smoothstep(uFogNear, uFogFar, viewDist);
+			if (color.a != 1.0) {
+				color.a = 1.0 - fogFactor;
+			}
+			color.rgb = mix(color.rgb, vec3(1.0), fogFactor);
+		}
+	#endif
+
+	// occlusion is applied after the outline so that outline pixels are darkened properly
+	#ifdef dOcclusionEnable
+		float depth = getDepth(coords);
+		if (!isBackground(depth)) {
+			float occlusionFactor = getSsao(coords);
+			color = mix(occlusionColor, color, occlusionFactor);
+		}
+	#endif
+
+	gl_FragColor = color;
 }
 `;
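
In the reworked composite above, an outlined pixel is first multiplied to black and then faded with the same smoothstep fog curve as the scene, using the view distance of the closest outline texel; the occlusion branch is a plain mix towards occlusionColor, so a factor of 1.0 leaves the pixel untouched and smaller factors darken it. A rough TypeScript sketch of the outline branch follows, assuming (as the shader currently hard-codes) a white background; the helper name is illustrative only.

function smoothstep(edge0: number, edge1: number, x: number): number {
    const t = Math.min(Math.max((x - edge0) / (edge1 - edge0), 0.0), 1.0);
    return t * t * (3.0 - 2.0 * t);
}

// equivalent of: color.rgb *= 0.0; then mix(color.rgb, vec3(1.0), fogFactor)
function outlineRgb(viewDist: number, fogNear: number, fogFar: number): [number, number, number] {
    const fogFactor = smoothstep(fogNear, fogFar, viewDist);
    return [fogFactor, fogFactor, fogFactor];
}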

+ 86 - 0
src/mol-gl/shader/ssao-blur.frag.ts

@@ -0,0 +1,86 @@
+export default `
+precision highp float;
+precision highp int;
+precision highp sampler2D;
+
+uniform sampler2D tSsaoDepth;
+uniform vec2 uTexSize;
+
+uniform float uKernel[dOcclusionKernelSize];
+
+uniform float uBlurDirectionX;
+uniform float uBlurDirectionY;
+
+uniform float uMaxPossibleViewZDiff;
+
+uniform float uNear;
+uniform float uFar;
+
+#include common
+
+float perspectiveDepthToViewZ(const in float invClipZ, const in float near, const in float far) {
+	return (near * far) / ((far - near) * invClipZ - far);
+}
+
+float orthographicDepthToViewZ(const in float linearClipZ, const in float near, const in float far) {
+	return linearClipZ * (near - far) - near;
+}
+
+float getViewZ(const in float depth) {
+	#if dOrthographic == 1
+		return orthographicDepthToViewZ(depth, uNear, uFar);
+	#else
+		return perspectiveDepthToViewZ(depth, uNear, uFar);
+	#endif
+}
+
+bool isBackground(const in float depth) {
+    return depth >= 0.99;
+}
+
+void main(void) {
+	vec2 coords = gl_FragCoord.xy / uTexSize;
+
+    vec2 packedDepth = texture2D(tSsaoDepth, coords).zw;
+
+    float selfDepth = unpackRGToUnitInterval(packedDepth);
+    // background texels: on the second (vertical) blur pass, output no occlusion and return early
+	if (isBackground(selfDepth) && uBlurDirectionY != 0.0) {
+       gl_FragColor = vec4(packUnitIntervalToRG(1.0), packedDepth);
+       return;
+    }
+
+    float selfViewZ = getViewZ(selfDepth);
+
+    vec2 offset = vec2(uBlurDirectionX, uBlurDirectionY) / uTexSize;
+
+    float sum = 0.0;
+    float kernelSum = 0.0;
+    // assumes dOcclusionKernelSize is odd so the loop below is centered on the current texel
+    for (int i = -dOcclusionKernelSize / 2; i <= dOcclusionKernelSize / 2; i++) {
+        vec2 sampleCoords = coords + float(i) * offset;
+
+        vec4 sampleSsaoDepth = texture2D(tSsaoDepth, sampleCoords);
+
+        float sampleDepth = unpackRGToUnitInterval(sampleSsaoDepth.zw);
+        if (isBackground(sampleDepth)) {
+            continue;
+        }
+
+        if (abs(i) > 1) {
+            float sampleViewZ = getViewZ(sampleDepth);
+            if (abs(selfViewZ - sampleViewZ) > uMaxPossibleViewZDiff) {
+                continue;
+            }
+        }
+
+        float kernel = uKernel[abs(i)];
+        float sampleValue = unpackRGToUnitInterval(sampleSsaoDepth.xy);
+
+        sum += kernel * sampleValue;
+        kernelSum += kernel;
+    }
+    
+    gl_FragColor = vec4(packUnitIntervalToRG(sum / kernelSum), packedDepth);
+}
+`;
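
The blur shader is separable: it is meant to run twice, once horizontally and once vertically (the uBlurDirectionY != 0.0 check identifies the second pass), weighting neighbors with the half-kernel in uKernel and skipping samples that lie across a depth discontinuity larger than uMaxPossibleViewZDiff. The kernel itself has to be supplied from the CPU; the generator below is a hypothetical TypeScript sketch (it is not part of this diff, and the sigma choice is an assumption), filling dOcclusionKernelSize entries of which only indices 0 .. kernelSize / 2 are read via uKernel[abs(i)].

function getBlurKernel(kernelSize: number): number[] {
    const sigma = kernelSize / 3.0; // assumption: sigma derived from the kernel size
    const kernel: number[] = [];
    for (let x = 0; x < kernelSize; x++) {
        // unnormalized Gaussian weights are fine here: the shader divides by kernelSum
        kernel.push(Math.exp(-(x * x) / (2.0 * sigma * sigma)));
    }
    return kernel;
}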

+ 103 - 0
src/mol-gl/shader/ssao.frag.ts

@@ -0,0 +1,103 @@
+export default `
+precision highp float;
+precision highp int;
+precision highp sampler2D;
+
+#include common
+
+uniform sampler2D tDepth;
+
+uniform vec3 uSamples[dNSamples];
+
+uniform mat4 uProjection;
+uniform mat4 uInvProjection;
+
+uniform vec2 uTexSize;
+
+uniform float uRadius;
+uniform float uBias;
+
+float smootherstep(float edge0, float edge1, float x) {
+	x = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0);
+	return x * x * x * (x * (x * 6.0 - 15.0) + 10.0);
+}
+
+float noise(const in vec2 coords) {
+	float a = 12.9898;
+	float b = 78.233;
+	float c = 43758.5453;
+	float dt = dot(coords, vec2(a,b));
+	float sn = mod(dt, 3.14159);
+	return abs(fract(sin(sn) * c)); // fract() is already in [0, 1), so abs() is only defensive
+}
+
+vec2 getNoiseVec2(const in vec2 coords) {
+	return vec2(noise(coords), noise(coords) + 2.71828);
+}
+
+bool isBackground(const in float depth) {
+    return depth >= 0.99;
+}
+
+float getDepth(const in vec2 coords) {
+	return unpackRGBAToDepth(texture2D(tDepth, coords));
+}
+
+vec3 normalFromDepth(const in float depth, const in float depth1, const in float depth2, vec2 offset1, vec2 offset2) {
+    vec3 p1 = vec3(offset1, depth1 - depth);
+    vec3 p2 = vec3(offset2, depth2 - depth);
+    
+    vec3 normal = cross(p1, p2);
+    normal.z = -normal.z;
+    
+    return normalize(normal);
+}
+
+void main(void) {
+	vec2 selfCoords = gl_FragCoord.xy / uTexSize;
+
+	float selfDepth = getDepth(selfCoords);
+	vec2 selfPackedDepth = packUnitIntervalToRG(selfDepth);
+
+	if (isBackground(selfDepth)) {
+		gl_FragColor = vec4(packUnitIntervalToRG(0.0), selfPackedDepth);
+		return;
+	}
+	
+	vec2 offset1 = vec2(0.0, 0.001);
+    vec2 offset2 = vec2(0.001, 0.0);
+
+	float selfDepth1 = getDepth(selfCoords + offset1);
+	float selfDepth2 = getDepth(selfCoords + offset2);
+
+	vec3 selfViewNormal = normalFromDepth(selfDepth, selfDepth1, selfDepth2, offset1, offset2);
+	vec3 selfViewPos = screenSpaceToViewSpace(vec3(selfCoords, selfDepth), uInvProjection);
+
+    vec3 randomVec = normalize(vec3(getNoiseVec2(selfCoords) * 2.0 - 1.0, 0.0));
+	
+    vec3 tangent = normalize(randomVec - selfViewNormal * dot(randomVec, selfViewNormal));
+    vec3 bitangent = cross(selfViewNormal, tangent);
+    mat3 TBN = mat3(tangent, bitangent, selfViewNormal);
+
+    float occlusion = 0.0;
+    for(int i = 0; i < dNSamples; i++){
+        vec3 sampleViewPos = TBN * uSamples[i];
+        sampleViewPos = selfViewPos + sampleViewPos * uRadius; 
+        
+        vec4 offset = vec4(sampleViewPos, 1.0);
+        offset = uProjection * offset;
+        offset.xyz /= offset.w;
+        offset.xyz = offset.xyz * 0.5 + 0.5;
+        
+		float sampleDepth = getDepth(offset.xy);
+		float sampleViewZ = screenSpaceToViewSpace(vec3(offset.xy, sampleDepth), uInvProjection).z;
+
+        occlusion += (sampleViewZ >= sampleViewPos.z + uBias ? 1.0 : 0.0) * smootherstep(0.0, 1.0, uRadius / abs(selfViewPos.z - sampleViewZ));           
+    }
+    occlusion = 1.0 - (occlusion / float(dNSamples));
+
+	vec2 packedOcclusion = packUnitIntervalToRG(occlusion);
+    
+    gl_FragColor = vec4(packedOcclusion, selfPackedDepth);
+}
+`;
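
The ssao.frag pass reconstructs a view-space normal from neighboring depths, builds a TBN basis with a per-pixel random rotation, and counts how many of the dNSamples hemisphere offsets end up behind the stored depth; uRadius scales the hemisphere and uBias offsets the comparison slightly to avoid self-occlusion. The uSamples kernel has to be generated on the CPU; the sketch below shows the conventional hemisphere-kernel construction as an assumption, since the accompanying pass code is not part of this excerpt, and the function name is illustrative.

function getSamples(nSamples: number): number[] {
    const samples: number[] = [];
    for (let i = 0; i < nSamples; i++) {
        // random unit vector in the +Z (tangent-space) hemisphere
        let x = Math.random() * 2.0 - 1.0;
        let y = Math.random() * 2.0 - 1.0;
        let z = Math.random();
        const len = Math.sqrt(x * x + y * y + z * z) || 1.0;
        x /= len; y /= len; z /= len;
        // scale so that more samples fall close to the shaded point
        let scale = i / nSamples;
        scale = 0.1 + 0.9 * scale * scale;
        samples.push(x * scale, y * scale, z * scale);
    }
    return samples;
}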