diff --git a/Android.mk b/Android.mk
old mode 100644
new mode 100755
index d6d543157f83acde2f43d9e802842446f48ccbdd..186f196721c6d2ee917dfa7059e64bc80bc316bb
--- a/Android.mk
+++ b/Android.mk
@@ -27,6 +27,7 @@ LOCAL_PRIVILEGED_MODULE := true
 
 #LOCAL_SDK_VERSION := current
 LOCAL_RENDERSCRIPT_TARGET_API := 23
+LOCAL_MULTILIB := 32
 
 #LOCAL_OVERRIDES_PACKAGES := Camera2
 
@@ -37,6 +38,7 @@ LOCAL_MULTILIB := 32
 # If this is an unbundled build (to install separately) then include
 # the libraries in the APK, otherwise just put them in /system/lib and
 # leave them out of the APK
+
 #ifneq (,$(TARGET_BUILD_APPS))
   LOCAL_JNI_SHARED_LIBRARIES := libjni_snapcammosaic libjni_snapcamtinyplanet libjni_imageutil
 #else
diff --git a/res/drawable-hdpi/deep_portrait.png b/res/drawable-hdpi/deep_portrait.png
new file mode 100755
index 0000000000000000000000000000000000000000..67853ef2cf34de7b565603ee5faf94ee4d0e62d6
Binary files /dev/null and b/res/drawable-hdpi/deep_portrait.png differ
diff --git a/res/drawable-hdpi/deep_portrait_black.png b/res/drawable-hdpi/deep_portrait_black.png
new file mode 100755
index 0000000000000000000000000000000000000000..c47a92e3322b3c9af2ee98a202ce952641fbb955
Binary files /dev/null and b/res/drawable-hdpi/deep_portrait_black.png differ
diff --git a/res/drawable-hdpi/deep_portrait_on.png b/res/drawable-hdpi/deep_portrait_on.png
new file mode 100755
index 0000000000000000000000000000000000000000..18cf7774aa31711f2dcf73537e68fbfb90bc64ef
Binary files /dev/null and b/res/drawable-hdpi/deep_portrait_on.png differ
diff --git a/res/drawable-mdpi/deep_portrait.png b/res/drawable-mdpi/deep_portrait.png
new file mode 100755
index 0000000000000000000000000000000000000000..bfe7cb8ac402190227f49ffefa7c336db77f856f
Binary files /dev/null and b/res/drawable-mdpi/deep_portrait.png differ
diff --git a/res/drawable-mdpi/deep_portrait_black.png b/res/drawable-mdpi/deep_portrait_black.png
new file mode 100755
index 0000000000000000000000000000000000000000..d0d5f0c500748f06a7f10feb05ad91bbd5d92450
Binary files /dev/null and b/res/drawable-mdpi/deep_portrait_black.png differ
diff --git a/res/drawable-mdpi/deep_portrait_on.png b/res/drawable-mdpi/deep_portrait_on.png
new file mode 100755
index 0000000000000000000000000000000000000000..87d24160820e13ebfe183ae047da4dec60b7e44d
Binary files /dev/null and b/res/drawable-mdpi/deep_portrait_on.png differ
diff --git a/res/layout/capture_module.xml b/res/layout/capture_module.xml
index b3ade9f552ac67153ef95eee15c9dc72924d74fb..a9c8f88e5d6f6e97ca5f6cebfce06f896165a605 100755
--- a/res/layout/capture_module.xml
+++ b/res/layout/capture_module.xml
@@ -32,6 +32,7 @@
     <FrameLayout
         android:layout_width="match_parent"
         android:layout_height="match_parent"
+        android:id="@+id/mdp_preivew_frame"
         android:layout_gravity="center_vertical|center_horizontal">
         <com.android.camera.ui.AutoFitSurfaceView
             android:id="@+id/mdp_preview_content"
@@ -41,10 +42,16 @@
         <com.android.camera.ui.AutoFitSurfaceView
             android:layout_width="300dp"
             android:layout_height="300dp"
-	    android:id="@+id/mdp_preview_content_mono"
+	        android:id="@+id/mdp_preview_content_mono"
             android:visibility="gone"/>
     </FrameLayout>
 
+    <FrameLayout
+        android:id="@+id/camera_glpreview"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_gravity="center_vertical|center_horizontal" />
+
     <View
         android:id="@+id/preview_cover"
         android:layout_width="match_parent"
diff --git a/res/layout/one_ui_layout.xml b/res/layout/one_ui_layout.xml
index 59e31b95d8019d97fde174d01588a27d15e35260..61c55cd3a5828e76c562545730db975b1a813dfb 100644
--- a/res/layout/one_ui_layout.xml
+++ b/res/layout/one_ui_layout.xml
@@ -132,6 +132,10 @@
         android:id="@+id/ts_makeup_switcher"
         style="@style/OneUIMenuButton" />
 
+    <com.android.camera.ui.RotateImageView
+        android:id="@+id/deepportrait_switcher"
+        style="@style/OneUIMenuButton" />
+
     <LinearLayout
         android:id="@+id/remaining_photos"
         android:layout_width="wrap_content"
@@ -164,6 +168,19 @@
         android:layout_width="20dp"
         android:src="@drawable/icon_x" />
 
+
+    <SeekBar
+        android:layout_width="320dp"
+        android:layout_height="40dp"
+        android:maxHeight="3dip"
+        android:minHeight="1dip"
+        android:visibility="gone"
+        android:layout_gravity="center_horizontal|bottom"
+        android:layout_marginBottom="90dp"
+        android:progressDrawable="@drawable/beautify_progressbar_style"
+        android:thumb="@drawable/ic_beautify_oval"
+        android:id="@+id/deepportrait_seekbar"/>
+
     <LinearLayout
         android:layout_width="match_parent"
         android:layout_height="wrap_content"
diff --git a/res/values/camera2arrays.xml b/res/values/camera2arrays.xml
index 8723ecd667d86b8d13d05de4067313fd5686e801..80a0151b4792a68b74aa9def0d31655e2a22e315 100755
--- a/res/values/camera2arrays.xml
+++ b/res/values/camera2arrays.xml
@@ -159,6 +159,7 @@
         <item>104</item>
         <item>109</item>
         <item>110</item>
+        <item>111</item>
     </string-array>
 
     <!-- Camera Preferences Scene Mode dialog box entries -->
@@ -187,6 +188,7 @@
         <item>@string/pref_camera_scenemode_entry_panorama</item>
         <item>@string/pref_camera_scenemode_entry_promode</item>
         <item>@string/pref_camera_scenemode_entry_deepzoom</item>
+        <item>@string/pref_camera_scenemode_entry_deepportrait</item>
     </string-array>
 
     <array name="pref_camera2_scenemode_thumbnails" translatable="false">
@@ -214,6 +216,7 @@
         <item>@drawable/scene_panorama</item>
         <item>@drawable/promode</item>
         <item>@drawable/sharp_photo</item>
+        <item>@drawable/deep_portrait</item>
     </array>
 
     <array name="pref_camera2_scenemode_black_thumbnails" translatable="false">
@@ -241,6 +244,7 @@
         <item>@drawable/ic_scene_mode_black_panorama</item>
         <item>@drawable/ic_scene_mode_black_dual_camera</item>
         <item>@drawable/ic_scene_mode_black_sharp_photo</item>
+        <item>@drawable/deep_portrait_black</item>
     </array>
 
     <!-- Camera Preferences Scene Mode dialog box entries -->
@@ -269,6 +273,7 @@
         <item>@string/pref_camera2_scene_mode_panorama_instructional_content</item>
         <item>@string/pref_camera2_scene_mode_pro_instructional_content</item>
         <item>@string/pref_camera2_scene_mode_deepzoom_instructional_content</item>
+        <item>@string/pref_camera2_scene_mode_deepportrait_instructional_content</item>
     </string-array>
 
     <string-array name="pref_camera2_whitebalance_entryvalues" translatable="false">
@@ -1176,4 +1181,10 @@ for time lapse recording -->
         <item>@string/pref_camera2_video_hdr_entry_value_disable</item>
         <item>@string/pref_camera2_video_hdr_entry_value_enable</item>
     </string-array>
+
+    <string-array name="pref_camera2_deepportrait_entryvalues" translatable="false">
+        <item>@string/pref_camera2_deepportrait_entry_value_disable</item>
+        <item>@string/pref_camera2_deepportrait_entry_value_enable</item>
+    </string-array>
+
 </resources>
diff --git a/res/values/qcomstrings.xml b/res/values/qcomstrings.xml
index 7074df50d1f5eb493da632d9b012ae4829ebdb79..215669f166ee38c3e63cb9c55fe3c76dcaddea1a 100755
--- a/res/values/qcomstrings.xml
+++ b/res/values/qcomstrings.xml
@@ -1087,7 +1087,7 @@
     <string name="pref_camera2_scene_mode_blur_buster_instructional_content"  translatable="true">BlurBuster reduces blur from shaky hands.It can be helpful when taking photos in difficult places.</string>
     <string name="pref_camera2_scene_mode_pro_instructional_content"  translatable="true">With Pro Mode, you can manually control settings for ISO,Exposure, White Balance, and Focus. You will have easy access to all of these advanced settings</string>
     <string name="pref_camera2_scene_mode_deepzoom_instructional_content"  translatable="true">With DeepZoom Mode, you can use the 2X or 4X to take picture, then you can get the deepzoom`s picture </string>
-
+    <string name="pref_camera2_scene_mode_deepportrait_instructional_content" translatable="true">With DeepPortrait, you can take selfies with a blurred background. You can use the slider to adjust the amount of blur.</string>
     <string name="pref_camera2_not_show_again">Do not show again</string>
     <string name="pref_camera2_scene_mode_instructional_ok" translatable="true">OK</string>
 
@@ -1253,5 +1253,9 @@
     <string name="pref_camera2_video_hdr_entry_disable" translatable="true">disable</string>
     <string name="pref_camera2_video_hdr_entry_value_enable" translatable="false">1</string>
     <string name="pref_camera2_video_hdr_entry_value_disable" translatable="false">0</string>
+
+    <string name="pref_camera2_deepportrait_entry_value_disable" translatable="false">off</string>
+    <string name="pref_camera2_deepportrait_entry_value_enable" translatable="false">on</string>
+    <string name="pref_camera_scenemode_entry_deepportrait" translatable="false">DeepPortrait</string>
 </resources>
 
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index 4d79d6bb0e1e200083f97e2f7868794a624052b7..23447c4fdde52db4d3b952a6836dcfce19a1baaf 100755
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -66,6 +66,7 @@ import android.media.MediaMetadataRetriever;
 import android.media.MediaRecorder;
 import android.media.MediaCodecInfo;
 import android.net.Uri;
+import android.os.AsyncTask;
 import android.os.Bundle;
 import android.os.Debug;
 import android.os.Handler;
@@ -91,9 +92,14 @@ import android.graphics.Canvas;
 import android.graphics.Color;
 import android.util.AttributeSet;
 
+import com.android.camera.deepportrait.CamGLRenderObserver;
+import com.android.camera.deepportrait.CamGLRenderer;
+import com.android.camera.deepportrait.DPImage;
+import com.android.camera.deepportrait.GLCameraPreview;
 import com.android.camera.exif.ExifInterface;
 import com.android.camera.imageprocessor.filter.BlurbusterFilter;
 import com.android.camera.imageprocessor.filter.ChromaflashFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.ImageFilter;
 import com.android.camera.imageprocessor.PostProcessor;
 import com.android.camera.imageprocessor.FrameProcessor;
@@ -137,7 +143,8 @@ public class CaptureModule implements CameraModule, PhotoController,
         MediaSaveService.Listener, ClearSightImageProcessor.Callback,
         SettingsManager.Listener, LocationManager.Listener,
         CountDownView.OnCountDownFinishedListener,
-        MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener {
+        MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener,
+        CamGLRenderObserver {
     public static final int DUAL_MODE = 0;
     public static final int BAYER_MODE = 1;
     public static final int MONO_MODE = 2;
@@ -436,6 +443,9 @@ public class CaptureModule implements CameraModule, PhotoController,
     private long mIsoExposureTime;
     private int mIsoSensitivity;
 
+    private CamGLRenderer mRenderer;
+    private boolean mDeepPortraitMode = false;
+
     private class SelfieThread extends Thread {
         public void run() {
             try {
@@ -941,6 +951,12 @@ public class CaptureModule implements CameraModule, PhotoController,
         return value.equals("enable");
     }
 
+    public boolean isDeepPortraitMode() {
+        String value = mSettingsManager.getValue(SettingsManager.KEY_SCENE_MODE);
+        if (value == null) return  false;
+        return Integer.valueOf(value) == SettingsManager.SCENE_MODE_DEEPPORTRAIT_INT;
+    }
+
     private boolean isMpoOn() {
         String value = mSettingsManager.getValue(SettingsManager.KEY_MPO);
         if (value == null) return false;
@@ -974,6 +990,14 @@ public class CaptureModule implements CameraModule, PhotoController,
         return CameraProfile.getJpegEncodingQualityParameter(value);
     }
 
+    public CamGLRenderer getCamGLRender() {
+        return  mRenderer;
+    }
+
+    public GLCameraPreview getGLCameraPreview() {
+        return  mUI.getGLCameraPreview();
+    }
+
     public LocationManager getLocationManager() {
         return mLocationManager;
     }
@@ -1104,7 +1128,8 @@ public class CaptureModule implements CameraModule, PhotoController,
         }
     }
 
-    private void updatePreviewSurfaceReadyState(boolean rdy) {
+    private void
+    updatePreviewSurfaceReadyState(boolean rdy) {
         if (rdy != mSurfaceReady) {
             if (rdy) {
                 Log.i(TAG, "Preview Surface is ready!");
@@ -1201,20 +1226,26 @@ public class CaptureModule implements CameraModule, PhotoController,
                             Log.d(TAG, "cameracapturesession - onClosed");
                         }
                     };
-            waitForPreviewSurfaceReady();
-            Surface surface = getPreviewSurfaceForSession(id);
 
-            if(id == getMainCameraId()) {
-                mFrameProcessor.setOutputSurface(surface);
+            Surface surface = null;
+            if (!mDeepPortraitMode) {
+                waitForPreviewSurfaceReady();
+                surface = getPreviewSurfaceForSession(id);
+
+                if(id == getMainCameraId()) {
+                    mFrameProcessor.setOutputSurface(surface);
+                }
             }
 
             if(isClearSightOn()) {
-                mPreviewRequestBuilder[id].addTarget(surface);
-                list.add(surface);
+                if (surface != null) {
+                    mPreviewRequestBuilder[id].addTarget(surface);
+                    list.add(surface);
+                }
                 ClearSightImageProcessor.getInstance().createCaptureSession(
                         id == BAYER_ID, mCameraDevice[id], list, captureSessionCallback);
             } else if (id == getMainCameraId()) {
-                if(mFrameProcessor.isFrameFilterEnabled()) {
+                if(mFrameProcessor.isFrameFilterEnabled() && !mDeepPortraitMode) {
                     mActivity.runOnUiThread(new Runnable() {
                         public void run() {
                             mUI.getSurfaceHolder().setFixedSize(mPreviewSize.getHeight(), mPreviewSize.getWidth());
@@ -1246,8 +1277,10 @@ public class CaptureModule implements CameraModule, PhotoController,
                     mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null);
                 }
             } else {
-                mPreviewRequestBuilder[id].addTarget(surface);
-                list.add(surface);
+                if (surface != null) {
+                    mPreviewRequestBuilder[id].addTarget(surface);
+                    list.add(surface);
+                }
                 list.add(mImageReader[id].getSurface());
                 // Here, we create a CameraCaptureSession for camera preview.
                 mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null);
@@ -2556,8 +2589,9 @@ public class CaptureModule implements CameraModule, PhotoController,
             ClearSightImageProcessor.getInstance().close();
         }
         closeCamera();
-        resetAudioMute();
         mUI.showPreviewCover();
+        if (mUI.getGLCameraPreview() != null)
+            mUI.getGLCameraPreview().onPause();
         mUI.hideSurfaceView();
         mFirstPreviewLoaded = false;
         stopBackgroundThread();
@@ -2567,7 +2601,6 @@ public class CaptureModule implements CameraModule, PhotoController,
         closeVideoFileDescriptor();
     }
 
-    @Override
     public void onResumeBeforeSuper() {
         // must change cameraId before "mPaused = false;"
         int intentCameraId = CameraUtil.getCameraFacingIntentExtras(mActivity);
@@ -2605,6 +2638,11 @@ public class CaptureModule implements CameraModule, PhotoController,
     private ArrayList<Integer> getFrameProcFilterId() {
         ArrayList<Integer> filters = new ArrayList<Integer>();
 
+        if(mDeepPortraitMode) {
+            filters.add(FrameProcessor.FILTER_DEEP_PORTRAIT);
+            return filters;
+        }
+
         String scene = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP);
         if(scene != null && !scene.equalsIgnoreCase("0")) {
             filters.add(FrameProcessor.FILTER_MAKEUP);
@@ -2612,7 +2650,6 @@ public class CaptureModule implements CameraModule, PhotoController,
         if(isTrackingFocusSettingOn()) {
             filters.add(FrameProcessor.LISTENER_TRACKING_FOCUS);
         }
-
         return filters;
     }
 
@@ -2687,7 +2724,9 @@ public class CaptureModule implements CameraModule, PhotoController,
         Log.d(TAG, "updatePreviewSize final preview size = " + width + ", " + height);
 
         mPreviewSize = new Size(width, height);
-        mUI.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+        if (!mDeepPortraitMode) {
+            mUI.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+        }
     }
 
     private void openProcessors() {
@@ -2725,11 +2764,11 @@ public class CaptureModule implements CameraModule, PhotoController,
                 Log.d(TAG, "Chosen postproc filter id : " + getPostProcFilterId(mode));
                 mPostProcessor.onOpen(getPostProcFilterId(mode), isFlashOn,
                         isTrackingFocusSettingOn(), isMakeupOn, isSelfieMirrorOn,
-                        mSaveRaw, mIsSupportedQcfa);
+                        mSaveRaw, mIsSupportedQcfa, mDeepPortraitMode);
             } else {
                 mPostProcessor.onOpen(PostProcessor.FILTER_NONE, isFlashOn,
                         isTrackingFocusSettingOn(), isMakeupOn, isSelfieMirrorOn,
-                        mSaveRaw, mIsSupportedQcfa);
+                        mSaveRaw, mIsSupportedQcfa, mDeepPortraitMode);
             }
         }
         if(mFrameProcessor != null) {
@@ -2751,6 +2790,7 @@ public class CaptureModule implements CameraModule, PhotoController,
     public void onResumeAfterSuper() {
         Log.d(TAG, "onResume " + getCameraMode());
         reinit();
+        mDeepPortraitMode = isDeepPortraitMode();
         initializeValues();
         updatePreviewSize();
         mCameraIdList = new ArrayList<>();
@@ -2786,7 +2826,15 @@ public class CaptureModule implements CameraModule, PhotoController,
             msg.arg1 = cameraId;
             mCameraHandler.sendMessage(msg);
         }
-        mUI.showSurfaceView();
+        if (!mDeepPortraitMode) {
+            mUI.showSurfaceView();
+            mUI.stopDeepPortraitMode();
+        } else {
+            mUI.startDeepPortraitMode(mPreviewSize);
+            if (mUI.getGLCameraPreview() != null)
+                mUI.getGLCameraPreview().onResume();
+        }
+
         if (!mFirstTimeInitialized) {
             initializeFirstTime();
         } else {
@@ -3324,6 +3372,15 @@ public class CaptureModule implements CameraModule, PhotoController,
         return mOrientation;
     }
 
+    public int getSensorOrientation() {
+        int degree = 0;
+        if(getMainCameraCharacteristics() != null) {
+            degree = getMainCameraCharacteristics().
+                    get(CameraCharacteristics.SENSOR_ORIENTATION);
+        }
+        return degree;
+    }
+
     @Override
     public void onShowSwitcherPopup() {
 
@@ -5825,6 +5882,39 @@ public class CaptureModule implements CameraModule, PhotoController,
     boolean checkSessionAndBuilder(CameraCaptureSession session, CaptureRequest.Builder builder) {
         return session != null && builder != null;
     }
+
+    public void onRenderComplete(DPImage dpimage, boolean isError) {
+        dpimage.mImage.close();
+        if(isError) {
+            getGLCameraPreview().requestRender();
+        }
+    }
+
+    public void onRenderSurfaceCreated() {
+        updatePreviewSurfaceReadyState(true);
+        mUI.initThumbnail();
+        if (getFrameFilters().size() == 0) {
+            Toast.makeText(mActivity, "DeepPortrait is not supported",
+                    Toast.LENGTH_LONG).show();
+            return;
+        }
+        mRenderer = getGLCameraPreview().getRendererInstance();
+        DeepPortraitFilter filter = (DeepPortraitFilter)getFrameFilters().get(0);
+        if (filter != null) {
+            if (filter.getDPInitialized()) {
+                int degree = getSensorOrientation();
+                int adjustedRotation = ( degree - getDisplayOrientation() + 360 ) % 360;
+                int surfaceRotation =
+                        90 * mActivity.getWindowManager().getDefaultDisplay().getRotation();
+                mRenderer.setMaskResolution(filter.getDpMaskWidth(),filter.getDpMaskHieght());
+                mRenderer.setRotationDegree(
+                        adjustedRotation, (degree - surfaceRotation + 360) % 360);
+            }
+        }
+    }
+    public void onRenderSurfaceDestroyed() {
+        mRenderer = null;
+    }
 }
 
 class Camera2GraphView extends View {
diff --git a/src/com/android/camera/CaptureUI.java b/src/com/android/camera/CaptureUI.java
index 950820fcce48afc059a8692a98b66ca351c35dd9..617d9ef71b90cce78794973122dce05bd8e51253 100755
--- a/src/com/android/camera/CaptureUI.java
+++ b/src/com/android/camera/CaptureUI.java
@@ -46,6 +46,7 @@ import android.renderscript.Type;
 import android.text.TextUtils;
 import android.util.DisplayMetrics;
 import android.util.Log;
+import android.util.Size;
 import android.view.Display;
 import android.view.Gravity;
 import android.view.LayoutInflater;
@@ -65,6 +66,8 @@ import android.widget.LinearLayout;
 import android.widget.RelativeLayout;
 import android.widget.SeekBar;
 import android.widget.TextView;
+
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.ui.AutoFitSurfaceView;
 import com.android.camera.ui.Camera2FaceView;
 import com.android.camera.ui.CameraControls;
@@ -82,7 +85,8 @@ import com.android.camera.ui.SelfieFlashView;
 import com.android.camera.ui.TrackingFocusRenderer;
 import com.android.camera.ui.ZoomRenderer;
 import com.android.camera.util.CameraUtil;
-
+import com.android.camera.deepportrait.CamGLRenderer;
+import com.android.camera.deepportrait.GLCameraPreview;
 import org.codeaurora.snapcam.R;
 
 import java.util.List;
@@ -110,6 +114,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
     private AutoFitSurfaceView mSurfaceViewMono;
     private SurfaceHolder mSurfaceHolder;
     private SurfaceHolder mSurfaceHolderMono;
+    private GLCameraPreview mGLSurfaceView = null;
     private int mOrientation;
     private int mFilterMenuStatus;
     private PreviewGestures mGestures;
@@ -188,7 +193,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
     private View mSceneModeSwitcher;
     private View mFrontBackSwitcher;
     private ImageView mMakeupButton;
+    private ImageView mDeepportraitSwitcher;
     private SeekBar mMakeupSeekBar;
+    private SeekBar mDeepportraitSeekBar;
     private View mMakeupSeekBarLayout;
     private View mSeekbarBody;
     private TextView mRecordingTimeView;
@@ -199,6 +206,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
     private ImageView mSeekbarToggleButton;
     private View mProModeCloseButton;
     private RotateLayout mSceneModeLabelRect;
+    private LinearLayout mSceneModeLabelView;
     private TextView mSceneModeName;
     private ImageView mExitBestMode;
     private RotateLayout mDeepZoomModeRect;
@@ -241,6 +249,12 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         }
     }
 
+    public void initThumbnail() {
+        if (mThumbnail == null)
+            mThumbnail = (ImageView) mRootView.findViewById(R.id.preview_thumb);
+        mActivity.updateThumbnail(mThumbnail);
+    }
+
     private void previewUIDestroyed() {
         mModule.onPreviewUIDestroyed();
     }
@@ -293,6 +307,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         mFrontBackSwitcher = mRootView.findViewById(R.id.front_back_switcher);
         mMakeupButton = (ImageView) mRootView.findViewById(R.id.ts_makeup_switcher);
         mMakeupSeekBarLayout = mRootView.findViewById(R.id.makeup_seekbar_layout);
+        mDeepportraitSwitcher = (ImageView) mRootView.findViewById(R.id.deepportrait_switcher);
         mSeekbarBody = mRootView.findViewById(R.id.seekbar_body);
         mSeekbarToggleButton = (ImageView) mRootView.findViewById(R.id.seekbar_toggle);
         mSeekbarToggleButton.setOnClickListener(new View.OnClickListener() {
@@ -323,6 +338,29 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
             public void onStopTrackingTouch(SeekBar seekBar) {
             }
         });
+        mDeepportraitSeekBar = (SeekBar)mRootView.findViewById(R.id.deepportrait_seekbar);
+        mDeepportraitSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
+            @Override
+            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+                 if (mModule.getCamGLRender() != null) {
+                     mModule.getCamGLRender().setBlurLevel(progress);
+                 }
+            }
+
+            @Override
+            public void onStartTrackingTouch(SeekBar seekBar) {
+
+            }
+
+            @Override
+            public void onStopTrackingTouch(SeekBar seekBar) {
+                final SharedPreferences prefs =
+                        PreferenceManager.getDefaultSharedPreferences(mActivity);
+                SharedPreferences.Editor editor = prefs.edit();
+                editor.putInt(SettingsManager.KEY_DEEPPORTRAIT_VALUE, seekBar.getProgress());
+                editor.commit();
+            }
+        });
         mMakeupButton.setOnClickListener(new View.OnClickListener(){
             @Override
             public void onClick(View v) {
@@ -333,6 +371,26 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
             }
         });
         setMakeupButtonIcon();
+
+        mDeepportraitSwitcher.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                if (mModule != null && !mModule.isAllSessionClosed()) {
+                    String value = mSettingsManager.getValue(SettingsManager.KEY_SCENE_MODE);
+                    if(value == null ||
+                            Integer.valueOf(value) != SettingsManager.SCENE_MODE_DEEPPORTRAIT_INT) {
+                        mSettingsManager.setValue(SettingsManager.KEY_SCENE_MODE,""+
+                                SettingsManager.SCENE_MODE_DEEPPORTRAIT_INT);
+                    } else {
+                        mSettingsManager.setValue(SettingsManager.KEY_SCENE_MODE,
+                                ""+SettingsManager.SCENE_MODE_AUTO_INT);
+                    }
+                }
+                setDeepportraitButtonIcon();
+            }
+        });
+        setDeepportraitButtonIcon();
+
         mFlashButton = (FlashToggleButton) mRootView.findViewById(R.id.flash_button);
         mProModeCloseButton = mRootView.findViewById(R.id.promode_close_button);
         mProModeCloseButton.setOnClickListener(new View.OnClickListener() {
@@ -584,6 +642,20 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         return mDeepZoomValue;
     }
 
+    private void setDeepportraitButtonIcon() {
+        boolean enable = DeepPortraitFilter.isSupportedStatic();
+        mDeepportraitSwitcher.setEnabled(enable);
+        mActivity.runOnUiThread(new Runnable() {
+            public void run() {
+                if(mModule.isDeepPortraitMode()) {
+                    mDeepportraitSwitcher.setImageResource(R.drawable.deep_portrait_on);
+                } else {
+                    mDeepportraitSwitcher.setImageResource(R.drawable.deep_portrait);
+                }
+            }
+        });
+    }
+
     public void onCameraOpened(List<Integer> cameraIds) {
         mGestures.setCaptureUI(this);
         if (mModule.isDeepZoom()) {
@@ -599,6 +671,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         initFilterModeButton();
         initFlashButton();
         setMakeupButtonIcon();
+        setDeepportraitButtonIcon();
         showSceneModeLabel();
         updateMenus();
         if(mModule.isTrackingFocusSettingOn()) {
@@ -912,7 +985,12 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         mIsSceneModeLabelClose = false;
         int index = mSettingsManager.getValueIndex(SettingsManager.KEY_SCENE_MODE);
         CharSequence sceneModeNameArray[] = mSettingsManager.getEntries(SettingsManager.KEY_SCENE_MODE);
-        if ( index > 0 && index < sceneModeNameArray.length ) {
+        if (mModule.isDeepPortraitMode()) {
+            mSceneModeLabelRect.setVisibility(View.GONE);
+            mExitBestMode.setVisibility(View.GONE);
+            return;
+        }
+        if ( index > 0 && index < sceneModeNameArray.length) {
             mSceneModeName.setText(sceneModeNameArray[index]);
             mSceneModeLabelRect.setVisibility(View.VISIBLE);
             mExitBestMode.setVisibility(View.VISIBLE);
@@ -947,6 +1025,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         if(value != null && value.equals("0")) {
             mMakeupButton.setVisibility(View.INVISIBLE);
         }
+        mDeepportraitSwitcher.setVisibility(View.INVISIBLE);
         mIsVideoUI = true;
         mPauseButton.setVisibility(View.VISIBLE);
     }
@@ -1182,6 +1261,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         if (mFilterModeSwitcher != null) mFilterModeSwitcher.setVisibility(status);
         if (mFilterModeSwitcher != null) mFilterModeSwitcher.setVisibility(status);
         if (mMakeupButton != null) mMakeupButton.setVisibility(status);
+        if (mDeepportraitSwitcher != null) mDeepportraitSwitcher.setVisibility(status);
     }
 
     public void initializeControlByIntent() {
@@ -1227,6 +1307,38 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         mActivity.setSystemBarsVisibility(false);
     }
 
+    public void startDeepPortraitMode(Size preview) {
+        mSurfaceView.setVisibility(View.GONE);
+        mSurfaceViewMono.setVisibility(View.GONE);
+        mGLSurfaceView = new GLCameraPreview(
+                    mActivity, preview.getWidth(), preview.getHeight(), mModule);
+        FrameLayout layout = (FrameLayout) mActivity.findViewById(R.id.camera_glpreview);
+        layout.addView(mGLSurfaceView);
+        mGLSurfaceView.setVisibility(View.VISIBLE);
+        mRootView.requestLayout();
+        final SharedPreferences prefs =
+                PreferenceManager.getDefaultSharedPreferences(mActivity);
+        int progress = prefs.getInt(SettingsManager.KEY_DEEPPORTRAIT_VALUE,50);
+        mDeepportraitSeekBar.setProgress(progress);
+        mDeepportraitSeekBar.setVisibility(View.VISIBLE);
+        mRenderOverlay.setVisibility(View.GONE);
+    }
+
+    public void stopDeepPortraitMode() {
+        FrameLayout layout = (FrameLayout)mActivity.findViewById(R.id.camera_glpreview);
+        if (mGLSurfaceView != null) {
+            mGLSurfaceView.setVisibility(View.GONE);
+            layout.removeView(mGLSurfaceView);
+        }
+        mGLSurfaceView = null;
+        mDeepportraitSeekBar.setVisibility(View.GONE);
+        mRenderOverlay.setVisibility(View.VISIBLE);
+    }
+
+    public GLCameraPreview getGLCameraPreview() {
+        return  mGLSurfaceView;
+    }
+
     public void updateMenus() {
         boolean enableMakeupMenu = true;
         boolean enableFilterMenu = true;
@@ -1652,7 +1764,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         if (mGestures != null) {
             mGestures.setEnabled(previewFocused);
         }
-        if (mRenderOverlay != null) {
+        if (mRenderOverlay != null && !mModule.isDeepPortraitMode()) {
             // this can not happen in capture mode
             mRenderOverlay.setVisibility(previewFocused ? View.VISIBLE : View.GONE);
         }
diff --git a/src/com/android/camera/SettingsManager.java b/src/com/android/camera/SettingsManager.java
index 37376715d33f0a9e7821bd436a5a7365e17b8711..d17df06cbff3fdb174abdc861dd1991e958c6c56 100755
--- a/src/com/android/camera/SettingsManager.java
+++ b/src/com/android/camera/SettingsManager.java
@@ -52,6 +52,7 @@ import com.android.camera.imageprocessor.filter.BeautificationFilter;
 import com.android.camera.imageprocessor.filter.BestpictureFilter;
 import com.android.camera.imageprocessor.filter.BlurbusterFilter;
 import com.android.camera.imageprocessor.filter.ChromaflashFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.OptizoomFilter;
 import com.android.camera.imageprocessor.filter.SharpshooterFilter;
 import com.android.camera.imageprocessor.filter.StillmoreFilter;
@@ -100,6 +101,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
     public static final int SCENE_MODE_TRACKINGFOCUS_INT = SCENE_MODE_CUSTOM_START + 8;
     public static final int SCENE_MODE_PROMODE_INT = SCENE_MODE_CUSTOM_START + 9;
     public static final int SCENE_MODE_DEEPZOOM_INT = SCENE_MODE_CUSTOM_START + 10;
+	public static final int SCENE_MODE_DEEPPORTRAIT_INT = SCENE_MODE_CUSTOM_START + 11;
     public static final String SCENE_MODE_DUAL_STRING = "100";
     public static final String KEY_CAMERA_SAVEPATH = "pref_camera2_savepath_key";
     public static final String KEY_RECORD_LOCATION = "pref_camera2_recordlocation_key";
@@ -160,6 +162,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
     public static final String KEY_QCFA = "pref_camera2_qcfa_key";
     public static final String KEY_EIS_VALUE = "pref_camera2_eis_key";
     public static final String KEY_FOVC_VALUE = "pref_camera2_fovc_key";
+    public static final String KEY_DEEPPORTRAIT_VALUE = "pref_camera2_deepportrait_key";
 
     public static final HashMap<String, Integer> KEY_ISO_INDEX = new HashMap<String, Integer>();
     public static final String KEY_BSGC_DETECTION = "pref_camera2_bsgc_key";
@@ -1229,12 +1232,20 @@ public class SettingsManager implements ListMenu.SettingsListener {
         Size[] sizes = map.getOutputSizes(ImageFormat.JPEG);
         List<String> res = new ArrayList<>();
 
+        boolean isDeepportrait = getDeepportraitEnabled();
+
         if (getQcfaPrefEnabled() && getIsSupportedQcfa(cameraId)) {
             res.add(getSupportedQcfaDimension(cameraId));
         }
 
         if (sizes != null) {
             for (int i = 0; i < sizes.length; i++) {
+                if (isDeepportrait &&
+                        (Math.min(sizes[i].getWidth(),sizes[i].getHeight()) < 720 ||
+                        Math.max(sizes[i].getWidth(),sizes[i].getHeight()) <= 1024)) {
+                    // some resolutions are not supported in deepportrait
+                    continue;
+                }
                 res.add(sizes[i].toString());
             }
         }
@@ -1352,7 +1363,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
         if (BlurbusterFilter.isSupportedStatic()) modes.add(SCENE_MODE_BLURBUSTER_INT + "");
         if (SharpshooterFilter.isSupportedStatic()) modes.add(SCENE_MODE_SHARPSHOOTER_INT + "");
         if (TrackingFocusFrameListener.isSupportedStatic()) modes.add(SCENE_MODE_TRACKINGFOCUS_INT + "");
-        if (DeepZoomFilter.isSupportedStatic()) modes.add(SCENE_MODE_DEEPZOOM_INT + "");
+        if (DeepPortraitFilter.isSupportedStatic()) modes.add(SCENE_MODE_DEEPPORTRAIT_INT+"");
         modes.add("" + SCENE_MODE_PROMODE_INT);
         for (int mode : sceneModes) {
             modes.add("" + mode);
@@ -1523,14 +1534,21 @@ public class SettingsManager implements ListMenu.SettingsListener {
     }
 
     public boolean getQcfaPrefEnabled() {
-        ListPreference qcfaPref = mPreferenceGroup.findPreference(KEY_QCFA);
-        String qcfa = qcfaPref.getValue();
+        String qcfa = getValue(KEY_QCFA);
         if(qcfa != null && qcfa.equals("enable")) {
             return true;
         }
         return false;
     }
 
+    public boolean getDeepportraitEnabled() {
+        String dp = getValue(KEY_SCENE_MODE);
+        if( dp!= null && Integer.valueOf(dp) == SCENE_MODE_DEEPPORTRAIT_INT) {
+            return true;
+        }
+        return false;
+    }
+
     public boolean getIsSupportedQcfa (int cameraId) {
         byte isSupportQcfa = 0;
         try {
diff --git a/src/com/android/camera/deepportrait/CamGLRenderObserver.java b/src/com/android/camera/deepportrait/CamGLRenderObserver.java
new file mode 100755
index 0000000000000000000000000000000000000000..53faa4543912984d9be4445bfa1cb09e7a69e875
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamGLRenderObserver.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package com.android.camera.deepportrait;
+
+// Wrapper for native library
+
+public interface CamGLRenderObserver
+{
+    public void onRenderComplete(DPImage dpimage, boolean isError);
+    public void onRenderSurfaceCreated();
+    public void onRenderSurfaceDestroyed();
+}
diff --git a/src/com/android/camera/deepportrait/CamGLRenderer.java b/src/com/android/camera/deepportrait/CamGLRenderer.java
new file mode 100755
index 0000000000000000000000000000000000000000..ef8e3b1f4c16849e243664a7dc7e234e21268c6a
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamGLRenderer.java
@@ -0,0 +1,695 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.deepportrait;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.nio.ShortBuffer;
+import java.util.Vector;
+import java.lang.Thread;
+import android.media.Image;
+import android.media.Image.Plane;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.content.Context;
+import android.opengl.GLES30;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLSurfaceView.Renderer;
+import android.opengl.Matrix;
+import android.util.Log;
+
+
+public class CamGLRenderer implements Renderer
+{
+    // MVP
+    private final float[] mtrxProjection        = new float[16];
+    private final float[] mtrxView              = new float[16];
+    private final float[] mtrxProjectionAndView = new float[16];
+    // Vertex shader points
+
+    public FloatBuffer mSquareVertices;
+    public FloatBuffer[] mSquareTextureCoordinates = new FloatBuffer[4]; // 4 positions
+    // synchronized vector
+    private Vector<DPImage> mFrameQueue;
+
+    private final boolean SHOW_LOGS = false;
+
+    /** Program handles */
+    private int mConvertProgramHandle;
+    private int mBlurProgramHandle;
+    private int mProgramHandle;
+    private Boolean mActive = false;
+
+    // Effects
+    Boolean mBokehEffect = true;
+
+    // Our screen resolution
+    float mScreenWidth = 0;
+    float mScreenHeight = 0;
+
+    // Screen region of interest (ROI) where the frame is drawn
+    int mScreenROIX      = 0;
+    int mScreenROIY      = 0;
+    int mScreenROIWidth  = 0;
+    int mScreenROIHeight = 0;
+
+    //Display image resolution
+    int mFrameWidth = 0;
+    int mFrameHeight = 0;
+
+    // Misc
+    Context mContext;
+    long mLastTime;
+    int mProgram;
+    private CamRenderTexture mCamTexture;
+
+    private ByteBuffer scratchRGB;
+    private CamGLRenderObserver mObserver;
+
+    private final int NUM_PROGRAMS = 3;
+    private int[] mVerticesHandle    = new int[NUM_PROGRAMS];
+    private int[] mTexCoordLocHandle = new int[NUM_PROGRAMS];
+    private int[] mMVPMtrxhandle     = new int[NUM_PROGRAMS];
+    private int mRotMtrxhandle;
+    private int mSurfaceRotMtrxhandle;
+    private int mFlipMtrxhandle;
+    private int mInYHandle;
+    private int mInCHandle;
+    private int mPositionConv;
+    private int[] mInRGBHandle = new int[8];
+    private int mForegroundRGBHandle;
+    private int mBackGroundRGBHandle;
+    private int mMaskHandle;
+    private int mXPixelOffsetUniform;
+    private int mYPixelOffsetUniform;
+    private int mMipLevelUniform;
+    private int mBlurLevel = 50;
+    private int mRotationDegree = 90;
+    private int mMaskWidth = 0;
+    private int mMaskHeight = 0;
+    private boolean mTexturesCreated = false;
+    private static final String TAG = "<dp><app><CamGLRenderer>";
+    private long prevTime, currentTime;
+    private long minFrameDelta = 33;
+    private int mFrameRotation = 0;
+
+    private final CamRenderTexture.BlurType blurType = CamRenderTexture.BlurType.BlurTypeGaussianDilated;
+
+    private final boolean ROTATE_MASK = false;
+
+    private final float[] flipNone = new float[] { 1.0f, 0.0f, 1.0f, 0.0f }; // x, 1-x, y, 1-y
+    private final float[] flipX    = new float[] { 0.0f, 1.0f, 1.0f, 0.0f }; // x, 1-x, y, 1-y
+    private final float[] flipY    = new float[] { 1.0f, 0.0f, 0.0f, 1.0f }; // x, 1-x, y, 1-y
+    private final float[] flipXY   = new float[] { 0.0f, 1.0f, 0.0f, 1.0f }; // x, 1-x, y, 1-y
+
+    // clockwise rotations. All in column major
+    private final float[] rotNone  = new float[] {  1.0f,  0.0f, 0.0f,  0.0f,  1.0f, 0.0f, 0.0f, 0.0f, 1.0f };
+    // rotmatrix of 90 + move to 1st quadrant
+    private final float[] rot90    = new float[] {  0.0f, -1.0f, 1.0f,  1.0f,  0.0f, 0.0f, 0.0f, 0.0f, 1.0f }; // 1-y, x
+    // rotmatrix of 180 + move to 1st quadrant
+    private final float[] rot180   = new float[] { -1.0f,  0.0f, 1.0f,  0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f };
+    // rotmatrix of 270 + move to 1st quadrant
+    private final float[] rot270   = new float[] {  0.0f,  1.0f, 0.0f, -1.0f,  0.0f, 1.0f, 0.0f, 0.0f, 1.0f }; // y, 1-x
+
+    private float[] mRotationMatrix        = new float[9];
+    private float[] mSurfaceRotationMatrix = new float[9];
+    private GLSurfaceView mSurfaceView;
+
+    public void sendFrame( DPImage dpimage )
+    {
+        if ( !mActive ) return;
+        synchronized ( mFrameQueue ) {
+            if ( mFrameQueue.size() > 3 ) {
+                DPImage oldImage = mFrameQueue.get( 0 );
+                mFrameQueue.removeElementAt( 0 );
+                mObserver.onRenderComplete( oldImage, true );
+            }
+            mFrameQueue.add( dpimage );
+        }
+    }
+
+    public void setBlurLevel( int level )
+    {
+        mBlurLevel = level;
+        Log.e( TAG, "Blur Level " + mBlurLevel );
+    }
+
+    public void setRotationDegree( int camRotation, int frameRotation )
+    {
+        System.arraycopy( getRotationMatrix3x3( frameRotation ), 0, mSurfaceRotationMatrix, 0, 9 );
+
+        mFrameRotation = frameRotation;
+        mRotationDegree = ( camRotation + frameRotation ) % 360 ;
+        System.arraycopy( getRotationMatrix3x3( mRotationDegree ), 0, mRotationMatrix, 0, 9 );
+        switch ( camRotation ) {
+        case  90:
+            // transpose applied. apply H flip for 90 degree rotation - 1st column
+            mRotationMatrix[0] *= -1;
+            mRotationMatrix[1] *= -1;
+            mRotationMatrix[2] = mRotationMatrix[2] > 0.0f ? 0.0f : 1.0f;
+            break;
+        case 180:
+            // V flip applied. apply H flip for 180 degree rotation.
+            mRotationMatrix[0] *= -1;
+            mRotationMatrix[1] *= -1;
+            mRotationMatrix[2] = mRotationMatrix[2] > 0.0f ? 0.0f : 1.0f;
+            break;
+        case 270:
+            // transpose + H flip applied. correct rotation. No op
+            break;
+        }
+        Log.e( TAG, "setRotationDegree cam " + camRotation + " adjusted " + mRotationDegree +
+               " frame " + frameRotation );
+    }
+
+    public void prepareRotationMatrix( int camRotation )
+    {
+        mRotationDegree = mFrameRotation;
+        System.arraycopy( getRotationMatrix3x3( mRotationDegree ), 0, mRotationMatrix, 0, 9 );
+        if ( ROTATE_MASK ) {
+            switch ( camRotation ) {
+            case  90:
+                // H flip applied. apply V flip for 90 degree rotation - 1st column
+                mRotationMatrix[0] *= -1;
+                mRotationMatrix[1] *= -1;
+                mRotationMatrix[2] = mRotationMatrix[2] > 0.0f ? 0.0f : 1.0f;
+                break;
+            case 180:
+                // V flip applied. apply V flip for 180 degree rotation.
+                mRotationMatrix[3] *= -1;
+                mRotationMatrix[4] *= -1;
+                mRotationMatrix[5] = mRotationMatrix[5] > 0.0f ? 0.0f : 1.0f;
+                break;
+            }
+        }
+        Log.e( TAG, "setRotationDegree per frame single cam " + camRotation + " adjusted " + mRotationDegree +
+               " frame " + mFrameRotation );
+    }
+
+    public void setMaskResolution( int width, int height )
+    {
+        mMaskWidth  = width;
+        mMaskHeight = height;
+        Log.e( TAG, "setMaskResolution width " + width + " height " + height );
+    }
+
+    public float[] getRotationMatrix( int rotationDegree )
+    {
+        float[] rotMat   = new float[4];
+        float cosTheta = (float)Math.cos( Math.toRadians( rotationDegree ) );
+        float sinTheta = (float)Math.sin( Math.toRadians( rotationDegree ) );
+        rotMat[0] = cosTheta;
+        rotMat[1] = -sinTheta;
+        rotMat[2] = sinTheta;
+        rotMat[3] = cosTheta;
+        return rotMat;
+    }
+
+    public float[] getRotationMatrix3x3( int rotationDegree )
+    {
+        switch ( rotationDegree ) {
+            case  90: return rot90;
+            case 180: return rot180;
+            case 270: return rot270;
+        }
+        return rotNone;
+    }
+
+    public float[] getFlipMatrix( int rotationDegree )
+    {
+        switch ( rotationDegree ) {
+        case  90: return flipX;
+        case 180: return flipY;
+        case 270: return flipXY;
+        }
+        return flipNone;
+    }
+
+    public CamGLRenderer( Context c, int textureWidth, int textureHeight,
+                          CamGLRenderObserver observer, GLSurfaceView surfaceView )
+    {
+        mObserver = observer;
+        mContext = c;
+        mCamTexture = new CamRenderTexture();
+        mFrameQueue = new Vector<DPImage>(5);
+        mSurfaceView = surfaceView;
+
+        // Create our UV coordinates.
+        float[] squareTextureCoordinateData = new float[] {
+            0.0f, 0.0f,
+            1.0f, 0.0f,
+            0.0f, 1.0f,
+            1.0f, 1.0f
+        };
+        float[] squareTextureCoordinateDataHFlip = new float[] {
+            1.0f, 0.0f,
+            0.0f, 0.0f,
+            1.0f, 1.0f,
+            0.0f, 1.0f
+        };
+        float[] squareTextureCoordinateDataVFlip = new float[] {
+            0.0f, 1.0f,
+            1.0f, 1.0f,
+            0.0f, 0.0f,
+            1.0f, 0.0f
+        };
+        float[] squareTextureCoordinateDataHVFlip = new float[] {
+            1.0f, 1.0f,
+            0.0f, 1.0f,
+            1.0f, 0.0f,
+            0.0f, 0.0f
+        };
+        // We have to create the vertices of our triangle.
+        float[] squareVerticesData = new float[] {
+            -1.0f, -1.0f,
+             1.0f, -1.0f,
+            -1.0f,  1.0f,
+             1.0f,  1.0f,
+        };
+
+        // The texture buffer
+        for ( int i = 0; i < 4; ++i ) {
+            mSquareTextureCoordinates[i] = ByteBuffer.allocateDirect(
+                                               squareTextureCoordinateData.length * 4 ).order(
+                                               ByteOrder.nativeOrder() ).asFloatBuffer();
+        }
+        mSquareTextureCoordinates[0].put( squareTextureCoordinateData ).position( 0 );
+        mSquareTextureCoordinates[1].put( squareTextureCoordinateDataHFlip ).position( 0 );
+        mSquareTextureCoordinates[2].put( squareTextureCoordinateDataVFlip ).position( 0 );
+        mSquareTextureCoordinates[3].put( squareTextureCoordinateDataHVFlip ).position( 0 );
+
+        // The vertex buffer.
+        mSquareVertices = ByteBuffer.allocateDirect( squareVerticesData.length * 4 ).order(
+                          ByteOrder.nativeOrder() ).asFloatBuffer();
+        mSquareVertices.put( squareVerticesData ).position(0);
+
+        // initialize bytebuffer for the draw list
+        // short[] drawIndicesData = new short[] {0, 1, 2, 0, 2, 3}; // The order of vertex rendering.
+        // mSquareDrawIndices = ByteBuffer.allocateDirect( drawIndicesData.length * 2).order(
+        //                                                 ByteOrder.nativeOrder() ).asShortBuffer();
+        // mSquareDrawIndices.put( drawIndicesData ).position(0);
+
+        mFrameHeight = textureHeight;
+        mFrameWidth  = textureWidth;
+        // mRotationMatrix = getRotationMatrix( 90 );
+        mTexturesCreated = false;
+        prevTime = System.currentTimeMillis();
+    }
+
+    public void onPause()
+    {
+        mActive = false;
+    }
+
+    public void onResume()
+    {
+        mActive = true;
+    }
+
+    public void open()
+    {
+    }
+
+    public void close()
+    {
+        mFrameHeight   = 0;
+        mFrameWidth    = 0;
+        mCamTexture.deleteTextures();
+        mCamTexture = null;
+    }
+
+    @Override
+    public void onSurfaceCreated( GL10 gl, EGLConfig config )
+    {
+        Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
+        // Set the clear color to black
+        GLES30.glClearColor( 0.0f, 0.0f, 0.0f, 1 );
+
+        // Set the camera position (View matrix)
+        Matrix.setLookAtM( mtrxView, 0, 0f, 0f, 1f, 0f, 0f, 0f, 0f, 1.0f, 0.0f );
+
+        int convertVertexShaderHandle = CamRenderShader.compileShader(
+            GLES30.GL_VERTEX_SHADER, getShaderByName("convVertexShaderSource"));
+        int normalVertexShaderHandle = CamRenderShader.compileShader(
+                GLES30.GL_VERTEX_SHADER, getShaderByName("norVertexShaderSource"));
+        int vertexShaderHandle = CamRenderShader.compileShader(
+            GLES30.GL_VERTEX_SHADER, getShaderByName("vertexShaderSource"));
+        int convertShaderHandle = CamRenderShader.compileShader(
+            GLES30.GL_FRAGMENT_SHADER, getShaderByName("convertShaderSource"));
+        int blurShaderHandle = CamRenderShader.compileShader(
+            GLES30.GL_FRAGMENT_SHADER, getShaderByName("blurShaderRGBSource"));
+        int fragmentShaderHandle = CamRenderShader.compileShader(
+            GLES30.GL_FRAGMENT_SHADER, getShaderByName("blendFragShaderRGBSource"));
+
+        //----------------  Convert shader program -----------------------------------------------------
+        mConvertProgramHandle = CamRenderShader.createAndLinkProgram( convertVertexShaderHandle, convertShaderHandle );
+        mVerticesHandle[0]    = GLES30.glGetAttribLocation(  mConvertProgramHandle, "vPosition"   );
+        mTexCoordLocHandle[0] = GLES30.glGetAttribLocation(  mConvertProgramHandle, "a_texCoord"  );
+        mMVPMtrxhandle[0]     = GLES30.glGetUniformLocation( mConvertProgramHandle, "uMVPMatrix"  );
+        mPositionConv         = GLES30.glGetUniformLocation( mConvertProgramHandle, "positionConv"  );
+        mInYHandle            = GLES30.glGetUniformLocation( mConvertProgramHandle, "y_texture"   );
+        mInCHandle            = GLES30.glGetUniformLocation( mConvertProgramHandle, "uv_texture"  );
+        //----------------------------------------------------------------------------------------------
+
+        //----------------  Blur + Blend shader program --------------------------------------------------------
+        // mProgramHandle        = CamRenderShader.createAndLinkProgram( vertexShaderHandle, fragmentShaderHandle );
+        // mVerticesHandle[1]    = GLES30.glGetAttribLocation(  mProgramHandle, "vPosition"        );
+        // mTexCoordLocHandle[1] = GLES30.glGetAttribLocation(  mProgramHandle, "a_texCoord"       );
+        // mMVPMtrxhandle[1]     = GLES30.glGetUniformLocation( mProgramHandle, "uMVPMatrix"       );
+        // mInRGBHandle          = GLES30.glGetUniformLocation( mProgramHandle, "rgb_texture"      );
+        // mBackGroundRGBHandle  = GLES30.glGetUniformLocation( mProgramHandle, "bg_rgb_texture"   );
+        // mMaskHandle           = GLES30.glGetUniformLocation( mProgramHandle, "mask_texture"     );
+        // mXPixelOffsetUniform  = GLES30.glGetUniformLocation( mProgramHandle, "xPixelBaseOffset" );
+        // mYPixelOffsetUniform  = GLES30.glGetUniformLocation( mProgramHandle, "yPixelBaseOffset" );
+        // mMipLevelUniform      = GLES30.glGetUniformLocation( mProgramHandle, "mipLevel"         );
+        //----------------------------------------------------------------------------------------------
+
+        //----------------  Blur shader program --------------------------------------------------------
+        mBlurProgramHandle    = CamRenderShader.createAndLinkProgram( normalVertexShaderHandle, blurShaderHandle );
+        mVerticesHandle[2]    = GLES30.glGetAttribLocation(  mBlurProgramHandle, "vPosition"        );
+        mTexCoordLocHandle[2] = GLES30.glGetAttribLocation(  mBlurProgramHandle, "a_texCoord"       );
+        mMVPMtrxhandle[2]     = GLES30.glGetUniformLocation( mBlurProgramHandle, "uMVPMatrix"       );
+        for ( int i = 0; i < 8; ++i ) {
+            mInRGBHandle[i] = GLES30.glGetUniformLocation( mBlurProgramHandle, "rgb_texture");
+        }
+        mXPixelOffsetUniform  = GLES30.glGetUniformLocation( mBlurProgramHandle, "xPixelBaseOffset" );
+        mYPixelOffsetUniform  = GLES30.glGetUniformLocation( mBlurProgramHandle, "yPixelBaseOffset" );
+        mMipLevelUniform      = GLES30.glGetUniformLocation( mBlurProgramHandle, "mipLevel"         );
+        //----------------------------------------------------------------------------------------------
+
+        //----------------  Blend shader program --------------------------------------------------------
+        mProgramHandle        = CamRenderShader.createAndLinkProgram( vertexShaderHandle, fragmentShaderHandle );
+        mVerticesHandle[1]    = GLES30.glGetAttribLocation(  mProgramHandle, "vPosition"      );
+        mTexCoordLocHandle[1] = GLES30.glGetAttribLocation(  mProgramHandle, "a_texCoord"     );
+        mMVPMtrxhandle[1]     = GLES30.glGetUniformLocation( mProgramHandle, "uMVPMatrix"     );
+        mForegroundRGBHandle  = GLES30.glGetUniformLocation( mProgramHandle, "rgb_texture"    );
+        mBackGroundRGBHandle  = GLES30.glGetUniformLocation( mProgramHandle, "bg_rgb_texture" );
+        mMaskHandle           = GLES30.glGetUniformLocation( mProgramHandle, "mask_texture"   );
+        mRotMtrxhandle        = GLES30.glGetUniformLocation( mProgramHandle, "rotMat"         );
+        mSurfaceRotMtrxhandle = GLES30.glGetUniformLocation( mProgramHandle, "surfaceRotMat"  );
+        mFlipMtrxhandle       = GLES30.glGetUniformLocation( mProgramHandle, "flipMat"        );
+        //----------------------------------------------------------------------------------------------
+
+        mActive = true;
+    }
+
+    @Override
+    public void onSurfaceChanged( GL10 gl, int width, int height )
+    {
+
+        // We need to know the current width and height.
+        mScreenWidth = width;
+        mScreenHeight = height;
+        float aspectRatio = (float)mFrameWidth/mFrameHeight;
+        float screenAspectRatio = (float)mScreenWidth/mScreenHeight;
+        Log.d(TAG,"onSurfaceChanged aspectRatio="+aspectRatio+" screenAspectRatio="+screenAspectRatio+" w="+width+" h="+height);
+
+        if ( screenAspectRatio > aspectRatio ) {
+            mScreenROIWidth  = (int)Math.min( mScreenWidth,  mScreenWidth * aspectRatio / screenAspectRatio );
+            mScreenROIHeight = (int)mScreenHeight;
+        } else {
+            mScreenROIWidth = (int) mScreenWidth;
+            mScreenROIHeight = (int) Math.min( mScreenHeight,  mScreenWidth * aspectRatio);
+        }
+        mScreenROIX = (  (int)mScreenWidth -  mScreenROIWidth )/2;
+        mScreenROIY = ( (int)mScreenHeight - mScreenROIHeight )/2;
+
+        // Clear our matrices
+        for ( int i = 0; i < 16; i++ ) {
+            mtrxProjection[i]        = 0.0f;
+            mtrxProjectionAndView[i] = 0.0f;
+        }
+
+        Log.e( TAG, "onSurfaceChanged Frame_dim " + mFrameWidth + " x " + mFrameHeight +
+                    " ROI ( " + mScreenROIX + " " + mScreenROIY +
+                    "  " + mScreenROIWidth + " " + mScreenROIHeight + ")" );
+        // Setup our screen width and height for normal sprite translation.
+        Matrix.orthoM( mtrxProjection, 0, -aspectRatio, aspectRatio, -1, 1, 0, 50 );
+
+        // Calculate the projection and view transformation
+        Matrix.multiplyMM( mtrxProjectionAndView, 0, mtrxProjection, 0, mtrxView, 0 );
+    }
+
+    public void executeConverter( ByteBuffer bufferY, ByteBuffer bufferC, boolean offline )
+    {
+        // clear Screen and Depth Buffer, we have set the clear color as black.
+        GLES30.glClear( GLES30.GL_COLOR_BUFFER_BIT );
+
+        if ( offline ) {
+            GLES30.glViewport( 0, 0, ( int )mFrameWidth, ( int )mFrameHeight );
+            GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, mCamTexture.getInRGBFBO( 0 ) );
+            GLES30.glFramebufferTexture2D( GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
+                                           GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( 0 ), 0 );
+        } else {
+            GLES30.glViewport( 0, 0, ( int )mScreenWidth, ( int )mScreenHeight );
+        }
+
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE0 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInYTex() );
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth, mFrameHeight,
+                                GLES30.GL_LUMINANCE, GLES30.GL_UNSIGNED_BYTE, bufferY );
+
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE1 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInCTex() );
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth/2, mFrameHeight/2,
+                                GLES30.GL_LUMINANCE_ALPHA, GLES30.GL_UNSIGNED_BYTE, bufferC );
+
+        GLES30.glUseProgram( mConvertProgramHandle );
+        if (offline) {
+            GLES30.glUniform1i(mPositionConv,0);
+        } else {
+            GLES30.glUniform1i(mPositionConv,1);
+        }
+        GLES30.glUniform1i ( mInYHandle, 0 );
+        GLES30.glUniform1i ( mInCHandle, 1 );
+        GLES30.glVertexAttribPointer( mVerticesHandle[0], 2, GLES30.GL_FLOAT, false, 0, mSquareVertices );
+        GLES30.glVertexAttribPointer ( mTexCoordLocHandle[0], 2, GLES30.GL_FLOAT, false, 0, mSquareTextureCoordinates[0] );
+        GLES30.glUniformMatrix4fv( mMVPMtrxhandle[0], 1, false, mtrxProjectionAndView, 0);
+        GLES30.glEnableVertexAttribArray( mVerticesHandle[0] );
+        GLES30.glEnableVertexAttribArray ( mTexCoordLocHandle[0] );
+
+        //GLES30.glDrawElements( GLES30.GL_TRIANGLES, 6, GLES30.GL_UNSIGNED_SHORT, mSquareDrawIndices );
+        GLES30.glDrawArrays( GLES30.GL_TRIANGLE_STRIP, 0, 4 );
+
+        if ( offline ) {
+            int status = GLES30.glCheckFramebufferStatus( GLES30.GL_FRAMEBUFFER );
+            if ( status == GLES30.GL_FRAMEBUFFER_COMPLETE ) {
+            /// Debug
+            ///    GLES30.glReadPixels( 0, 0, mFrameWidth, mFrameHeight, GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, scratchRGB );
+            ///    Log.e( TAG, "RGB Buffer " + scratchRGB.get(1000) + " " + scratchRGB.get(1001) +
+            ///           "handles "  + mCamTexture.getInRGBFBO() + " " + mCamTexture.getInRGBTex() );
+            } else {
+                Log.e( TAG, "FBO status " + status + "error " + GLES30.glGetError() );
+            }
+        }
+
+        // Disable vertex array
+        GLES30.glDisableVertexAttribArray( mVerticesHandle[0] );
+        GLES30.glDisableVertexAttribArray( mTexCoordLocHandle[0] );
+        // Reset FBO
+        GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, 0 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, 0 );
+    }
+
+    // Runs one blur pass of the pyramid: samples the RGB texture of (level - 1)
+    // and renders a blurred result into the FBO/texture of (level).
+    // Callers drive this with level >= 1 (see onDrawFrame), so the level - 1
+    // source index is always valid.
+    public void executeBlur( int level )
+    {
+        int viewPortScaleFactor = 1;
+        int texScaleFactor = 1;
+        float blurScaleFactor = 1.0f; // 2x->.5
+
+        switch ( blurType )
+        {
+        case BlurTypeGaussianPyramid:
+            // Pyramid mode renders each level into a smaller viewport; the
+            // source texture for this level was allocated at 1/level scale.
+            viewPortScaleFactor = level + 1;
+            texScaleFactor = level;
+            break;
+        case BlurTypeGaussianDilated:
+            // Dilated taps: widen the per-pixel sampling offset instead of
+            // shrinking the render target.
+            blurScaleFactor = 4.0f;
+            break;
+        case BlurTypeGaussianKernelSize:
+            break;
+        }
+
+        GLES30.glClear( GLES30.GL_COLOR_BUFFER_BIT );
+        GLES30.glViewport( 0, 0, ( int )mFrameWidth/viewPortScaleFactor,
+                           ( int )mFrameHeight/viewPortScaleFactor );
+
+        // BUG FIX: glUniform* writes to the program currently in use, so the
+        // blur program must be bound BEFORE any uniform upload below. The
+        // original bound it after them, leaving the uniforms on whichever
+        // program was previously active.
+        GLES30.glUseProgram( mBlurProgramHandle );
+
+        // Bind the previous level's RGB texture as the blur source on unit 0.
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE0 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( level - 1 ) );
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0,
+                                mFrameWidth/texScaleFactor,
+                                mFrameHeight/texScaleFactor,
+                                GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, null );
+        GLES30.glUniform1i ( mInRGBHandle[level-1], 0 );
+
+        // Render into this level's FBO, attaching the level's texture as color.
+        GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, mCamTexture.getInRGBFBO( level ) );
+        GLES30.glFramebufferTexture2D( GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
+                                       GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( level ), 0 );
+
+        // Sampling offsets in normalized texture coordinates (1 texel * scale).
+        float xPixelOffset = blurScaleFactor/(float)mFrameWidth;
+        float yPixelOffset = blurScaleFactor/(float)mFrameHeight;
+        float mipLevel = (float)level;
+        GLES30.glUniform1f( mMipLevelUniform, mipLevel );
+        GLES30.glUniform1f( mXPixelOffsetUniform, xPixelOffset );
+        GLES30.glUniform1f( mYPixelOffsetUniform, yPixelOffset );
+
+        GLES30.glVertexAttribPointer( mVerticesHandle[2], 2, GLES30.GL_FLOAT, false, 0, mSquareVertices );
+        GLES30.glEnableVertexAttribArray( mVerticesHandle[2] );
+        GLES30.glVertexAttribPointer ( mTexCoordLocHandle[2], 2, GLES30.GL_FLOAT, false, 0, mSquareTextureCoordinates[0] );
+        GLES30.glEnableVertexAttribArray ( mTexCoordLocHandle[2] );
+
+        // Full-screen quad drawn as a 4-vertex triangle strip.
+        GLES30.glDrawArrays( GLES30.GL_TRIANGLE_STRIP, 0, 4 );
+
+        // Disable vertex array
+        GLES30.glDisableVertexAttribArray( mVerticesHandle[2] );
+        GLES30.glDisableVertexAttribArray( mTexCoordLocHandle[2] );
+
+        // Reset FBO
+        GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, 0 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, 0 );
+    }
+
+    // Composites the sharp foreground over the blurred background using the
+    // segmentation mask, rendering into the on-screen ROI.
+    //   bufferMask - mMaskWidth x mMaskHeight single-channel mask, uploaded to unit 2
+    //   level      - pyramid level whose texture supplies the blurred background
+    public void executeBlend( ByteBuffer bufferMask, int level )
+    {
+        GLES30.glClear( GLES30.GL_COLOR_BUFFER_BIT /*| GLES30.GL_DEPTH_BUFFER_BIT*/ );
+        GLES30.glViewport( mScreenROIX, mScreenROIY, ( int )mScreenROIWidth, ( int )mScreenROIHeight );
+        //GLES30.glEnable( GLES30.GL_DEPTH_TEST );
+        GLES30.glUseProgram( mProgramHandle );
+
+        // Unit 0: sharp RGB frame (pyramid level 0).
+        // NOTE(review): glTexSubImage2D with a null pointer and no pixel-unpack
+        // buffer bound uploads nothing; presumably the texture already holds the
+        // converted frame from an earlier pass -- confirm intent.
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE0 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( 0 ) );
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth, mFrameHeight,
+                                GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, null );
+        GLES30.glUniform1i ( mForegroundRGBHandle, 0 );
+
+        // Unit 1: blurred background from the requested pyramid level.
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE1 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex(level));
+        //GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth/(level+1), mFrameHeight/(level+1),
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth, mFrameHeight,
+                                GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, null );
+        GLES30.glUniform1i ( mBackGroundRGBHandle, 1 );
+
+        // Unit 2: the actual mask bytes are uploaded here (non-null buffer).
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE2 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getMaskTex() );
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mMaskWidth, mMaskHeight,
+                                GLES30.GL_LUMINANCE, GLES30.GL_UNSIGNED_BYTE, bufferMask );
+        GLES30.glUniform1i ( mMaskHandle, 2 );
+
+        // Quad geometry plus projection / rotation / flip matrices for the
+        // blend program.
+        GLES30.glVertexAttribPointer( mVerticesHandle[1], 2, GLES30.GL_FLOAT, false, 0, mSquareVertices );
+        GLES30.glEnableVertexAttribArray( mVerticesHandle[1] );
+        GLES30.glVertexAttribPointer ( mTexCoordLocHandle[1], 2, GLES30.GL_FLOAT, false, 0, mSquareTextureCoordinates[0] );
+        GLES30.glEnableVertexAttribArray ( mTexCoordLocHandle[1] );
+        GLES30.glUniformMatrix4fv( mMVPMtrxhandle[1], 1, false, mtrxProjectionAndView, 0 );
+        GLES30.glUniformMatrix3fv( mRotMtrxhandle, 1, false, mRotationMatrix, 0 );
+        GLES30.glUniformMatrix3fv( mSurfaceRotMtrxhandle, 1, false, mSurfaceRotationMatrix, 0 );
+        GLES30.glUniformMatrix2fv( mFlipMtrxhandle, 1, false, flipNone, 0 );
+
+        // GLES30.glDrawElements( GLES30.GL_TRIANGLES, 6, GLES30.GL_UNSIGNED_SHORT, mSquareDrawIndices );
+        GLES30.glDrawArrays( GLES30.GL_TRIANGLE_STRIP, 0, 4 );
+
+        // Disable vertex array
+        GLES30.glDisableVertexAttribArray( mVerticesHandle[1] );
+        GLES30.glDisableVertexAttribArray( mTexCoordLocHandle[1] );
+    }
+
+    @Override
+    // Per-frame GL entry point: paces rendering, lazily creates textures once
+    // mask dimensions are known, then runs convert -> blur pyramid -> blend
+    // (or convert only when the frame carries no mask).
+    public void onDrawFrame( GL10 unused )
+    {
+        // Nothing to do while inactive or with an empty frame queue.
+        if ( !mActive || mFrameQueue.size() == 0 ) {
+            return;
+        }
+
+        // Simple frame pacing: sleep off the remainder when frames arrive
+        // faster than minFrameDelta.
+        currentTime = System.currentTimeMillis();
+        long delta = currentTime - prevTime;
+        Log.d(TAG,"frame delta time = "+delta);
+        try {
+            if ( minFrameDelta > delta )
+                Thread.sleep( minFrameDelta - delta );
+        } catch ( InterruptedException e ) {
+            // BUG FIX: restore the interrupt flag instead of swallowing it with
+            // printStackTrace, so the thread's owner can observe cancellation.
+            Thread.currentThread().interrupt();
+        }
+        prevTime = System.currentTimeMillis();
+
+        // Lazily create GL textures once the mask dimensions are known;
+        // until then frames are dropped.
+        if ( !mTexturesCreated && mMaskWidth > 0 && mMaskHeight  > 0 ) {
+            Log.d( TAG, "onDrawFrame createTextures " + blurType );
+            mCamTexture.createTextures( mFrameWidth, mFrameHeight,
+                                        mMaskWidth, mMaskHeight, 8,
+                                        blurType );
+            mTexturesCreated = true;
+        } else if ( !mTexturesCreated ) {
+            // No op
+            return;
+        }
+
+        // Dequeue the oldest frame; remove(0) both fetches and removes it
+        // (replaces the original get(0) + removeElementAt(0) pair).
+        DPImage dpimage = mFrameQueue.remove( 0 );
+        Plane[] planes = dpimage.mImage.getPlanes();
+        ByteBuffer bufferY  = planes[0].getBuffer();
+        ByteBuffer bufferC = planes[2].getBuffer();
+
+        if ( dpimage.mMask == null) {
+            // No segmentation mask: plain YUV -> RGB conversion for display.
+            executeConverter( bufferY, bufferC, false );
+            Log.d( TAG, "onDrawFrame no processing" );
+        } else {
+            // Map the user blur level (0..100) onto pyramid levels 0..7.
+            int mipLevel = (int)(( mBlurLevel * 8.0f )/100.0f);
+            if ( mipLevel >= 7 )
+                mipLevel = 7;// clamp
+            Log.d( TAG, "[DP_BUF_DBG] onDrawFrame frame " + dpimage.mSeqNumber + " mipLevel "
+                    + mipLevel );
+            executeConverter( bufferY, bufferC, true );
+
+            // Build the blur pyramid one level at a time.
+            for ( int lvl = 1; lvl <= mipLevel; ++lvl ) {
+               executeBlur( lvl );
+            }
+
+            // Set rotation
+            if ( dpimage.mOrientation >= 0 ) {
+                prepareRotationMatrix( dpimage.mOrientation );
+            }
+            executeBlend( dpimage.mMask, mipLevel );
+        }
+        if ( mActive ) {
+            mObserver.onRenderComplete( dpimage, false );
+        }
+    }
+
+    // JNI hook returning GLSL source selected by name; presumably "type" keys
+    // vertex/fragment/blur variants -- TODO confirm against the native library.
+    private native String getShaderByName(String type);
+}
diff --git a/src/com/android/camera/deepportrait/CamRenderShader.java b/src/com/android/camera/deepportrait/CamRenderShader.java
new file mode 100755
index 0000000000000000000000000000000000000000..5b80ed919d5bf58b5dd5837592bbcdae543e0d3c
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamRenderShader.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.deepportrait;
+import android.opengl.GLES30;
+import android.util.Log;
+
+// Static helpers for building GLES 3.0 shader programs: compile individual
+// shaders and link vertex + fragment pairs, failing fast with a RuntimeException.
+public class CamRenderShader
+{
+    public static final String TAG = "<dp><app><CamRenderShader>";
+
+    /**
+     * Compiles one shader from source.
+     *
+     * @param shaderType   GLES30.GL_VERTEX_SHADER or GLES30.GL_FRAGMENT_SHADER
+     * @param shaderSource GLSL source text
+     * @return a non-zero shader handle
+     * @throws RuntimeException if creation or compilation fails
+     */
+    public static int compileShader( final int shaderType, final String shaderSource )
+    {
+        final int handle = GLES30.glCreateShader( shaderType );
+        if ( handle == 0 )
+        {
+            // Driver refused to allocate a shader object.
+            throw new RuntimeException( "Error creating shader." );
+        }
+
+        GLES30.glShaderSource( handle, shaderSource );
+        GLES30.glCompileShader( handle );
+
+        final int[] status = new int[1];
+        GLES30.glGetShaderiv( handle, GLES30.GL_COMPILE_STATUS, status, 0 );
+        if ( status[0] == 0 )
+        {
+            // Compilation failed: log the info log, free the handle, bail out.
+            Log.e( TAG, "Error compiling shader: " + GLES30.glGetShaderInfoLog( handle ) );
+            GLES30.glDeleteShader( handle );
+            throw new RuntimeException( "Error creating shader." );
+        }
+
+        return handle;
+    }
+
+    /**
+     * Creates a program, attaches both shaders and links them.
+     *
+     * @param vertexShaderHandle   compiled vertex shader handle
+     * @param fragmentShaderHandle compiled fragment shader handle
+     * @return a non-zero linked program handle
+     * @throws RuntimeException if creation or linking fails
+     */
+    public static int createAndLinkProgram( final int vertexShaderHandle,
+                                            final int fragmentShaderHandle )
+    {
+        final int program = GLES30.glCreateProgram();
+        if ( program == 0 ) {
+            // Driver refused to allocate a program object.
+            throw new RuntimeException("Error creating program.");
+        }
+
+        GLES30.glAttachShader( program, vertexShaderHandle );
+        GLES30.glAttachShader( program, fragmentShaderHandle );
+        GLES30.glLinkProgram( program );
+
+        final int[] status = new int[1];
+        GLES30.glGetProgramiv( program, GLES30.GL_LINK_STATUS, status, 0 );
+        if ( status[0] == 0 )
+        {
+            // Link failed: log the info log, free the program, bail out.
+            Log.e(TAG, "Error compiling program: " + GLES30.glGetProgramInfoLog(program));
+            GLES30.glDeleteProgram(program);
+            throw new RuntimeException("Error creating program.");
+        }
+
+        return program;
+    }
+}
diff --git a/src/com/android/camera/deepportrait/CamRenderTexture.java b/src/com/android/camera/deepportrait/CamRenderTexture.java
new file mode 100755
index 0000000000000000000000000000000000000000..c0b4108b312ce5d78c47bf12bbecbcc36d64d10a
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamRenderTexture.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.deepportrait;
+
+import java.nio.ByteBuffer;
+import android.opengl.GLES30;
+
+// Owns the GL textures and FBOs used by the deep-portrait renderer.
+// Texture handle layout: [0] = input luma (Y), [1] = input chroma (CbCr),
+// [2] = segmentation mask, [3 + level] = RGB blur-pyramid level.
+public class CamRenderTexture
+{
+    int[] mTextureHandle;
+    // One framebuffer per pyramid level, paired with mTextureHandle[3 + level].
+    int[] mFBO;
+    // Allocated but unused for now; see the render-buffer ToDo below.
+    int[] mRBO;
+    public enum BlurType
+    {
+        BlurTypeGaussianDilated,
+        BlurTypeGaussianPyramid,
+        BlurTypeGaussianKernelSize,
+    }
+
+    public int getInYTex()  { return mTextureHandle[0]; }
+    public int getInCTex()  { return mTextureHandle[1]; }
+    public int getMaskTex() { return mTextureHandle[2]; }
+    public int getInRGBTex( int level ) { return mTextureHandle[3 + level]; }
+    public int getInRGBFBO( int level ) { return mFBO[level]; }
+    public int getInRGBRBO( int level ) { return mRBO[level]; }
+
+    // Allocates all textures and FBOs. Must be called on the GL thread with a
+    // current context.
+    //   width/height - full-resolution frame dimensions
+    //   maskW/maskH  - segmentation mask dimensions
+    //   levels       - number of RGB pyramid levels (and FBOs)
+    //   blurType     - pyramid mode shrinks each level; other modes keep full size
+    public void createTextures( int width, int height, int maskW, int maskH,
+                                int levels, BlurType blurType )
+    {
+        mTextureHandle = new int[3 + levels];
+        mFBO = new int[levels];
+        mRBO = new int[levels];
+        GLES30.glGenTextures( mTextureHandle.length, mTextureHandle, 0 );
+
+        // Input Luma
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mTextureHandle[0] );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR );
+        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
+        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
+        GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_LUMINANCE, width, height, 0,
+                             GLES30.GL_LUMINANCE, GLES30.GL_UNSIGNED_BYTE, null );
+
+        // Input chroma: interleaved CbCr at half resolution (4:2:0), two
+        // channels packed as luminance+alpha.
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mTextureHandle[1] );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR );
+        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
+        GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
+        GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_LUMINANCE_ALPHA, width/2, height/2, 0,
+                             GLES30.GL_LUMINANCE_ALPHA, GLES30.GL_UNSIGNED_BYTE, null );
+
+        // mask: NEAREST filtering keeps hard foreground/background edges.
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mTextureHandle[2] );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_NEAREST );
+        GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_LUMINANCE, maskW, maskH, 0,
+                             GLES30.GL_LUMINANCE , GLES30.GL_UNSIGNED_BYTE, null );
+
+        // Input RGB
+        GLES30.glGenFramebuffers( levels, mFBO, 0 );
+
+        for ( int i = 0; i < levels; ++i )
+        {
+            // Pyramid mode: level i is allocated at 1/(i+1) scale, matching the
+            // texScaleFactor math in CamGLRenderer.executeBlur.
+            int scaleFactor = ( blurType == BlurType.BlurTypeGaussianPyramid ) ? i + 1 : 1;
+            GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mTextureHandle[3 + i] );
+            GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR );
+            GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR );
+            GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
+            GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
+            GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGB,
+                                 width/scaleFactor, height/scaleFactor, 0,
+                                 GLES30.GL_RGB , GLES30.GL_UNSIGNED_BYTE, null );
+        }
+        //ToDo: move to render buffers
+        //  GLES30.glGenRenderbuffers( 1, mRBO, 0 );
+        //  GLES30.glBindRenderbuffer( GLES30.GL_RENDERBUFFER, mRBO[0]);
+        //  GLES30.glRenderbufferStorage( GLES30.GL_RENDERBUFFER, GLES30.GL_RGB, width, height );
+    }
+
+    // Releases all textures and framebuffers created by createTextures.
+    // Must run on the GL thread with a current context.
+    public void deleteTextures()
+    {
+        GLES30.glDeleteTextures( mTextureHandle.length, mTextureHandle, 0 );
+        GLES30.glDeleteFramebuffers ( mFBO.length, mFBO, 0 );
+     //   GLES30.glDeleteRenderbuffers( mRBO.length, mRBO, 0 );
+    }
+}
diff --git a/src/com/android/camera/deepportrait/DPImage.java b/src/com/android/camera/deepportrait/DPImage.java
new file mode 100755
index 0000000000000000000000000000000000000000..381270682938aecd25a5741e8d0dbe57fc3f0d3c
--- /dev/null
+++ b/src/com/android/camera/deepportrait/DPImage.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package com.android.camera.deepportrait;
+import android.media.Image;
+import android.media.Image.Plane;
+import java.nio.ByteBuffer;
+
+// Value holder pairing a camera frame with its optional segmentation mask and
+// bookkeeping metadata, passed between FrameProcessor and the GL renderer.
+public class DPImage
+{
+    // The camera frame; the renderer reads its Y and CbCr planes.
+    public Image mImage;
+    // Segmentation mask bytes, or null when no portrait processing applies.
+    public ByteBuffer mMask;
+    public int mMaskWidth = 0;
+    public int mMaskHeight = 0;
+    // Sequence number used for frame-ordering diagnostics.
+    public int mSeqNumber = 0;
+    public int mOrientation = 0;
+
+    public DPImage( Image aImage, ByteBuffer aMask, int orientation)
+    {
+        mImage = aImage;
+        mMask = aMask;
+        mOrientation = orientation;
+    }
+
+    public DPImage(Image aImage, int orientation)
+    {
+        // Delegate to the main constructor with no mask.
+        this( aImage, null, orientation );
+    }
+}
diff --git a/src/com/android/camera/deepportrait/GLCameraPreview.java b/src/com/android/camera/deepportrait/GLCameraPreview.java
new file mode 100755
index 0000000000000000000000000000000000000000..7d62faebfafa255438b344236f0f7b84ef4710f1
--- /dev/null
+++ b/src/com/android/camera/deepportrait/GLCameraPreview.java
@@ -0,0 +1,343 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ * Not a Contribution
+ *
+ * Copyright 2008-2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package com.android.camera.deepportrait;
+
+import android.app.Activity;
+import android.content.Context;
+import android.opengl.GLSurfaceView;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.opengles.GL10;
+import android.opengl.GLES30;
+import android.util.Log;
+
+// GLSurfaceView host for the deep-portrait renderer: sets up an OpenGL ES 3.0
+// context (via a custom EGLContextFactory), wires lifecycle events to
+// CamGLRenderer, and notifies the observer of surface create/destroy.
+public class GLCameraPreview extends GLSurfaceView
+{
+    private CamGLRenderer mRenderer;
+    private CamGLRenderObserver mObserver;
+    public static String TAG = "<dp><app><GLSurfaceView>";
+
+    // Creates an ES 3.0 context with medium IMG context priority.
+    private static class ContextFactory implements EGLContextFactory
+    {
+        private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+        public  static final int EGL_CONTEXT_PRIORITY_LEVEL_IMG = 0x3100;
+        public  static final int EGL_CONTEXT_PRIORITY_HIGH_IMG = 0x3101;
+        public  static final int EGL_CONTEXT_PRIORITY_MED_IMG = 0x3102;
+
+        public EGLContext createContext(EGL10 egl, EGLDisplay display,
+                EGLConfig eglConfig)
+        {
+            Log.w(TAG, "creating OpenGL ES 3.0 context");
+            checkEglError("Before eglCreateContext", egl);
+            // Request client version 3 at medium priority; trailing EGL_NONE
+            // pair terminates the attribute list.
+            int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 3,
+                                  EGL_CONTEXT_PRIORITY_LEVEL_IMG, EGL_CONTEXT_PRIORITY_MED_IMG,
+                                  EGL10.EGL_NONE, EGL10.EGL_NONE };
+            EGLContext context = egl.eglCreateContext(display, eglConfig,
+                    EGL10.EGL_NO_CONTEXT, attrib_list);
+            checkEglError("After eglCreateContext", egl);
+            return context;
+        }
+
+        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context)
+        {
+            egl.eglDestroyContext(display, context);
+        }
+    }
+
+    // Logs (and drains) any pending EGL errors; does not throw.
+    private static void checkEglError(String prompt, EGL10 egl)
+    {
+        int error;
+        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS)
+        {
+            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
+        }
+    }
+
+    // Chooses an EGLConfig matching exact R/G/B/A sizes with at least the
+    // requested depth/stencil. NOTE: currently unused -- the constructor's
+    // setEGLConfigChooser call is commented out.
+    private static class ConfigChooser implements EGLConfigChooser
+    {
+
+        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil)
+        {
+            mRedSize = r;
+            mGreenSize = g;
+            mBlueSize = b;
+            mAlphaSize = a;
+            mDepthSize = depth;
+            mStencilSize = stencil;
+        }
+
+        /*
+         * Minimal pre-filter spec (ES2-renderable, >= 4 bits per channel);
+         * exact matching is performed in chooseConfig() below.
+         */
+        private static int EGL_OPENGL_ES2_BIT = 4;
+        private static int[] s_configAttribs2 = { EGL10.EGL_RED_SIZE, 4,
+                EGL10.EGL_GREEN_SIZE, 4, EGL10.EGL_BLUE_SIZE, 4,
+                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL10.EGL_NONE };
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display)
+        {
+
+            /*
+             * Get the number of minimally matching EGL configurations
+             */
+            int[] num_config = new int[1];
+            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
+
+            int numConfigs = num_config[0];
+
+            if (numConfigs <= 0)
+            {
+                throw new IllegalArgumentException(
+                        "No configs match configSpec");
+            }
+
+            /*
+             * Allocate then read the array of minimally matching EGL configs
+             */
+            EGLConfig[] configs = new EGLConfig[numConfigs];
+            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs,
+                    num_config);
+
+            printConfigs(egl, display, configs);
+            /*
+             * Now return the "best" one
+             */
+            return chooseConfig(egl, display, configs);
+        }
+
+        // Returns the first config with an exact R/G/B/A match and sufficient
+        // depth/stencil. NOTE(review): returns null when nothing matches, which
+        // callers do not guard against -- verify upstream handling.
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs)
+        {
+            for (EGLConfig config : configs)
+            {
+                int d = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_DEPTH_SIZE, 0);
+                int s = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_STENCIL_SIZE, 0);
+
+                // We need at least mDepthSize and mStencilSize bits
+                if (d < mDepthSize || s < mStencilSize)
+                    continue;
+
+                // We want an *exact* match for red/green/blue/alpha
+                int r = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_RED_SIZE, 0);
+                int g = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_GREEN_SIZE, 0);
+                int b = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_BLUE_SIZE, 0);
+                int a = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_ALPHA_SIZE, 0);
+
+                if (r == mRedSize && g == mGreenSize && b == mBlueSize
+                        && a == mAlphaSize)
+                    return config;
+            }
+            return null;
+        }
+
+        // Reads one attribute of a config, falling back to defaultValue when
+        // the query fails.
+        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+                EGLConfig config, int attribute, int defaultValue)
+        {
+
+            if (egl.eglGetConfigAttrib(display, config, attribute, mValue))
+            {
+                return mValue[0];
+            }
+            return defaultValue;
+        }
+
+        // Debug helper: dumps every candidate config to the log.
+        private void printConfigs(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs)
+        {
+            int numConfigs = configs.length;
+            Log.w(TAG, String.format("%d configurations", numConfigs));
+            for (int i = 0; i < numConfigs; i++)
+            {
+                Log.w(TAG, String.format("Configuration %d:\n", i));
+                printConfig(egl, display, configs[i]);
+            }
+        }
+
+        // Debug helper: logs a fixed list of EGL attributes for one config.
+        // The attributes and names arrays are parallel and must stay in sync.
+        private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config)
+        {
+            int[] attributes = { EGL10.EGL_BUFFER_SIZE, EGL10.EGL_ALPHA_SIZE,
+                    EGL10.EGL_BLUE_SIZE,
+                    EGL10.EGL_GREEN_SIZE,
+                    EGL10.EGL_RED_SIZE,
+                    EGL10.EGL_DEPTH_SIZE,
+                    EGL10.EGL_STENCIL_SIZE,
+                    EGL10.EGL_CONFIG_CAVEAT,
+                    EGL10.EGL_CONFIG_ID,
+                    EGL10.EGL_LEVEL,
+                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
+                    EGL10.EGL_MAX_PBUFFER_PIXELS,
+                    EGL10.EGL_MAX_PBUFFER_WIDTH,
+                    EGL10.EGL_NATIVE_RENDERABLE,
+                    EGL10.EGL_NATIVE_VISUAL_ID,
+                    EGL10.EGL_NATIVE_VISUAL_TYPE,
+                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
+                    EGL10.EGL_SAMPLES,
+                    EGL10.EGL_SAMPLE_BUFFERS,
+                    EGL10.EGL_SURFACE_TYPE,
+                    EGL10.EGL_TRANSPARENT_TYPE,
+                    EGL10.EGL_TRANSPARENT_RED_VALUE,
+                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
+                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
+                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
+                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
+                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
+                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
+                    EGL10.EGL_LUMINANCE_SIZE, EGL10.EGL_ALPHA_MASK_SIZE,
+                    EGL10.EGL_COLOR_BUFFER_TYPE, EGL10.EGL_RENDERABLE_TYPE,
+                    0x3042 // EGL10.EGL_CONFORMANT
+            };
+            String[] names = { "EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE",
+                    "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE",
+                    "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT",
+                    "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT",
+                    "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH",
+                    "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID",
+                    "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES",
+                    "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE",
+                    "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE",
+                    "EGL_TRANSPARENT_GREEN_VALUE",
+                    "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB",
+                    "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL",
+                    "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE",
+                    "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE",
+                    "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT" };
+            int[] value = new int[1];
+            for (int i = 0; i < attributes.length; i++)
+            {
+                int attribute = attributes[i];
+                String name = names[i];
+                if (egl.eglGetConfigAttrib(display, config, attribute, value))
+                {
+                    Log.w(TAG, String.format("  %s: %d\n", name, value[0]));
+                } else
+                {
+                    // Log.w(TAG, String.format("  %s: failed\n", name));
+                    // Drain the error queue so a failed query does not pollute
+                    // later EGL calls.
+                    while (egl.eglGetError() != EGL10.EGL_SUCCESS)
+                        ;
+                }
+            }
+        }
+
+        // Subclasses can adjust these values:
+        protected int mRedSize;
+        protected int mGreenSize;
+        protected int mBlueSize;
+        protected int mAlphaSize;
+        protected int mDepthSize;
+        protected int mStencilSize;
+        private int[] mValue = new int[1];
+    }
+
+    // Builds the preview surface: configures an ES 3.0 context, creates the
+    // renderer, and starts in render-on-demand mode.
+    public GLCameraPreview( Context context, int textureWidth,
+                            int textureHeight, CamGLRenderObserver observer )
+    {
+        super( context );
+        mObserver = observer;
+        // Create an OpenGL ES 3.0 context.
+        setEGLContextClientVersion( 3 );
+
+        /*
+         * Setup the context factory for ES 3.0 rendering. See ContextFactory
+         * class definition above.
+         */
+        setEGLContextFactory(new ContextFactory());
+
+        /*
+         * An exact-match EGLConfig chooser exists above (ConfigChooser) but is
+         * currently disabled; the default chooser is used instead.
+         */
+        boolean translucent = false;
+        int depth = 0;
+        int stencil = 0;
+        //setEGLConfigChooser(translucent ? new ConfigChooser(8, 8, 8, 8, depth,
+        //        stencil) : new ConfigChooser(5, 6, 5, 0, depth, stencil));
+
+        // Set the Renderer for drawing on the GLSurfaceView
+        mRenderer = new CamGLRenderer( context, textureWidth, textureHeight, observer, this );
+        setRenderer( mRenderer );
+
+        // Render the view only when there is a change in the drawing data
+        setRenderMode( GLSurfaceView.RENDERMODE_WHEN_DIRTY );
+        // setRenderMode( GLSurfaceView.RENDERMODE_CONTINUOUSLY );
+
+        mRenderer.open();
+    }
+
+    @Override
+    public void onPause()
+    {
+        super.onPause();
+        mRenderer.onPause();
+    }
+
+    @Override
+    public void onResume()
+    {
+        super.onResume();
+        mRenderer.onResume();
+    }
+
+    @Override
+    public void surfaceChanged( SurfaceHolder holder, int format, int w, int h )
+    {
+        super.surfaceChanged( holder, format, w, h );
+    }
+
+    @Override
+    public void surfaceCreated( SurfaceHolder holder )
+    {
+        super.surfaceCreated( holder );
+        // Notify the observer first, then (re)start the renderer.
+        mObserver.onRenderSurfaceCreated();
+        if (mRenderer != null) {
+            mRenderer.onResume();
+        }
+    }
+
+    @Override
+    public void surfaceDestroyed( SurfaceHolder holder )
+    {
+        super.surfaceDestroyed( holder );
+        // Tear down the renderer exactly once; mRenderer is nulled afterwards,
+        // so later onPause/onResume calls would NPE -- the null checks here and
+        // in surfaceCreated guard against that.
+        if ( mRenderer != null ) {
+            mObserver.onRenderSurfaceDestroyed();
+            Log.e( TAG, " surfaceDestroyed Close renderer" );
+            mRenderer.onPause();
+            mRenderer.close();
+            mRenderer = null;
+        }
+    }
+
+    public CamGLRenderer getRendererInstance()
+    {
+        return mRenderer;
+    }
+}
diff --git a/src/com/android/camera/imageprocessor/FrameProcessor.java b/src/com/android/camera/imageprocessor/FrameProcessor.java
index 2e7ded16932a858a5cae772fced79a9bd0cea20a..4eaf7f7ad746954d4edcf12e671d1b0d2375e37b 100755
--- a/src/com/android/camera/imageprocessor/FrameProcessor.java
+++ b/src/com/android/camera/imageprocessor/FrameProcessor.java
@@ -46,8 +46,12 @@ import android.view.Surface;
 import android.widget.Toast;
 
 import com.android.camera.CaptureModule;
+import com.android.camera.PhotoModule;
 import com.android.camera.SettingsManager;
+import com.android.camera.deepportrait.DPImage;
+import com.android.camera.deepportrait.GLCameraPreview;
 import com.android.camera.imageprocessor.filter.BeautificationFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.ImageFilter;
 import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener;
 import com.android.camera.ui.RotateTextToast;
@@ -90,8 +94,11 @@ public class FrameProcessor {
     public static final int FILTER_NONE = 0;
     public static final int FILTER_MAKEUP = 1;
     public static final int LISTENER_TRACKING_FOCUS = 2;
+    public static final int FILTER_DEEP_PORTRAIT = 3;
     private CaptureModule mModule;
     private boolean mIsVideoOn = false;
+    private boolean mIsDeepPortrait = false;
+    private DeepPortraitFilter mDeepPortraitFilter = null;
 
     public FrameProcessor(Activity activity, CaptureModule module) {
         mActivity = activity;
@@ -101,14 +108,14 @@ public class FrameProcessor {
 
         mRs = RenderScript.create(mActivity);
         mRsYuvToRGB = new ScriptC_YuvToRgb(mRs);
-        mRsRotator = new ScriptC_rotator(mRs);
+        mRsRotator = new ScriptC_rotator(mRs);
     }
 
     private void init(Size previewDim) {
         mIsActive = true;
         mSize = previewDim;
         synchronized (mAllocationLock) {
-            mInputImageReader = ImageReader.newInstance(mSize.getWidth(), mSize.getHeight(), ImageFormat.YUV_420_888, 8);
+            mInputImageReader = ImageReader.newInstance(mSize.getWidth(), mSize.getHeight(), ImageFormat.YUV_420_888, 12);
 
             Type.Builder rgbTypeBuilder = new Type.Builder(mRs, Element.RGBA_8888(mRs));
             rgbTypeBuilder.setX(mSize.getHeight());
@@ -190,12 +197,29 @@ public class FrameProcessor {
 
     public void onOpen(ArrayList<Integer> filterIds, final Size size) {
         cleanFilterSet();
+        boolean hasDeepportraitFilter = false;
         if (filterIds != null) {
             for (Integer i : filterIds) {
                 addFilter(i.intValue());
+                if (i == FILTER_DEEP_PORTRAIT) {
+                    hasDeepportraitFilter = true;
+                }
             }
         }
-        if(isFrameFilterEnabled() || isFrameListnerEnabled()) {
+
+        mIsDeepPortrait = hasDeepportraitFilter;
+        if (mIsDeepPortrait && mPreviewFilters.size() != 0) {
+            mDeepPortraitFilter =
+                    (DeepPortraitFilter)mPreviewFilters.get(0); // NOTE(review): assumes deep-portrait is always filter 0 — confirm
+            mDeepPortraitFilter.init(size.getWidth(),size.getHeight(),0,0);
+            if (!mDeepPortraitFilter.getDPInitialized())
+                Toast.makeText(mActivity, "Deepportrait init failed",
+                    Toast.LENGTH_LONG).show();
+        } else {
+            mDeepPortraitFilter = null;
+        }
+
+        if(isFrameFilterEnabled() || isFrameListnerEnabled() || mIsDeepPortrait) {
             init(size);
         }
     }
@@ -206,6 +230,8 @@ public class FrameProcessor {
             filter = new BeautificationFilter(mModule);
         } else if (filterId == LISTENER_TRACKING_FOCUS) {
             filter = new TrackingFocusFrameListener(mModule);
+        } else if (filterId == FILTER_DEEP_PORTRAIT) {
+            filter = new DeepPortraitFilter(mModule,mModule.getCamGLRender());
         }
 
         if (filter != null && filter.isSupported()) {
@@ -292,6 +318,10 @@ public class FrameProcessor {
 
     public List<Surface> getInputSurfaces() {
         List<Surface> surfaces = new ArrayList<Surface>();
+        if (mIsDeepPortrait) {
+            surfaces.add(getReaderSurface());
+            return surfaces;
+        }
         if (mPreviewFilters.size() == 0 && mFinalFilters.size() == 0) {
             surfaces.add(mSurfaceAsItIs);
             if (mIsVideoOn) {
@@ -390,6 +420,20 @@ public class FrameProcessor {
                         image.close();
                         return;
                     }
+                    if (mIsDeepPortrait) {
+                        //render to GLSurfaceView directly
+                        GLCameraPreview preview = mModule.getGLCameraPreview();
+                        if (mDeepPortraitFilter != null && mDeepPortraitFilter.getDPInitialized()
+                                && preview != null) {
+                            DPImage DpImage = new DPImage(image,0);
+                            mDeepPortraitFilter.addImage(null,null,1,DpImage);
+                            preview.getRendererInstance().sendFrame(DpImage);
+                            preview.requestRender();
+                        } else {
+                            image.close();
+                        }
+                        return;
+                    }
                     mIsAllocationEverUsed = true;
                     ByteBuffer bY = image.getPlanes()[0].getBuffer();
                     ByteBuffer bVU = image.getPlanes()[2].getBuffer();
@@ -411,7 +455,7 @@ public class FrameProcessor {
                             filter.init(mSize.getWidth(), mSize.getHeight(), stride, stride);
                             if (filter instanceof BeautificationFilter) {
                                 filter.addImage(bY, bVU, 0, new Boolean(false));
-                            } else {
+                            } else {
                                 filter.addImage(bY, bVU, 0, new Boolean(true));
                             }
                             needToFeedSurface = true;
diff --git a/src/com/android/camera/imageprocessor/PostProcessor.java b/src/com/android/camera/imageprocessor/PostProcessor.java
old mode 100644
new mode 100755
index 72a92b4b3393324829950b733b7164b74bf16790..c227f32b20b65dd00f60e9b9ceb51cd7a7b41c9b
--- a/src/com/android/camera/imageprocessor/PostProcessor.java
+++ b/src/com/android/camera/imageprocessor/PostProcessor.java
@@ -30,6 +30,8 @@ package com.android.camera.imageprocessor;
 
 import android.content.ContentResolver;
 import android.content.Context;
+import android.content.SharedPreferences;
+import android.graphics.Bitmap;
 import android.graphics.ImageFormat;
 import android.graphics.Rect;
 import android.graphics.YuvImage;
@@ -50,6 +52,7 @@ import android.media.ImageWriter;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.os.Looper;
+import android.preference.PreferenceManager;
 import android.util.Log;
 import android.widget.Toast;
 
@@ -59,11 +62,13 @@ import com.android.camera.Exif;
 import com.android.camera.MediaSaveService;
 import com.android.camera.PhotoModule;
 import com.android.camera.SettingsManager;
+import com.android.camera.deepportrait.DPImage;
 import com.android.camera.exif.ExifInterface;
 import com.android.camera.exif.Rational;
 import com.android.camera.imageprocessor.filter.BestpictureFilter;
 import com.android.camera.imageprocessor.filter.BlurbusterFilter;
 import com.android.camera.imageprocessor.filter.ChromaflashFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.OptizoomFilter;
 import com.android.camera.imageprocessor.filter.SharpshooterFilter;
 import com.android.camera.imageprocessor.filter.StillmoreFilter;
@@ -148,6 +153,7 @@ public class PostProcessor{
     private LinkedList<ZSLQueue.ImageItem> mFallOffImages = new LinkedList<ZSLQueue.ImageItem>();
     private int mPendingContinuousRequestCount = 0;
     public int mMaxRequiredImageNum;
+    private boolean mIsDeepPortrait = false;
 
     public int getMaxRequiredImageNum() {
         return mMaxRequiredImageNum;
@@ -439,6 +445,12 @@ public class PostProcessor{
             mZSLReprocessImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(), ImageFormat.JPEG, mMaxRequiredImageNum);
             mZSLReprocessImageReader.setOnImageAvailableListener(processedImageAvailableListener, mHandler);
         }
+        if (mIsDeepPortrait) {
+            ImageFilter imageFilter = mController.getFrameFilters().get(0);
+            DeepPortraitFilter deepPortraitFilter =
+                    (DeepPortraitFilter) imageFilter;
+            deepPortraitFilter.initSnapshot(pictureSize.getWidth(),pictureSize.getHeight());
+        }
     }
 
     public boolean takeZSLPicture() {
@@ -678,9 +690,10 @@ public class PostProcessor{
 
     public void onOpen(int postFilterId, boolean isFlashModeOn, boolean isTrackingFocusOn,
                        boolean isMakeupOn, boolean isSelfieMirrorOn, boolean isSaveRaw,
-                       boolean isSupportedQcfa) {
+                       boolean isSupportedQcfa, boolean isDeepPortrait) {
         mImageHandlerTask = new ImageHandlerTask();
         mSaveRaw = isSaveRaw;
+        mIsDeepPortrait = isDeepPortrait;
         if(setFilter(postFilterId) || isFlashModeOn || isTrackingFocusOn || isMakeupOn || isSelfieMirrorOn
                 || PersistUtil.getCameraZSLDisabled()
                 || !SettingsManager.getInstance().isZSLInAppEnabled()
@@ -690,7 +703,7 @@ public class PostProcessor{
                 || "18".equals(SettingsManager.getInstance().getValue(
                                   SettingsManager.KEY_SCENE_MODE))
                 || mController.getCameraMode() == CaptureModule.DUAL_MODE
-                || isSupportedQcfa) {
+                || isSupportedQcfa || isDeepPortrait) {
             mUseZSL = false;
         } else {
             mUseZSL = true;
@@ -949,6 +962,17 @@ public class PostProcessor{
             }
             mOrientation = CameraUtil.getJpegRotation(mController.getMainCameraId(), mController.getDisplayOrientation());
         }
+        if (mIsDeepPortrait) {
+            ImageFilter imageFilter = mController.getFrameFilters().get(0);
+            DeepPortraitFilter deepPortraitFilter =
+                    (DeepPortraitFilter) imageFilter;
+            if (!deepPortraitFilter.getDPStillInit()) {
+                mStatus = STATUS.BUSY;
+                if(mWatchdog != null) {
+                    mWatchdog.startMonitor();
+                }
+            }
+        }
         if(mFilter != null && mCurrentNumImage >= mFilter.getNumRequiredImage()) {
             return;
         }
@@ -967,10 +991,78 @@ public class PostProcessor{
                         ByteBuffer vuBuf = image.getPlanes()[2].getBuffer();
 
                         if(mFilter == null) {
-                            mDefaultResultImage = new ImageFilter.ResultImage(ByteBuffer.allocateDirect(mStride * mHeight*3/2),
-                                                                    new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
-                            yBuf.get(mDefaultResultImage.outBuffer.array(), 0, yBuf.remaining());
-                            vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
+                            if (mIsDeepPortrait) {
+                                ImageFilter imageFilter = mController.getFrameFilters().get(0);
+                                DeepPortraitFilter deepPortraitFilter =
+                                        (DeepPortraitFilter) imageFilter;
+                                DPImage dpImage = new DPImage(image,0);
+                                long current = System.currentTimeMillis();
+                                deepPortraitFilter.addImage(null,null,0,dpImage);
+                                if (DEBUG_DUMP_FILTER_IMG) {
+                                    ImageFilter.ResultImage debugResultImage = new
+                                            ImageFilter.ResultImage(ByteBuffer.allocateDirect(
+                                            mStride * mHeight * 3 / 2), new Rect(0, 0, mWidth,
+                                            mHeight), mWidth, mHeight, mStride);
+                                    yBuf.get(debugResultImage.outBuffer.array(), 0, yBuf.remaining());
+                                    vuBuf.get(debugResultImage.outBuffer.array(), mStride * mHeight,
+                                            vuBuf.remaining());
+                                    yBuf.rewind();
+                                    vuBuf.rewind();
+
+                                    byte[] bytes = nv21ToJpeg(debugResultImage, mOrientation, null);
+                                    mActivity.getMediaSaveService().addImage(
+                                            bytes, "Debug_beforeApplyingFilter" + numImage, 0L, null,
+                                            debugResultImage.outRoi.width(),
+                                            debugResultImage.outRoi.height(),
+                                            mOrientation, null, mController.getMediaSavedListener(),
+                                            mActivity.getContentResolver(), "jpeg");
+
+                                    if (dpImage.mMask != null) {
+                                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                                        Bitmap mask = DeepPortraitFilter.DpMaskToImage(
+                                                dpImage.mMask, dpImage.mMaskWidth,dpImage.mMaskHeight);
+                                        mask.compress(Bitmap.CompressFormat.JPEG, 75, baos);
+                                        byte[] data = baos.toByteArray();
+                                        mActivity.getMediaSaveService().addImage(
+                                                data, "DPmask" + System.currentTimeMillis(), 0L, null,
+                                                dpImage.mMaskWidth,
+                                                dpImage.mMaskHeight,
+                                                mOrientation, null, mController.getMediaSavedListener(),
+                                                mActivity.getContentResolver(), "jpeg");
+                                    }
+                                }
+                                if (dpImage.mMask == null) {
+                                    Log.d(TAG,"can't generate deepportrait mask");
+                                    mDefaultResultImage = new ImageFilter.ResultImage(
+                                            ByteBuffer.allocateDirect(mStride * mHeight*3/2),
+                                            new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
+                                    yBuf.get(mDefaultResultImage.outBuffer.array(), 0, yBuf.remaining());
+                                    vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
+                                } else {
+                                    ByteBuffer dstY = ByteBuffer.allocateDirect(yBuf.capacity());
+                                    ByteBuffer dstVU = ByteBuffer.allocateDirect(vuBuf.capacity());
+                                    final SharedPreferences prefs =
+                                            PreferenceManager.getDefaultSharedPreferences(mActivity);
+                                    int level = prefs.getInt(SettingsManager.KEY_DEEPPORTRAIT_VALUE
+                                            ,50);
+                                    deepPortraitFilter.renderDeepportraitImage(
+                                            dpImage,dstY,dstVU,0, level/100f);
+                                    Log.d(TAG,"process Dp snapshot cost time "+ (System.currentTimeMillis() - current));
+                                    mDefaultResultImage = new ImageFilter.ResultImage(
+                                            ByteBuffer.allocateDirect(mStride * mHeight*3/2),
+                                            new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
+                                    dstY.get(mDefaultResultImage.outBuffer.array(), 0,
+                                            dstY.remaining());
+                                    dstVU.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight,
+                                            dstVU.remaining());
+                                }
+                            } else {
+                                mDefaultResultImage = new ImageFilter.ResultImage(
+                                        ByteBuffer.allocateDirect(mStride * mHeight*3/2),
+                                        new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
+                                yBuf.get(mDefaultResultImage.outBuffer.array(), 0, yBuf.remaining());
+                                vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
+                            }
                             image.close();
                         } else {
                             if (DEBUG_DUMP_FILTER_IMG) {
@@ -1070,9 +1162,12 @@ public class PostProcessor{
                     }
                     if(resultImage != null) {
                         //Start processing FrameProcessor filter as well
-                        for (ImageFilter filter : mController.getFrameFilters()) {
-                            filter.init(resultImage.width, resultImage.height, resultImage.stride, resultImage.stride);
-                            filter.addImage(resultImage.outBuffer, null, 0, new Boolean(false));
+                        if (!mIsDeepPortrait) {
+                            for (ImageFilter filter : mController.getFrameFilters()) {
+                                filter.init(resultImage.width, resultImage.height,
+                                        resultImage.stride, resultImage.stride);
+                                filter.addImage(resultImage.outBuffer, null, 0, new Boolean(false));
+                            }
                         }
 
                         if(isSelfieMirrorOn() && !mController.isBackCamera()) {
@@ -1194,7 +1289,7 @@ public class PostProcessor{
         }
     };
 
-    private byte[] nv21ToJpeg(ImageFilter.ResultImage resultImage, int orientation, TotalCaptureResult result) {
+    public byte[] nv21ToJpeg(ImageFilter.ResultImage resultImage, int orientation, TotalCaptureResult result) {
         BitmapOutputStream bos = new BitmapOutputStream(1024);
         YuvImage im = new YuvImage(resultImage.outBuffer.array(), ImageFormat.NV21,
                                     resultImage.width, resultImage.height, new int[]{resultImage.stride, resultImage.stride});
diff --git a/src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java b/src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java
new file mode 100755
index 0000000000000000000000000000000000000000..9cadcf325a7e14593a19342deeaaa3425828cef6
--- /dev/null
+++ b/src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java
@@ -0,0 +1,325 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package com.android.camera.imageprocessor.filter;
+
+import android.graphics.Bitmap;
+import android.graphics.Color;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.media.Image;
+import android.os.AsyncTask;
+import android.os.Environment;
+import android.os.Handler;
+import android.util.Log;
+import android.util.Size;
+
+import com.android.camera.CaptureModule;
+import com.android.camera.deepportrait.CamGLRenderer;
+import com.android.camera.deepportrait.DPImage;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+public class DeepPortraitFilter implements ImageFilter {
+    private static String TAG = "DeepPortraitFilter";
+    private static String VIDEO_DLC = "deepportrait_preview.dlce";
+    private static String SNAPSHOT_DLC = "deepportrait_snapshot.dlce";
+    private static String SD_ROOT_PATH = Environment.getExternalStorageDirectory().toString();
+    private static boolean mIsSupported = false;
+    int mWidth;
+    int mHeight;
+    int mSnapshotWidth;
+    int mSnapshotHeight;
+    int mStrideY;
+    int mStrideVU;
+    private CaptureModule mModule;
+    private CamGLRenderer mRender;
+    private Boolean mDPInitialized = false;
+    private Boolean mDPStillInit = false;
+    private int mVideoMaskSize = 0;
+    private static final int DP_QUEUE_SIZE = 30;
+    private ByteBuffer[] mMaskBufArray = new ByteBuffer[DP_QUEUE_SIZE];
+    private int mSeqNo;
+
+    public DeepPortraitFilter(CaptureModule module, CamGLRenderer render) {
+        mModule = module;
+        mRender = render;
+    }
+
+    @Override
+    public List<CaptureRequest> setRequiredImages(CaptureRequest.Builder builder) {
+        return null;
+    }
+
+    @Override
+    public String getStringName() {
+        return null;
+    }
+
+    @Override
+    public int getNumRequiredImage() {
+        return 0;
+    }
+
+    @Override
+    public void init(int width, int height, int strideY, int strideVU) {
+        mWidth = width;
+        mHeight = height;
+        mStrideY = strideY;
+        mStrideVU = strideVU;
+        mSeqNo = 0;
+        mDPInitialized = initPreview(width, height);
+        if (mDPInitialized) {
+            mVideoMaskSize = getMaskBufferSize();
+            for ( int i = 0; i < mMaskBufArray.length; ++i ) {
+                mMaskBufArray[i]  = ByteBuffer.allocateDirect(mVideoMaskSize);
+            }
+        }
+        Log.d(TAG,"init width = " +width +" height = " + height);
+    }
+
+    public void initSnapshot(int width, int height) {
+        String dlcPath = SD_ROOT_PATH + File.separator + SNAPSHOT_DLC;
+        File dlc = new File(dlcPath);
+        if (!dlc.exists()) {
+            mDPStillInit = false;
+            return;
+        }
+        mSnapshotWidth = width;
+        mSnapshotHeight = height;
+        new InitializeDpSnapShot().execute();
+        Log.d(TAG,"initSnapshot width = " +width +" height = " + height);
+    }
+
+    public boolean initPreview(int width, int height) {
+        String dlcPath = SD_ROOT_PATH + File.separator + VIDEO_DLC;
+        File dlc = new File(dlcPath);
+        if (!dlc.exists()) {
+            return false;
+        }
+        return initVideoDeepPortrait(width, height);
+    }
+
+    public boolean getDPInitialized() {
+        return mDPInitialized;
+    }
+
+    public boolean getDPStillInit() {
+        return mDPStillInit;
+    }
+
+    @Override
+    public void deinit() {
+        mDPInitialized = false;
+        mDPStillInit = false;
+    }
+
+    @Override
+    // inputImage is a DPImage; imageNum > 0 means preview frame, imageNum == 0 means snapshot
+    public void addImage(ByteBuffer bY, ByteBuffer bVU, int imageNum, Object inputImage) {
+        DPImage dpImage = (DPImage)inputImage;
+        Image image = dpImage.mImage;
+        Image.Plane[] planes = image.getPlanes();
+        ByteBuffer bufferY = planes[0].getBuffer();
+        ByteBuffer bufferC = planes[2].getBuffer();
+        if (imageNum > 0) {
+            mSeqNo++;
+            ByteBuffer mask = mMaskBufArray[mSeqNo % mMaskBufArray.length];
+            dpImage.mMask = mask;
+            dpImage.mSeqNumber = mSeqNo;
+            int displayOrientation = mModule.getDisplayOrientation() == -1?
+                    0:mModule.getDisplayOrientation();
+            int sensorOrientation = mModule.getSensorOrientation();
+            int adjustedRotation = ( sensorOrientation - displayOrientation + 360 ) % 360;
+            dpImage.mOrientation = adjustedRotation;
+            runDpVideoWarpMask( bufferY, bufferC, planes[0].getRowStride(),
+                    planes[2].getRowStride(),adjustedRotation,mask,getMaskWidth());
+        } else {
+            int[] maskSize = new int[2];
+            boolean success = false;
+            if (mDPStillInit) {
+                success = getSnapshotMaskBufferSize(mSnapshotWidth,mSnapshotHeight,maskSize);
+            }
+            int maskWidth = maskSize[0];
+            int maskHeight = maskSize[1];
+            int size = maskWidth * maskHeight;
+            if (!success || size == 0) {
+                Log.d(TAG,"failed to get SnapshotMaskBufferSize success = "
+                        + success +" size = " + size);
+                return;
+            }
+            ByteBuffer mask = ByteBuffer.allocateDirect(maskWidth * maskHeight);
+            dpImage.mMask = mask;
+            dpImage.mMaskWidth = maskWidth;
+            dpImage.mMaskHeight = maskHeight;
+            int displayOrientation = mModule.getDisplayOrientation() == -1?
+                    0:mModule.getDisplayOrientation();
+            int sensorOrientation = mModule.getSensorOrientation();
+            int adjustedRotation = ( sensorOrientation - displayOrientation + 360 ) % 360;
+            dpImage.mOrientation = adjustedRotation;
+            runDpSnapshotWarpMask(bufferY,bufferC,
+                    planes[0].getRowStride(), planes[2].getRowStride(),
+                    mask,maskWidth,adjustedRotation);
+        }
+    }
+
+    @Override
+    public ResultImage processImage() {
+        return null;
+    }
+
+    public boolean renderDeepportraitImage(DPImage dpImage,ByteBuffer dstY, ByteBuffer dstVU,
+                                        int effect, float intensity) {
+        boolean ret;
+        Image image = dpImage.mImage;
+        Image.Plane[] planes = image.getPlanes();
+        ByteBuffer bufferY = planes[0].getBuffer();
+        ByteBuffer bufferC = planes[2].getBuffer();
+        int width = image.getWidth();
+        int height = image.getHeight();
+        int strideY = planes[0].getRowStride();
+        int strideVU = planes[2].getRowStride();
+        if (dpImage.mMask == null) {
+            return false;
+        }
+        ret = initDpEffect(bufferY,bufferC,width,height,strideY,strideVU,
+                dpImage.mMask,dpImage.mMaskWidth,dpImage.mMaskHeight,dpImage.mMaskWidth);
+        Log.d(TAG,"initDpEffect success = " + ret);
+        if (ret) {
+            ret = renderDpEffect(dstY,dstVU,width,height,strideY,strideVU,effect,intensity,
+                    dpImage.mOrientation);
+            Log.d(TAG,"renderDpEffect  success = " + ret);
+        }
+        return ret;
+    }
+
+    public static Bitmap DpMaskToImage(ByteBuffer maskBuffer, int width, int height) {
+        byte[] maskArray = new byte[width * height];
+        maskBuffer.get(maskArray);
+        int[] rgbArray = new int[maskArray.length];
+        for (int i = 0; i < maskArray.length; i++) {
+            int alpha  = (int) maskArray[i];
+            rgbArray[i] = Color.rgb(alpha,alpha,alpha);
+        }
+        Bitmap maskImage = Bitmap.createBitmap(rgbArray,width,height, Bitmap.Config.ARGB_8888);
+        return maskImage;
+    }
+
+    @Override
+    public boolean isSupported() {
+        return mIsSupported;
+    }
+
+    public static boolean isSupportedStatic(){return mIsSupported;}
+
+    @Override
+    public boolean isFrameListener() {
+        return false;
+    }
+
+    @Override
+    public boolean isManualMode() {
+        return false;
+    }
+
+    @Override
+    public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+                              CameraCaptureSession.CaptureCallback callback,
+                              Handler handler) throws CameraAccessException {
+
+    }
+
+    private class InitializeDpSnapShot extends AsyncTask<Void, Void, Void>
+    {
+
+        @Override
+        protected void onPreExecute()
+        {
+            super.onPreExecute();
+        }
+
+        @Override
+        protected void onPostExecute(Void params)
+        {
+            super.onPostExecute(params);
+        }
+
+        @Override
+        protected Void doInBackground(Void... params)
+        {
+
+            if ( !mDPStillInit ) {
+                mDPStillInit = initSnapshotDeepPortrait(mSnapshotWidth, mSnapshotHeight);
+            }
+            return null;
+        }
+    }
+
+    public int getDpMaskWidth() {
+        return getMaskWidth();
+    }
+
+    public int getDpMaskHieght() { // TODO(review): typo — should be getDpMaskHeight; rename together with callers
+        return getMaskHeight();
+    }
+
+
+    private native boolean initVideoDeepPortrait(int width, int height);
+    private native boolean initSnapshotDeepPortrait(int width, int height);
+    private native boolean runDpVideoWarpMask(ByteBuffer yData, ByteBuffer vuData, int yStride,
+                                              int vuStride, int orientation,
+                                              ByteBuffer mask, int maskStride);
+    private native boolean runDpSnapshotWarpMask(ByteBuffer yData, ByteBuffer vuData, int yStride,
+                                                 int vuStride, ByteBuffer mask, int maskStride,
+                                                 int orientation);
+    private native boolean getSnapshotMaskBufferSize(int width, int height, int[] maskSize);
+    private native int getMaskBufferSize( );
+    private native int getMaskWidth( );
+    private native int getMaskHeight( );
+    private native boolean initDpEffect(ByteBuffer yData, ByteBuffer vuData, int width, int height,
+                                        int yStride, int vuStride, ByteBuffer mask, int maskWidth,
+                                        int maskHeight,int maskStride);
+    private native boolean renderDpEffect(ByteBuffer dstYData, ByteBuffer dstVUData,int width,
+                                          int height, int yStride, int vuStride,int effect,
+                                          float intensity, int orientation);
+
+    static {
+        try {
+            System.loadLibrary("jni_deepportrait");
+            mIsSupported = true;
+        }catch(UnsatisfiedLinkError e) {
+            mIsSupported = false;
+            Log.d(TAG,"failed to load jni_deepportrait");
+        }
+    }
+}
diff --git a/src/com/android/camera/ui/OneUICameraControls.java b/src/com/android/camera/ui/OneUICameraControls.java
index 8d156e3fea3af70764d2d01bdc745ecad27ea45b..504cb2679d28397693ac2e07500b47bcf7c06ebe 100755
--- a/src/com/android/camera/ui/OneUICameraControls.java
+++ b/src/com/android/camera/ui/OneUICameraControls.java
@@ -58,6 +58,7 @@ public class OneUICameraControls extends RotatableLayout {
     private View mPreview;
     private View mSceneModeSwitcher;
     private View mFilterModeSwitcher;
+    private View mDeepportraitSwitcher;
     private View mMakeupSeekBar;
     private View mMakeupSeekBarLowText;
     private View mMakeupSeekBarHighText;
@@ -152,6 +153,7 @@ public class OneUICameraControls extends RotatableLayout {
         mMakeupSeekBarLayout = findViewById(R.id.makeup_seekbar_layout);
         ((SeekBar)mMakeupSeekBar).setMax(100);
         mFlashButton = findViewById(R.id.flash_button);
+        mDeepportraitSwitcher = findViewById(R.id.deepportrait_switcher);
         mMute = findViewById(R.id.mute_button);
         mPreview = findViewById(R.id.preview_thumb);
         mSceneModeSwitcher = findViewById(R.id.scene_mode_switcher);
@@ -229,8 +231,8 @@ public class OneUICameraControls extends RotatableLayout {
 
         mViews = new View[]{
                 mSceneModeSwitcher, mFilterModeSwitcher, mFrontBackSwitcher,
-                mTsMakeupSwitcher, mFlashButton, mShutter, mPreview, mVideoShutter,
-                mPauseButton, mCancelButton
+                mTsMakeupSwitcher,mDeepportraitSwitcher, mFlashButton, mShutter,
+                mPreview, mVideoShutter, mPauseButton, mCancelButton
         };
         mBottomLargeSize = getResources().getDimensionPixelSize(
                 R.dimen.one_ui_bottom_large);
@@ -309,7 +311,12 @@ public class OneUICameraControls extends RotatableLayout {
         } else {
             v.setY(mHeight - mBottom + (mBottom - h) / 2);
         }
-        float bW = mWidth / 5f;
+        float bW;
+        if (top) {
+            bW = mWidth / 6f;
+        } else {
+            bW = mWidth / 5f;
+        }
         v.setX(bW * idx + (bW - w) / 2);
     }
 
@@ -341,6 +348,7 @@ public class OneUICameraControls extends RotatableLayout {
             setLocation(mFrontBackSwitcher, true, 2);
             setLocation(mTsMakeupSwitcher, true, 3);
             setLocation(mFlashButton, true, 4);
+            setLocation(mDeepportraitSwitcher,true,5);
             if (mIntentMode == CaptureModule.INTENT_MODE_CAPTURE) {
                 setLocation(mShutter, false, 2);
                 setLocation(mCancelButton, false, 0.85f);