From cfdc874eb6e23e101bd32d10183e419894e405f2 Mon Sep 17 00:00:00 2001
From: junjiez <junjiez@codeaurora.org>
Date: Fri, 22 Sep 2017 13:44:33 +0800
Subject: [PATCH] SnapdragonCamera:DeepPortrait

Add deepportrait feature in SnapdragonCamera.

Change-Id: I5fd223cacfb2526efffbf3f13f7c33cafd8ac99d
---
 Android.mk                                    |   2 +
 res/drawable-hdpi/deep_portrait.png           | Bin 0 -> 2198 bytes
 res/drawable-hdpi/deep_portrait_black.png     | Bin 0 -> 3400 bytes
 res/drawable-hdpi/deep_portrait_on.png        | Bin 0 -> 2859 bytes
 res/drawable-mdpi/deep_portrait.png           | Bin 0 -> 1060 bytes
 res/drawable-mdpi/deep_portrait_black.png     | Bin 0 -> 2471 bytes
 res/drawable-mdpi/deep_portrait_on.png        | Bin 0 -> 2135 bytes
 res/layout/capture_module.xml                 |   9 +-
 res/layout/one_ui_layout.xml                  |  17 +
 res/values/camera2arrays.xml                  |  11 +
 res/values/qcomstrings.xml                    |   6 +-
 src/com/android/camera/CaptureModule.java     | 126 +++-
 src/com/android/camera/CaptureUI.java         | 118 ++-
 src/com/android/camera/SettingsManager.java   |  24 +-
 .../deepportrait/CamGLRenderObserver.java     |  40 +
 .../camera/deepportrait/CamGLRenderer.java    | 695 ++++++++++++++++++
 .../camera/deepportrait/CamRenderShader.java  | 105 +++
 .../camera/deepportrait/CamRenderTexture.java | 114 +++
 .../android/camera/deepportrait/DPImage.java  |  57 ++
 .../camera/deepportrait/GLCameraPreview.java  | 343 +++++++++
 .../camera/imageprocessor/FrameProcessor.java |  52 +-
 .../camera/imageprocessor/PostProcessor.java  | 115 ++-
 .../filter/DeepPortraitFilter.java            | 325 ++++++++
 .../camera/ui/OneUICameraControls.java        |  14 +-
 24 files changed, 2130 insertions(+), 43 deletions(-)
 mode change 100644 => 100755 Android.mk
 create mode 100755 res/drawable-hdpi/deep_portrait.png
 create mode 100755 res/drawable-hdpi/deep_portrait_black.png
 create mode 100755 res/drawable-hdpi/deep_portrait_on.png
 create mode 100755 res/drawable-mdpi/deep_portrait.png
 create mode 100755 res/drawable-mdpi/deep_portrait_black.png
 create mode 100755 res/drawable-mdpi/deep_portrait_on.png
 create mode 100755 src/com/android/camera/deepportrait/CamGLRenderObserver.java
 create mode 100755 src/com/android/camera/deepportrait/CamGLRenderer.java
 create mode 100755 src/com/android/camera/deepportrait/CamRenderShader.java
 create mode 100755 src/com/android/camera/deepportrait/CamRenderTexture.java
 create mode 100755 src/com/android/camera/deepportrait/DPImage.java
 create mode 100755 src/com/android/camera/deepportrait/GLCameraPreview.java
 mode change 100644 => 100755 src/com/android/camera/imageprocessor/PostProcessor.java
 create mode 100755 src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java

diff --git a/Android.mk b/Android.mk
old mode 100644
new mode 100755
index d6d543157..186f19672
--- a/Android.mk
+++ b/Android.mk
@@ -27,6 +27,7 @@ LOCAL_PRIVILEGED_MODULE := true
 
 #LOCAL_SDK_VERSION := current
 LOCAL_RENDERSCRIPT_TARGET_API := 23
+LOCAL_MULTILIB := 32
 
 #LOCAL_OVERRIDES_PACKAGES := Camera2
 
@@ -37,6 +38,7 @@ LOCAL_MULTILIB := 32
 # If this is an unbundled build (to install separately) then include
 # the libraries in the APK, otherwise just put them in /system/lib and
 # leave them out of the APK
+
 #ifneq (,$(TARGET_BUILD_APPS))
   LOCAL_JNI_SHARED_LIBRARIES := libjni_snapcammosaic libjni_snapcamtinyplanet libjni_imageutil
 #else
diff --git a/res/drawable-hdpi/deep_portrait.png b/res/drawable-hdpi/deep_portrait.png
new file mode 100755
index 0000000000000000000000000000000000000000..67853ef2cf34de7b565603ee5faf94ee4d0e62d6
GIT binary patch
literal 2198
zcmV;H2x<3;P)<h;3K|Lk000e1NJLTq003A3003181^@s6%?l&y00001b5ch_0Itp)
z=>Px-P)S5VRCodHoZE|4RUF4>yyQ4qnnkAB4Wgz{D+{btvI`VF1oo2p5ZM(WG^qZ7
z9<rB`5~6%43L;ZfC==3@fr65}b%D&1vb+*WUdqz2>GRb+!|}{szqK!G?YW%!!I!<y
z`mNvXd-hp-?X~wl%}u>fIyyRrk=8^o1&jluYrt;P;TjxZe>?cLxw-iS+W|>q08-n)
z3t$^K3G#|;wgdUNHZ4ZOwLwHPcnE9(p+!FBySd?rRlyWK9mF6Ow`YlB%xD;rDON}A
zi)T?#%?HKo-$yeAwt`jF@R=G#v0MA)(M(~3!N;j(SKFu@23JLLWj`5;dWoW*DNvZE
zWc$ETa4fMum#r`XXbP8CUS>-7Y+j>Y+aGdg<v2(W(D!w4AGpL<6n(}gePtuttTM~z
zyHDt;b|~1Dq3V6~c@hjMT9|CX6JRg!iE<iRG*+*%qlg8*O3|+aV?rg3uk#b1Y)n}Y
zDrS%Pp|n9jN2VofQ3gNh$S06*OO)H79w8@0P?Dc%Rfp~;a9)U5X+GpTiNVGxGt+GQ
zL>nb7b(VETF*|l4gqiKhVQK6fqWPz=Jw|al$wZ$fDh9AlvU{Qtfa7T64@Hf_zvihf
zS<YOv!=((>p2S#g_?wXAYIG^CibgOg`IZbf6i++1<wlQ%FISJbTOE_&>)c^aYrgmB
zL#lUpocVI~n7h?63BJl5w&@5zUEPVOM3PuXj9^x-hM8X#lVIKUamjv{B4WE0)5^6}
zN-+txd`CaIrLbbfv~o>xmAxsNV^XkuTg7J2K3`8Orj={HD0RJ~IVO%Z$33Smk19S|
zF|Axn<rb6R-;B(x`LSVEN@c~gaxE1!Cc(Rn%!V_$A8Y7?s*+g44W~aFx>Bl`1pAV{
zrc~znjN3fjcYE}O)iDY7C4Fb%tn(Y*;W6<Y3?6g0Iwrx#3478Vy6G-M7g}|<;ZQmL
zZRmTaig9l-u;diV@v*a444_z+ERLN=Q2$iY0{*lrLZ_lH2pTxWnPMcd*f>SGwxpPs
zZi+QTOeeDup6s@<MsUc}r?fhWp{Fk`QLfXh4n=jb2);N&LZ4*l6ROMl436}eY;cK1
zaOVReR>E0|w(RrhvV56Ya?TSoUSGyD-6gO@W1e;C8Z<e@0={#Cb){E0pD0H#<KA8}
zvKG{}9<1q6C6dI_qgO83k>u>s7jBP2aFWdjMpl|SMY~N?AD39VV_}W4AJ*H1`W%zs
zwVovJ_2@&Y@9{YE<?1o-0X0XBo=LODy`$*cfgaimeost3!5g3*D3(i(q3D~>sFC;@
zS5<ZGb3z{j_@7f3!}W1Z=-9RXVh$0C^L}kGV?X$_X~fFvGi#1U`fgQhf>#pyfIEDb
zCMy|T1{rpSWay*Zlw&lBN-$HO6J$OQSogzJoSs$>Eh9Y+mD9W=M^D${>Zk-OEecr2
zW|1ewBCi2>TzX$ESd`@E^V2u<oMCpl&N8&W9l8p9zGKAO89JWJU-TomCh)GB{rcS-
z>l*@mwtW5=z<(weRT8|_$Y?T0+ZBdBmn!i!$uQbx=z^+x3&ARCe!$9nzN{}hU>X&=
z;;0${tSh_Y=&vuEl6(O9a*1;A-3%z3M$gz&*fLkuN&7*zkAd8muChsN<-?NqdZzjc
zqEht?tMVG%lDz3)yQIe6+O71>`zps^&51vQxGXDI7v&|G{#k6+COH&vU(BRZf|C^W
zj<D}J1ijypS9}*Midp&l$(0k#5u`gEzO#JpF&Lk|wTJmh?7UxG-|WvI*=`J!zL!?I
z6^*ZrhQIc11{F0_egxOlWySUlAvJEgb}bt#R+obqJ3SVdVytoY_X5QpGHk3^hFN)4
zj}XjHSz~OQk$#pn<a+b{^$j(j^_O8q#`PK(#ddushGJe*tSGRRW-Y+n2JQ#LfJ?Tx
z6h^ycqgWcFb$jS@u$>d0p8kw3QQl3U5Is62GH(F7{1vVSS${E?LBJmRXBJ{!O6OlQ
z8vIAUAHdfMzG5rV53&s^r3t1m-R`*=+*$(_TU7KANTIvT<nkRs%RFzPFZGKWY$2CZ
zMGGlxM<HXu<6tHD1B8^hMH3^5xgit5mugdus&#D9yfWJnWGZ+UoB%Q88=PhxM!MxK
zTOc1XL^Z4SsoesXZNTXyunEMJ-8e5Zw45$eK7f416melK=x3uGC;Bssh2S3$OHShW
zH4ySok;WuG|BaP2e#MFY@Tu5&48s=i5r`rBf3wv<YrnyWtv_t!JBguKdHz$4?wLg#
zwbaM%{G7@@2AUbH2AZ#ym0xh6X;9~sXe(!CwN)<EbvZ$A0-aMwO(WG-R`hh{18Fl3
z7r}30@JNBEy2RZCG}X~o?_nHf+iL~VN0ZzXa0}2>Q`-wcUU`dJAN3j$@M(}$G{$WJ
z&w<Ha=Y6DuYDsERZF$D=g=UF3(}U4Rflt)uc`)8|*bkJdF{w2xFK$k7<hv<JmyS=t
zmHmLbTqUUHq;@u@EK7Ba$u!gKj8cD9cuuN=25406!1s5JVygQ)cQme$#*i=3X;rGB
zMH~QeVabZKNTTOBU=8Mfq1S5IfWUyb0OG_Y;&PnG>b&ls&AzoZ{0dVLE$<Bo3?y8<
zYLFo=AMDb@_IlS>LAxF`Dl#wZ^4}kK#7V*pn^akUx^7ghz*=iaKP&b})^gSaZ`Q0!
zoVIF+(5jGi@>-QMZX<QKxDlt5G*(RXF!B`N#KWFGjjA5$DSV;`Zu4}y-J>5+yBTQH
z5D{&;&>gp7G-E{YsD>%G`a(yEVCQ0S_4)L>h%F*`Kq4|QKyXBI<yy0d&5>g?vir^d
Y176RGa7+!aegFUf07*qoM6N<$f}+qR+yDRo

literal 0
HcmV?d00001

diff --git a/res/drawable-hdpi/deep_portrait_black.png b/res/drawable-hdpi/deep_portrait_black.png
new file mode 100755
index 0000000000000000000000000000000000000000..c47a92e3322b3c9af2ee98a202ce952641fbb955
GIT binary patch
literal 3400
zcmV-O4Y%@%P)<h;3K|Lk000e1NJLTq002t>002q|1^@s6^Af2~00001b5ch_0Itp)
z=>Px?0!c(cRCodHoKI|2#~sJ_y<Pve3r=nR8L%;i7VLx=Qni3uRj3k5)l*t}3B5Fx
zdZ6OaDuEkNsZyn?Qq@aUscL%Yp;1zLh*VDGk_ws>(m*i8fQvD946%a&<Nw}${d}L_
znzysh_Ih^~jFX=<e(%kj`TqXQ%x``(^VUvm2~bm0Q~BWFU`;ZaEK8+Qm7eF7JB~B%
zy6$K^9-pYIs~hg_?jC>AqB>7N02iK{NNZ#9_?npKt;HyHJXdjF$~B!Pxu1+Vj;<pZ
zagh7|STcE==YG-+Qi}B0DDl`h#bR-IRhO5S*G^4M?eQF^jUKd}yzO2pwUKMIES@J3
zr)kw&TsxdZ;<Dp;mr6=XlH=p!_sAP2B}vPI+;!{LRhE{Pw$g)+Ij-ABN|L++lIr&f
z58O84V=496r<*D&D%PXaFHr6xX_S;A6=JA#UEz$8E(%|Kift(kjbc%vtgNhga%$>V
z45aVEq?uGfvgId*qoMmHQgbquYNMB3WRiHmb5VR*w4Z=mfutd^YATuh34UoG_g^NJ
zL|{&l*Nfxsi=|TXKNHM?a_Gu0p{|^LS<H27asKK#N@fGA0a_?q3qSd_-;*>6ST|U)
zj8&dUB(~7n2NAlSWGeWOyb;npjB^fSU1B&%(P5tTPGLeh2B_v*!(_05>rS5K?bVaY
zNL8dXsUhl_C=SB+)b#YU3_qHFxzw-Qop}81Ieb&fjmKXne)>Umb+rtcGcnAz<DY&?
z{Wrjl_I>aLP_yjPC;ZeF4DkxeOhg!K1Z9t)g^Z-$uvq`49;%^PFC6ekB6v)QzvQ5t
zRWgT1;oz?O5~JnY0JR%neOTsog3G^<FP}1PywBxP!%#8R1U!0O$G3GO&l;L(iZiSq
zs361ggLI~itL@k%j)3wlI}PC`P@tO@e~bJcQlUYl>i76~;Br4fnkDg<9AsGS3|5U*
zUteEAi#8*qT02a`^nn|D_wGGUW}${k<0geCTrw2JhpAC=NS<)!lDcOJ(vA-~pXEp5
z@zeBz9~DDb;WC+=&f<w2lE)@bt33;Uv=!mB^vEDu{~A;JzcVr|JqTC$5pWgAW7Wx9
zH5T!Z7R2R-;3@poP)$vZoVs=Lalr@J3_>fA$8-k;@waL$BvBA)I#osE<Bg4tMcI!E
zaD^>CVUiq@#~Rr-mK>^y`H<l-dj9-*QyD8t2v_)kDe@1K<gn`CO~t~@^0^ltE+m6A
zJeZt{O2RjXq2#ctpH(n~i!zELZgXgoWtD_4J|;!UW0uMJ*{-otsk?AChg%g(ld6`M
z7A=0QlW>I(aCl)YK2z*uNFHls*I2Qb_L9tHjCMA3AYs*iq%9EOY6doUP$Y-su|`&n
z#rDb_B-4IWCVAe<(b3U0nVEK;d|dD&&?W<UB3|W}Wz|>=hZ~47W{QI2*uV~Eo0)Ip
zS-8TFz#0qWiFlP?mQ`aR_+G@wZe`DFB0zjTKN07z<>P`MQEDaf?1{+FJIk7}jvqfR
ztlgv)5z=T{x3z#a;e~644Jso!b_a6ldC8EY<0uxhU8KpZMQuEO#w{s1R19f_%R>50
z7S9<uy4J5>uZhKs{qhpUR?l*WhK6d9Kqt%E0X3=5^}H))rj6$Sw=V)qa@5_udslK<
z(bEbD!Xz6S8V0#)Cd>*1nU%ULD=R<boplnf@FRdF$0R-eGp<%#fGuOiu3x{d-OvjN
zX0FlvQ=I+#_ur3DsNk#sSJ>uh&O^!L%T+L!831d>5*#hu!NoL|hQo?er%pYiGPWte
z)gHVVls*jPvBuJ_u{`fTNM=UOr5unp@)5@l-sx1wR4QL(gj4V1g3rCVHhjPbB$CG(
zSzX|=jXRa{ycdyf9Vt8mX<b-HkQm3ue8N{W3S6^%8+o~sTUAw6$JWe?fao;oYg7#P
zT#s)4zeGI#2VyPx(j`eQ)A*ZA<DVK+xxRW)NO6a4;QdHm=%TtzIKm5Xg`fUlMAuIx
z2YjrK9IIdmm!j*Rrc0eRno@k^yQ=5~x`un5Q8cGV*!VbAlonow9^x-K$d3wEb7Bxx
zD1%8<fyL%=+H#IMXGsHd)U6@EU}J$HoG23Cvw<8@@AJ;HZmiX-S9c)jd-#l~AXG+@
zl7;dm7WFg(W*Z&8;x9S!w#s9!n84ZZz;~FwL^}b$M3$)a<6=ZyAsq1aF<aPICB+vY
zTQhEkOog^^23Y7}ni)^Kl-l14N6-2WVKaI1Glft|fD@k0#piaw*W7?Cgx74{Vi`*e
zrcvk?*RZh?44-fE=}=w1+svw``oaM_+=G(v>-G8arx;XBV@)t4=@dHr(T04?nxyA_
ztEZ=@IUj{oPxXaE3i~Ya)5H?ymk*0M>lD*i<19DMz*%dyZ~$7u7cSo+SlmgzBKpNp
zz3QnxmBK?=_@e>8q8#QfM#y~1I{M`V!QuNj;Rl8zmQ(KG-AR?n1w6ny>DSA2>cV;e
z)_Va~xVPm3y>!aBiNtFt@@|B&^qHf!Zq_30LJB%6r}Cui&m$e@U1rDEimi3nK^BEq
zX(I8)<m9Brp*9A;8e$>?#e+yZlpTE=l+)~I8unv`-`ji^`M0sLth176usm~%>9c&1
zjvhUF3uQiH=i$;EGUmD@i7p4O@Q)lhqM7jNCj!M67$(1s7>Yf?7t3aEF_m^yj`}?&
zhH!;%Rw%}E(R)@Pw1&I6-=tR_F_WhH@NB7kljbWY+@iY^3G`Tu#lF)j&i6JM7SJ~l
zBZm(kHg{HyVl$6Hxe=Z+zNHYZ@FOc(&RnSbFN$4jYins&SJzr%p;{DZ@?3WlA7^Z%
z+ib&U=|leT2HkBU?*Dqpq<+_v6EUm)6P$Jmhd%|l3(jQ}xx&v@t~jaG4Th$^j2_y&
zd2@e9M@RPO1UYfbE>uYOReYqCzs}{9syjP7S1@ZekZO5uB6!?{a@)99%bO?F_fYWG
zLgSBmvlNuyL`K+H&%i@R_^W7`Rori+u^MTtA>`?!tPV5Odo@)mY1=!mz;!XkQd#YA
z%1D*$RIW#$U9@Hs#%V;zR(`9KUL>OzH1oqs|JL}E<ejGccZ~db9|3&_UaROyTPPS_
zM5`fIg1I$qe9Cz|Pcb2MQN9CSH!)^6z3x1}?dhewRu(dve5T!nH^1lGNZ0-$znl3p
z0$e4T+b`j*l7;Sj5&G3!qj!|?N##`!-o2!Sc+B-)a$l7m(norw@e93{@-y9Lfd2@C
zPLlT4he>L&xrCW7UxHpn$d|xvLb#b8n7fpY>Z!hPNclFH<d+`Or&Ib#PsE6tv1g^5
zQ&^_-XBqp?5efd7?))4z<eV2fd#ZciWEgw`qif)NNY6OOFwich{71&j#&a&D1#|^&
zpcOap0@%+p^tOPl6QxGdVqA;(DcIE{aS`Rz+b*)=_((5I3Xl8nyE-2BG3@mT(SDK~
z_MC?0ofyiA>Ztq<Ni$tH=^DP`f+B2M@PoMPzQ1F~jtl+${W6s5<U^`A(ca#EXJBAp
z6duCT&t0VOeUgs8<kXeYj$8(wOMd0+hYr$tSiTOr#l3u521NmsVcmnA|Gi2{Ia0GX
z$%kq~FYF*k5VF%ZisxP9-2g3rosGJiHm7cI=Ou+M9M?C}b6Y4_PYkvN#h<0ueT8S8
z1kFxR+^eIQ`l}(X@oTf+FslB_{k(f)I*;2!FBl|cukL63l=i)^vYYTbjQTwmuv_!p
z^+cG*LXuB%N?yq=`AO#fecIPb??KrC;bh!Ma+w)65cvIz!qcR5<@EJF@BRV9msJh~
z4Nb*Y@)2eWk+q~q@<~p~TN}*mFH0ZvydR<KwCoV*X^t#ku{NCVAu2}wI&_$KKcm#w
z#mq1L^#9R>^$qET&YBt1og#hJuWk=frY)^%*lG=A?j)JN>e4~LMS9d(-hE6OA}t5f
z>66YF&q_b;!v`zUPj-+!WS2*D;J|@6WBKP;*9`U)$H-alqjXN&zmHI#nmhfcfPK=F
zWX5g1vWx69s|_(#D}mXcD4gzs>8lF=dbZ#)zLB2jZ*~?>c9DIgx<3p#TG~TAur<OK
zcjAuoW<>t7%L?>VjF>^Pi|nIJ-&n*VEyN;gGD-!3B`3VyM*gyV^!1$iHuZLfg4o5k
z51E=yv6I)#Wwq`@G{AiO8J1Nrr?l)T(02eu!yRXJV4qk*&g-p{ClwBu28m-R{fDAg
zmc5k%1*I>ykDU2TzU)#fJNf5vV-pj9r+e)V>ZC84N)`T_Q)>GCa=gk=a!r{AbDXmk
zWo6$_FjgBdR8&~63QAgpe!dC&$WHnwOom$O%ateHSo$opOkGYr$Db9Sv_wmozi%ha
zhWd_bn4zfspD!rN0<sTwa(3<7Rl*)nHTySy7!g(W^Pb20ahh$OhZ>8{i4!Nj@L3D=
eh-1f&dH)B(FyX#cks>7k0000<MNUMnLSTZ+{jcr-

literal 0
HcmV?d00001

diff --git a/res/drawable-hdpi/deep_portrait_on.png b/res/drawable-hdpi/deep_portrait_on.png
new file mode 100755
index 0000000000000000000000000000000000000000..18cf7774aa31711f2dcf73537e68fbfb90bc64ef
GIT binary patch
literal 2859
zcmV+`3)J+9P)<h;3K|Lk000e1NJLTq003A3003181^@s6%?l&y00001b5ch_0Itp)
z=>Px<<w-<ARCodHU2BY9MHznQ+e^!(wpoo(DI!5&)t1thL|SexWp|~<Mu<ky7!|K1
z$|drPpg;Vfe?(Cd3>r*`_ozkNNb>D&T`tl}XjzbejaVyIC>UA`$gS<}_&j^M+i&;#
z-gnMz&X#50<n(+y@BKM*=FFLyGj(z_(i483{6m&XO;(DR#o}ZU&WjK#57K^=?ME5U
zdvG3+Q{_=P-MyBukDh<sQ4`9r@0Nq|NyKw0j->#tBU5Y++9HqPxC@~FRC?WCGwDYX
zo>_pscQOP}$K-0{x2ohEx8n}@zFZ<d5$Al#wo{v%Ie-WK`2hCj2*6HHe!H~Eb#jTj
zZ#tujziU-Enc{=Bw(h?LaG7w~wRVcD!+)%jQMq^ACw*5;)jHhfkClD6WxZTpah{1g
zCmW;-P0~5;<*NKwSA)H81GpPz3|u103p*YIVyTF#tHA^Q+eq-Mne$4zV^2Zv1@sPH
z9M|`<0KXUD_Y&Bg=(?ph?c^rubJwR=$S+w9zytm=0I?xun9l&b-$8a09!`(QT1Rko
z8t`ue^K2X+gpUv5e4;Mgz;UR}eeTYNk|;mp1H8?5$X;Qe^fWUACr^X_4fK9*mmYV>
zl+~ESzFiK;mAF7(2iPZRVu#Cj%A4g~Xph-{jz;m@;JtDcq7Q*PAjiux)TrBXgG-2b
zVfx(<<rvs<G7v!RG+cPb7M0@GVDD=(D7#UccpJAShc~ZYS&x^2(bfVvOZiY8l#TGc
zPPM?hrdLje?=e-TAR}%K9+C?K;O>$)$rYu8OHg>h#%;?k6(e4F#u$;}0z3xrEb?-L
zoa<h&L<;d}n?5WG#c1iK;Kc=4jN4OWr}W9MQp8wHB=ND+FrqghhEiHqJb(xNLIkly
z4a5Q5Dh6TbgT8cHB1x>eL;+4bfCXJ3i)v{7sv<9&1Fq~?1IiXVCcy8u980I{F!^ON
zj~VZWTNdqDEzJNg9>8ASsrq*Eyj<w^sQj`S^hKW<S+Qz5Da+#lEHclCfp2Y9ljYNj
zwNRY#0QQ(q&}k-S7<8X@ta=tHI2H&g)D0150PN8_t(q*KR;&f%j0Z3~M;bXi;;JUg
zrxk0#IO72<@*g8My?<;d6=YdHtyl}WjR&xkf2xsrjCT$B6I6cL4Emx^joOUy3QZ?v
zc|3q65cH^=nKH)WVq?;68W}SxgQ*y1#sj!581%*A@Jf>K(i5<xkGKUCL+p3}cM4~2
z52yh+S*!BOW;iTA(dhshk5_bdF-|;yUzM*RG`+XrWsM~BX~i-KY14<7)rMH7B(WMI
z`B`xQ9`^H*)E9zb)g=C6y)-@;D`Sp8aR45b^8xU2X1q=w!g8j)rc4>k#K(wfim@J)
z^GunX%s2p#rJ6b)%o%fbweSQeZ7$AkIIIx|;E4qymj84~TNcvuIQ1H$G%<&4wBW}9
zxLvll@O7lf$xE1S+;XHK@~_2mKVIBlGQEUiHkEZ|9DsY=zv0pvP=8+D<Mb|^9&bJl
zeR(Xs(#T;zK8abA$Xy->V20!5_i9v}E+6#KrfHh|4tdU4SZ(aaCY_QB;sIQfKbSF&
z$@@)ynaok?H_e4&HI;3qKBQ>tg}Gdlwom?Rbk|-)EoWQ310%<`pv(4n*k}d`*4pIL
zgfkHX7kYCwW-~eYAJz<a#ag2luLg5botNR}j3!GKfd4)mUjh)zCriuvL7QH_IPOCO
zm%hB*9SdBVj|VWVx5+PgN<lFeM5nWvKE@PjG>gOq_!RjKT<&RhJFUdyQj|~aDNOE>
zB*qk?`9$IZ%nt)=@)@+X_8eSIX&>|DlxR6Dh_#3@F}BDbFUAF!3HG`=Qu(HpE85`f
zp8=Q#!3^1COpGlu#Rr%P^t&5Ti~pwP;*3Qqs#1_~x+=j~7*hpQ4Zukve^>MIvO?wM
zWjNF36M4;M)vFR<YkcA?jK{tsYw)T(ZFzh~pwmM-t6WZX04FaMdh`cN{9YntD-??!
zWoAyiH@WGd)ctX2ho42wYCRC|FfZ1Qshx}6Bhytg{ryM3Tn@=r_-Q>&&rvlmC!h_s
zN7phr)rG68!AU}!)KCb>A*o}Y?sLM|?5so`>e7Y=Zenv|Rp8QHbpR*cZcTDj{$<Vb
z@*{j+b(12Y7gLA2v|&+Aeqgc7=2Z(Yw-j)JQMM$*x8_JRtV7=wZ);F3z(lP>t_R4W
zbqM^vBO7ts8H?zXzAL8c0q$`-a2sD`=IJtSCzBYn#~$dDzAL8c0cL{z?he%AyNz7)
z^lo$=`l8PnpqT+UN!0QkYC>8;W$9SHTG=YCY{nd;nXuZ~=Q3yL(f82E_N8f+Qa&>P
zC;eP~NvTgUhTNR~)6GSbCEdCD)|2Y_f7lO({C1l)ll^*DpysZ@T-|yI%xe}pTvC&5
zDo<ljNB?Gi9_sgIY>c!CZT}X`z>QS3Z;$?9&=N^gKI~7#!c~rtaVHYaxA8awN4h&^
zzPw#7a=Rg)p6>A{W0mB;QY!FLaQKw3<vbq7I?xgHGM>TjQmHt=+?QcM&H;3*Q9F}E
zum?k4;v$efm+#BV0slB+D6-ySH){CS@na0<KHL=r_p!(aaPrN=dfbxz2xcvkvOD=%
zz^=(BFw^k!EGm<-gZ|@)>1UIF7yXfJ$g~-ir;a=~;w|vmtR}MoT=(yVgRkKD2#(II
z&Vup|8+`VnS;@NtNwUMr!?@tqSa2{5MSx4aevRW6^r;_EWSWZ80X*oJA!rVy5a2lE
z<VigCF38;WPLGCd@oU&s&||(h7CCtUHTv_o(;f}Eupc_yig3VRgWK-GXy9_!)SBFi
zIpCFr!DY65IMNnvIt*UMAXYSDQukL&V7o@W26IXML9A==@_`63a&NS)uzMH4*dS}%
zz6d@<w)ieAjN&$q>yXE4oYU>8mL(XD&~?a%(F1w4m0D|nlO3D54I=xi#fZ1QM>oi!
zJH(@g^=NC|lf{hY)ZXk*LCsr*c(|-;IpR4R=i*HXAAr5MkS%%~tpTq4+mXVRnbUCc
z90LCHc<to56W?L<xc!;!RH-oR7opF-5;32P*jFDuza>WqKgPrFv(xpO0Uq?%;wpc4
zx?sz{lQ!-i1Xjo5r#iYZe`zWAdMh0AXJJcRc0sx9^&;e#?v2;UwdlituQA=J04Lv+
zam;i<qfm?ATzblja2pI-V*o8wUuQ+hw|rcsc^x88gG+ZKCes1_1q^*G0Kt9We+sw5
zu7KR^V%PntNazk67nq4TtSIesS4_wp0eHZl2XOy1!DxBrHi#$5WtDzk(K3)3Dtyd$
z%2RSDa9U?Z;P9PbpSzzjjs4(j0!k0!zf!pT4GS*8;m0(@z@ZOp(<GKj4Zhbe9v_L)
z8}pa(5L<zVn7)9|b~wAZL;egr1V@n6u`Lth!=EF9*zGE*!LOj*)xqT#LpRTX+W;ji
znv*HLg$6D^OK8XI`bSwd(L`K2MWFEyE;Z^%*==joo9(|l<U0UaUw$=~w9}Xe!2b*&
z<`%D;@UoD7j%>CAoM56a1tX@wwuY$`4(;jFH(q`_+p)@>pN$Qtsh05#WvS`F$(J#r
z_Gwe*jmtb9?X5*i39vr9OI43o#W~80KsO(9U3s<V^nwU<5#WVEu{nXf2rR6jt1&;X
z_MBb-0bo1P4-QDqvGXD@2XJ2GQtGv%i3kl`q%<`r)`S4C{XZ^ylU=@9RlNWJ002ov
JPDHLkV1icFe3$?L

literal 0
HcmV?d00001

diff --git a/res/drawable-mdpi/deep_portrait.png b/res/drawable-mdpi/deep_portrait.png
new file mode 100755
index 0000000000000000000000000000000000000000..bfe7cb8ac402190227f49ffefa7c336db77f856f
GIT binary patch
literal 1060
zcmV+<1l#+GP)<h;3K|Lk000e1NJLTq001li001ip1^@s61A%I}00001b5ch_0Itp)
z=>Px&-bqA3R9FesncZv7VHn3f<|P~2RuUSmED|M_Xi|$4QWR0noj4%vz!^DKYlr>?
z2h*f+XdEELq!u|)UXmP)=4H*>e7<(yPuFwr_j~U9w`Xsz`u4l-`~H5v*R|X4^-)?d
z4KY4G-ipmJumiM!Mxgi%-hwCKdZ|=;#c}R}M%%$P@C(F39C6#{q+<$Sh;>+H{lcs^
zO(R`Zn)%Jzdk5ut(KRbgQ+<C%2QQgB57vW4z7g^|H>Wjm!r1S*Gj^on?VxHO<0Y|z
z;s9}N+=}UHTBp)x@W)gfB43<pDlU-5hrBkY>cv}FgbepMC>c!;x2m6JTs3vv9z&9s
zJd>kn!&?I98^Jhb7-~Y#svbof-eBBD*s+ghoRm7vh#^}~Go76lZMe?ZjG4S>YPHrg
z>Ql5~d%o63-SY`nYd!N7Zl!3$dN1(Nz!kpA4^od#(_r{-Odd^ZX;|iaqZK9-LXF*o
z$?RaJXv2pB{hnZ)G7L4LXH}2nhWV%5gwxi*?NKmJ8HSo|&O1e$&cSpYu%Q>5rgVo1
zCYojtz7(x3>XXE$&~xBpjHEtEXh_lkX?;95%h5FXlyFY=WkoMM7pb~ETE!6@_Yy<<
zvecidFU+H0r{5NH>`gU7(`$4-?M&5+S9FI5&<sgvVwuTfX~>n=KulEAI@_`deDd4F
z@mZ>&xJEZjt~<}n2ZipK!r#c3$Eu10#Px9}rpa0_dP;bYL7#Bco#M0^=cVG<yNdP5
z(@f7iThs>Zmxpdsy81IiYv2d)A#GbGd8Q^Q7+&!&@@m0-;X8H5<U32AXW@AY>xrcc
z$4Bl10D^tsNYh-npc72>9A3be!!RD-P2o|KhpAor?zAB%X*{Kd*`X$IgbHg+l@BHl
zQ@gfCT6u(|lyUh-k32HQOIh0u_JN&1kK(NFu=~vC#RC=&x<zeLpK`okb9f3KgL~j1
zyKg_Am`VtY8o@a*07gM9zCu`?XRS3LPApVQHC4MY&#ycWsRYh|5s(`1ARZ{2<Rsz%
zaZ^J*s2BS?NToZA-3e|g+7B)RH4#0nd=+t>Mp}8K`~lrwUvWfy7G%X!CVRk9Q23*>
zqDaO-p3Yk}20>Pwt&qa#0O41_UFLRG5NBbann1N}sJD)Cc?RKA>n3wky5*HkYe-ZZ
zs0SMpPe*22fc?q;4dWFx)SDKmdI0Lhhk9($E}jJT=c23J3%^WD)f!A(4_A$adOV>k
ze509Es<*0B!Q5O8e*}fw)NpGsc_kRnr(vs33H`s?{}TVs;p7|tJou{?l{jAy)5yws
e&`e4TBYy$NnI7Y+N%B$v0000<MNUMnLSTZT>-9GP

literal 0
HcmV?d00001

diff --git a/res/drawable-mdpi/deep_portrait_black.png b/res/drawable-mdpi/deep_portrait_black.png
new file mode 100755
index 0000000000000000000000000000000000000000..d0d5f0c500748f06a7f10feb05ad91bbd5d92450
GIT binary patch
literal 2471
zcmV;Y30U@tP)<h;3K|Lk000e1NJLTq001}u001}$1^@s6sD?Wp00001b5ch_0Itp)
z=>Px;VM#<mRA>d=nn`RNRS<@!ml-ePfP-zanSc{Q1R)_&APXEoihu)1M2K(-VwDi$
zfCNY&A>hD)0|(&7fddkV9N7f|1;i$ZvOpq8NC*V5W8n~R2)1Y3Gu_SikLOL=%Q(|L
zgJV)s&0G4tdR4FLRn_ZhC-vWf1!)(|oC8oU^nL$A%5Jc|rKKg)(a{kM4-eD)??6m6
z8q#fTZS6xtLs#3jy*UWO)hw{svaB?v4Tj|fc@+LleZOs4?*u{cNq2Ymk-omZQTl@N
zm|0cWt*xyK;ON1WWqohi_AuyyJ{V4eMV{r@fZtNK{Sx!ngUz!_GMGsNS0dO4C1y`A
z)Kmfo%=Hj-ADVvvn&&k_NT&frWO~<NZ!f~!)zmYfF@2e19+WBmRmKyPC&3X=rf5u8
za~w9Qi%FNK!cgr)U|ok)PM{vp23-PEPa@nJ>Sn_v*dS-@K+17;I+pdF6^6aVV(}!J
zE&y4_ak@gwx`d5c&$Fu--wA4SY$RJ8+V-8PAo!E^>);T0MxX<8dI8h=2Q+3^MBsz;
zKMqIpK%FOPNy5v?7}VxAfilIdY|#6h!IKd>dCPG=<k=?BSOnFXgS6|q*CNc`2vZVn
zKib&}8)Cjz&<1Q^Q?^Bgq<)mXCwSIXE1`+wj7d%Guch6_pt}(021+B$NzTr5nC3S6
za<vdCkI`8?#ypwiQncBle$<wRz6Z?%G<($-7i2C;VGd_#Et3_*1s)83Mfon6#4&ix
z`vze?0{f{8nyFvxq|+BuPo#{b0_seh!{Txb21W?jIy1*iM}kt$WH%`9X&}qS&8Hy;
z6dRHY=(=-R&1er<!vfieOeu->4CB&bFP1R?LDMHEWt^4itGP@nU@BECW?3bgarm?q
z@9G7eU7>+A!s@&y3Ta6Nl!*$X^R?wguwIbE(PkOYIucH0Y=|-#ck;vq)t=ZFm!`&x
zMm>CuTQeykX)A~6DDy7?uH8zxZi{KxQKn%^;lYq72EMOz9o4FNcG|KY0f%5h`J@3C
zfpwcu&`t{CXD$w_;=E{kw(Ab6OhU9C0S6KApU9#a1k~McPNXkxRZgo1To{=+$~u40
zo>2j{F$+nCqbXD~j9f{-!behQAw63ToeLravO%G+gm!DBujXn}4LanAD`7;tZ6L6M
zK-Zu|J6$FcuxTUFvrT^|H<I;~9py2xilhQs_;Ll`==@RqjIt03nG_WNegva=r^R)u
zoj#NA#-bf%FwVq~omNn<q#M*n1KZw1Sx>On<@higb(GS8avJp~im^!r3^1EM7OiLt
z&leDl>q*m$K%-d4C}wKfd5*%bG?b^thU5bJ{t=j{>@>UHOfI?(4rIDX(kk(CF1P}d
zT#QguOYnEf_rIqW_1NV{r!Nv+K@3@L+?%K`Ew^3vd=WQ_dAyeB;~nT<^jFki=2R&q
z%>pjUoAIJ|Q(ps`q~WOJx^IO+pz~cTJ%)}!QVnW$4zt`(X|fp?E-#0JJ3P-*xQL@g
z@o|wK1h-%n+BCh!5EQvt^!bItZ`5ZrlhNWMIGXohNJ1KrCNgM3cQ@5*1&wui^`UuD
ztNI=D{FJ^QY9%yrTs<08y!!$zJ_SY@a4=70B5^LL()L`;u#%!-2?MlWrT-Q1cQ6su
zBcK*k-1-_ks74vmBmoyvj}y0~wexXQ(JW|yzFlB182juorv&v1C?Y-{FPbYh@vd`2
zQ&W?IbFI+IrYuK_#pMa2sSuPXG(;C|gn*;G*wxD>2`RvlT8j$4e;f7M&Y|nB!I^Ae
zu<@KjFs9J(s|cQHu$r5jd)V<$BkxD`KSMwF!)IV_I!)?owUMM@7vpngKB3|;j_Mws
z>3Lny2n~k$eEv9Udmm0tl(~`S77(y+q*UOqrb%gRCF2zL#@_FQVGi%S2S>G!z5-Ym
zG(tczFvOXA4KsU}x`~UphEV@HjWIqon;bn`&kY|%w71vEWVX?_cj_tedJW-tvnG!Y
zbGh7cT%ojR&Z0&IoX6?3pchQh=fh}om+yHE(ViG+lz^gTVOBE5Q|4${0a=YSTFuRq
zyacB-E-GU@ouV<;m-x340*ZzLbry45rqXajpe|;!+39W=KF(wq>CFaYNE(zR=-kS|
zC_dIDY@B?NwMrV-!op_G*Ag_a3Z~W&`_{qbO8QNJFUTA6k-<a_F{A=0ouu5h^}1g|
zwcw3!2Crb(cGBL>rtRZjtV1|5nZC?fSaV)li+~bFFOu5X?Ue|#5tJ~ii@smUJUD9%
z83!Zek)P0$8`nF&Ax3*~Mh6f|0t%G0_dvgg`t8_os_1zI=zj}-Eo4)^<(ULlGN6=^
z;qgJ5ScXt{b1nK921ZPrs-Y|}{&VKfuVGoO^CVjQvj)1QKUtxzKSy_<-A9<a8q}-(
zNjpqmRhZA@`&slCh?#$GPgm+|ib3!&^R{I&naiqDpZs7R>pjZ6-OL{VD?y)n)pf-i
zbNZS4G$faUlPq#7sr8}G(@ef^3bTu{CiBpxb_kO_!9qW=&^LI6&+wcwr`Tr0$3fkm
z?_YyiuE%UI<t^Pj_|WkX>aE%&0qA2WONHT^RG$NPfR!^kQNS)9J`2%%rw~qOsh?*9
z5bUMC*R}1Pd{}Tu?IZ$0qbfiXl2$v8Y#EMhxgUnt(O!xmI;--u61u{FS;+V3dl)<n
zD#nfj?T$1z(Ikek_w8ut8NAu1c0h}dn>hmwt=ZYxIR_p#!pmdu^fu;u3{>7*;#s`I
z%Jbl)cc>OSr4M=Zby8Ho;}VQ7m=>N1p{nhhc;R!gE_izgeh-5BTFMAHf@8dyxh7ok
z8f(Rh48nbB22lpI`x$sgwMAx2KRn(9uRGznVECuKi}P&mmL)r0!NbZfP(Q&xI#Tti
zU6?HrP39v5uaCg<5zu@Wr<cUngBhh+^xL;kDlIZ_oVV03)0i#`F=Nkk=>@L)23k7=
zBgT8`zY__Vm4G&B)~yjP^{Q@1%JX!i{(nF||1&%*wHq`HBh!u4+uXHl*IK*x?77rD
zpwuqN#cl?tj63wtHt-1_dLtLTdM)FQnz8haXD*-bk8mtkt01npH{;<T!bjYyUQ;SI
zR+Qp8&N`Pb|0>?5Oi@CeTgGzN|Fa-LSY#7Zl&GGq&|31MXRCC`Wm#LcY;m@4-<~}i
lg<Q32RrD*6vr)uU>R+uEIwYwndQ1QS002ovPDHLkV1jCKnQ#CA

literal 0
HcmV?d00001

diff --git a/res/drawable-mdpi/deep_portrait_on.png b/res/drawable-mdpi/deep_portrait_on.png
new file mode 100755
index 0000000000000000000000000000000000000000..87d24160820e13ebfe183ae047da4dec60b7e44d
GIT binary patch
literal 2135
zcmV-d2&ngoP)<h;3K|Lk000e1NJLTq002S&002M;1^@s6uAHIc00001b5ch_0Itp)
z=>Px-5lKWrRCod9TU%^ZMHJn0S{@QHiXuTQ6cj}vL3zIdX=sZgpJ-xzV*C+*{6Igf
zpW=t|p&w%WK_L}M&_ttP?(J;~1PVkzNg${^MUlu$9*P#|9oIgG3-_McbMJXfk;+a^
z&zUuQ_TIDZoS8W@Q$^QOGI}_9G>$Ybq9M57N(vX@FVSgSf8hEJ_zv8+(;RgW_iG{?
zK=;aeT|*b>S<2HhaCCo6XFo!}h5FKRnx%e;SvHn>_YtvC_owsJj9>Q^MARo%RHkw#
z+RVGuj~3Gmb<WIAk8}?a89fG}eTAzweTYRC6#Wch=i!yvRfIZu^qg2JHj>q|knF%!
zTMRpS)-()uIw<hr#ALyU+ZK`2BdCkMh4BFe2iCjtJICYrh0ppriu&VzC(_(`R+3Xx
zMH8uB9jK%jHNQHlVnsczJ<8H*=*c7~x`@2b@UDN39;1V1s-vRK4SFzj(o_&KANlif
z?<MFB0O6}i>xraPUdWFPxoZ)vnn#A^mIyXeHAaouYQH64hJzN?g3PI+f=AC$gIXcx
z#RT0Dp|zq`+K*pvxZz*XM)dY`6ZiJP=(85KX9%_rQUl$CR-ui&n1GwkvX!QZBoOTb
z{zzgHxH3FkvJJKs7ABs3I}kTS5IrHr;?a8;>i!b*;(%;}ZOJ10mUXSD8zMAKFgz$a
zgMQ&-15XODEw&jJ$#+;MLfQ=xiXJq4Y06QZ`qRLZ0&I(IhDGun)`^f#PXrHl(%A3O
zo(TTqsYo{2H^-1t(i72XnE>VKP|DcM;x?PM7)%RRBs~#T=*DEZB*Upt*`_^)$j)q)
zh-?5ey!1Ardy_RC#E`xcd6GLsMT6w1Q=m4Rwisf#N>4;@;sui~!>97U6b(2PAlvMs
zAcj<?(-TprE@6EC)rcZb4;xroAXwrnwZgtRhLn<?2o6Bg??xO7pUGSFEe4(x2yB@X
zH_3MbAL)t6>gn)1$?&Z)uRKGI2A&i+OLIWDv}`a53pE@=DoIZS(eq^@aJLGr$-W4q
z@j0GSTo5rzj6u-}v<F_6cOz_zZAzl(0SS(Sxga7~YBRm&T&xpZG0STXn%b_UOCM}8
zlxmepxgcVTXkXDL5l;hpSmcpJ(`E^dgSjArm+ei-3tyzBRy@RJvzNbh5YJXf+s#s4
zRv$sLy(?t<5GFbkF~-a9bx65lvR&VS9W!1buPxD4bc7n!-6dFpgo_rzZZm@KS_FO^
zL)!_gHYX>XSI0B@q;D%sLf|wj_>K*^AwtoYR?nJlr*+tcNGjX?vho%I+brihhEF#{
zRMQ#+wAbplkGMBsRWHf7AWotJ752%#9f%tug1LSZ$ly&JQ_fTJskW_b;H$~ScLoRS
zh6uLQsJ4NK*ZI+qE=t~+*~+pn_UTC65y1f|9PW^1vJ6K;<+65Pk&K3w+!9em--R9c
zX+<xJcofi9*5)gUO+m*k5q0V`T7}tMHP$AwdamHVJj7+VY*^R_`*J95iQvFAtrX+a
zw2E4FGqvl!Vtz5mI;_iOB(eFBbR%NPCptC<ynu2RpUgaekYx(C0k_Iom(h&oe!=z)
za}s0F2%4f&e`poQ;J!r!0X7#qDS7rR`yj<iRdic%GW6W+ngq7*o(Q(us6NEsW&CIe
ze@s`BJWg83vh1>LIX4B`Jw$LowyCeeS-VPN*ZHDS(WLp^NyN=!P<i@W#EU_jj_04V
zD%N!N+<io78ZSoa;fu7A*mP_*E7^;X?`G84pa<eJc`V-SvFHKD;kB5K`!K`5qGNc+
z2N}2oWb`q>1C6ZUtmP*7*nu8yC)PZ6;$T{3at%YJfV4bbsu51{7=Mu_BM?6M#)rJd
zqp=SuQL}f4>r3>?B^0fP{rQGUK=3f+HTnB%{G#!~5TD&vShC|~yI!;v6Rv-ZvSom1
zEuupYpmX#z9P#XMCY;?=#-=@sH+`ahC}s5o_~8|)_@exNXamZxMX%CA)6}U-M#>YB
z)AymdzXl^si7Q`-z@w!&3ixs%8R_1t--rK&k#ucJ0Vyl-8oiAJsz2G)Y&jXd5G`Oy
zY61vtN#QVg1HBXba40X+wbHAIfhTBuGB>#}A(|FpR=ZR(VzvmatKj@o6nb1LP!Usf
z31xSq@;pUu!#UhFG(lCKJCrlLP4}Zy^cYlT!}b)|9*uka{*8SooY-xkC-I|{?BJV3
zWc4yQSx_#nqP$K#^KRBQG`;WrYbZz<sTl7**{l1|VUUrhhy08DQ+?y$FxtOEDEbI}
z)eFS|mJ-2Ve0h4aSRwSB2cx{1l0l_&G#EYgXmuf^^WP+Q>)vz}BUqm55dd!(^N41j
zCwcS+`mDu;V6PI<uJ6TMU>7nZR+)egFf1*>uym+U_`1Hg>sk!+i$KUrh*Kh@6LXL;
zsLwvG$5kS-n$J5wDpal$3+nUG<K~3C>mSPLddxSzMxf>otmqrOBL0F=Kt#J9h1Re~
ztW_1}5B2I3F|Vh9IsFvKXc5Ehjh*y5^+V8PE}|Ee#KVx+(@EfZaUHlWl3cEP;oUKb
z;BmK_-jd4pNQ~<eOs>M=`*ruIP4oX@jtu7_y$Vgn=>n<X<Jv(%UCcF5t~)rW#`MQL
zpvB>7yi+`0|DWW5-%l(ss+6bk)p&PmF!oB)R_4@_q~DXVp&m#W+LLSGBzADXon?&^
zV?EnQjfb8z+1f(Su6Swm;Gi&kmS)_5scN){6&NAb-GFt!T4hCt{{^wk`sV>pXsiGL
N002ovPDHLkV1huW3Y!1`

literal 0
HcmV?d00001

diff --git a/res/layout/capture_module.xml b/res/layout/capture_module.xml
index b3ade9f55..a9c8f88e5 100755
--- a/res/layout/capture_module.xml
+++ b/res/layout/capture_module.xml
@@ -32,6 +32,7 @@
     <FrameLayout
         android:layout_width="match_parent"
         android:layout_height="match_parent"
+        android:id="@+id/mdp_preivew_frame"
         android:layout_gravity="center_vertical|center_horizontal">
         <com.android.camera.ui.AutoFitSurfaceView
             android:id="@+id/mdp_preview_content"
@@ -41,10 +42,16 @@
         <com.android.camera.ui.AutoFitSurfaceView
             android:layout_width="300dp"
             android:layout_height="300dp"
-	    android:id="@+id/mdp_preview_content_mono"
+	        android:id="@+id/mdp_preview_content_mono"
             android:visibility="gone"/>
     </FrameLayout>
 
+    <FrameLayout
+        android:id="@+id/camera_glpreview"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
+        android:layout_gravity="center_vertical|center_horizontal" />
+
     <View
         android:id="@+id/preview_cover"
         android:layout_width="match_parent"
diff --git a/res/layout/one_ui_layout.xml b/res/layout/one_ui_layout.xml
index 59e31b95d..61c55cd3a 100644
--- a/res/layout/one_ui_layout.xml
+++ b/res/layout/one_ui_layout.xml
@@ -132,6 +132,10 @@
         android:id="@+id/ts_makeup_switcher"
         style="@style/OneUIMenuButton" />
 
+    <com.android.camera.ui.RotateImageView
+        android:id="@+id/deepportrait_switcher"
+        style="@style/OneUIMenuButton" />
+
     <LinearLayout
         android:id="@+id/remaining_photos"
         android:layout_width="wrap_content"
@@ -164,6 +168,19 @@
         android:layout_width="20dp"
         android:src="@drawable/icon_x" />
 
+
+    <SeekBar
+        android:layout_width="320dp"
+        android:layout_height="40dp"
+        android:maxHeight="3dip"
+        android:minHeight="1dip"
+        android:visibility="gone"
+        android:layout_gravity="center_horizontal|bottom"
+        android:layout_marginBottom="90dp"
+        android:progressDrawable="@drawable/beautify_progressbar_style"
+        android:thumb="@drawable/ic_beautify_oval"
+        android:id="@+id/deepportrait_seekbar"/>
+
     <LinearLayout
         android:layout_width="match_parent"
         android:layout_height="wrap_content"
diff --git a/res/values/camera2arrays.xml b/res/values/camera2arrays.xml
index 8723ecd66..80a0151b4 100755
--- a/res/values/camera2arrays.xml
+++ b/res/values/camera2arrays.xml
@@ -159,6 +159,7 @@
         <item>104</item>
         <item>109</item>
         <item>110</item>
+	<item>111</item>
     </string-array>
 
     <!-- Camera Preferences Scene Mode dialog box entries -->
@@ -187,6 +188,7 @@
         <item>@string/pref_camera_scenemode_entry_panorama</item>
         <item>@string/pref_camera_scenemode_entry_promode</item>
         <item>@string/pref_camera_scenemode_entry_deepzoom</item>
+	<item>@string/pref_camera_scenemode_entry_deepportrait</item>
     </string-array>
 
     <array name="pref_camera2_scenemode_thumbnails" translatable="false">
@@ -214,6 +216,7 @@
         <item>@drawable/scene_panorama</item>
         <item>@drawable/promode</item>
         <item>@drawable/sharp_photo</item>
+	<item>@drawable/deep_portrait</item>
     </array>
 
     <array name="pref_camera2_scenemode_black_thumbnails" translatable="false">
@@ -241,6 +244,7 @@
         <item>@drawable/ic_scene_mode_black_panorama</item>
         <item>@drawable/ic_scene_mode_black_dual_camera</item>
         <item>@drawable/ic_scene_mode_black_sharp_photo</item>
+	<item>@drawable/deep_portrait_black</item>
     </array>
 
     <!-- Camera Preferences Scene Mode dialog box entries -->
@@ -269,6 +273,7 @@
         <item>@string/pref_camera2_scene_mode_panorama_instructional_content</item>
         <item>@string/pref_camera2_scene_mode_pro_instructional_content</item>
         <item>@string/pref_camera2_scene_mode_deepzoom_instructional_content</item>
+	<item>@string/pref_camera2_scene_mode_deepportrait_instructional_content</item>
     </string-array>
 
     <string-array name="pref_camera2_whitebalance_entryvalues" translatable="false">
@@ -1176,4 +1181,10 @@ for time lapse recording -->
         <item>@string/pref_camera2_video_hdr_entry_value_disable</item>
         <item>@string/pref_camera2_video_hdr_entry_value_enable</item>
     </string-array>
+
+    <string-array name="pref_camera2_deepportrait_entryvalues" translatable="false">
+        <item>@string/pref_camera2_deepportrait_entry_value_disable</item>
+        <item>@string/pref_camera2_deepportrait_entry_value_enable</item>
+    </string-array>
+
 </resources>
diff --git a/res/values/qcomstrings.xml b/res/values/qcomstrings.xml
index 7074df50d..215669f16 100755
--- a/res/values/qcomstrings.xml
+++ b/res/values/qcomstrings.xml
@@ -1087,7 +1087,7 @@
     <string name="pref_camera2_scene_mode_blur_buster_instructional_content"  translatable="true">BlurBuster reduces blur from shaky hands.It can be helpful when taking photos in difficult places.</string>
     <string name="pref_camera2_scene_mode_pro_instructional_content"  translatable="true">With Pro Mode, you can manually control settings for ISO,Exposure, White Balance, and Focus. You will have easy access to all of these advanced settings</string>
     <string name="pref_camera2_scene_mode_deepzoom_instructional_content"  translatable="true">With DeepZoom Mode, you can use the 2X or 4X to take picture, then you can get the deepzoom`s picture </string>
-
+    <string name="pref_camera2_scene_mode_deepportrait_instructional_content" translatable="true">With DeepPortrait, you can take selfies, with a blurred background. You can use the slider to adjust the amount of the blur</string>
     <string name="pref_camera2_not_show_again">Do not show again</string>
     <string name="pref_camera2_scene_mode_instructional_ok" translatable="true">OK</string>
 
@@ -1253,5 +1253,9 @@
     <string name="pref_camera2_video_hdr_entry_disable" translatable="true">disable</string>
     <string name="pref_camera2_video_hdr_entry_value_enable" translatable="false">1</string>
     <string name="pref_camera2_video_hdr_entry_value_disable" translatable="false">0</string>
+
+    <string name="pref_camera2_deepportrait_entry_value_disable" translatable="false">off</string>
+    <string name="pref_camera2_deepportrait_entry_value_enable" translatable="false">on</string>
+    <string name="pref_camera_scenemode_entry_deepportrait" translatable="false">Deepportrait</string>
 </resources>
 
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index 4d79d6bb0..23447c4fd 100755
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -66,6 +66,7 @@ import android.media.MediaMetadataRetriever;
 import android.media.MediaRecorder;
 import android.media.MediaCodecInfo;
 import android.net.Uri;
+import android.os.AsyncTask;
 import android.os.Bundle;
 import android.os.Debug;
 import android.os.Handler;
@@ -91,9 +92,14 @@ import android.graphics.Canvas;
 import android.graphics.Color;
 import android.util.AttributeSet;
 
+import com.android.camera.deepportrait.CamGLRenderObserver;
+import com.android.camera.deepportrait.CamGLRenderer;
+import com.android.camera.deepportrait.DPImage;
+import com.android.camera.deepportrait.GLCameraPreview;
 import com.android.camera.exif.ExifInterface;
 import com.android.camera.imageprocessor.filter.BlurbusterFilter;
 import com.android.camera.imageprocessor.filter.ChromaflashFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.ImageFilter;
 import com.android.camera.imageprocessor.PostProcessor;
 import com.android.camera.imageprocessor.FrameProcessor;
@@ -137,7 +143,8 @@ public class CaptureModule implements CameraModule, PhotoController,
         MediaSaveService.Listener, ClearSightImageProcessor.Callback,
         SettingsManager.Listener, LocationManager.Listener,
         CountDownView.OnCountDownFinishedListener,
-        MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener {
+        MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener,
+        CamGLRenderObserver {
     public static final int DUAL_MODE = 0;
     public static final int BAYER_MODE = 1;
     public static final int MONO_MODE = 2;
@@ -436,6 +443,9 @@ public class CaptureModule implements CameraModule, PhotoController,
     private long mIsoExposureTime;
     private int mIsoSensitivity;
 
+    private CamGLRenderer mRenderer;
+    private boolean mDeepPortraitMode = false;
+
     private class SelfieThread extends Thread {
         public void run() {
             try {
@@ -941,6 +951,12 @@ public class CaptureModule implements CameraModule, PhotoController,
         return value.equals("enable");
     }
 
+    public boolean isDeepPortraitMode() {
+        String value = mSettingsManager.getValue(SettingsManager.KEY_SCENE_MODE);
+        if (value == null) return false;
+        return Integer.parseInt(value) == SettingsManager.SCENE_MODE_DEEPPORTRAIT_INT;
+    }
+
     private boolean isMpoOn() {
         String value = mSettingsManager.getValue(SettingsManager.KEY_MPO);
         if (value == null) return false;
@@ -974,6 +990,14 @@ public class CaptureModule implements CameraModule, PhotoController,
         return CameraProfile.getJpegEncodingQualityParameter(value);
     }
 
+    public CamGLRenderer getCamGLRender() {
+        return mRenderer;
+    }
+
+    public GLCameraPreview getGLCameraPreview() {
+        return mUI.getGLCameraPreview();
+    }
+
     public LocationManager getLocationManager() {
         return mLocationManager;
     }
@@ -1104,7 +1128,8 @@ public class CaptureModule implements CameraModule, PhotoController,
         }
     }
 
-    private void updatePreviewSurfaceReadyState(boolean rdy) {
+    // Tracks whether the preview surface is ready before a capture session is created.
+    private void updatePreviewSurfaceReadyState(boolean rdy) {
         if (rdy != mSurfaceReady) {
             if (rdy) {
                 Log.i(TAG, "Preview Surface is ready!");
@@ -1201,20 +1226,26 @@ public class CaptureModule implements CameraModule, PhotoController,
                             Log.d(TAG, "cameracapturesession - onClosed");
                         }
                     };
-            waitForPreviewSurfaceReady();
-            Surface surface = getPreviewSurfaceForSession(id);
 
-            if(id == getMainCameraId()) {
-                mFrameProcessor.setOutputSurface(surface);
+            Surface surface = null;
+            if (!mDeepPortraitMode) {
+                waitForPreviewSurfaceReady();
+                surface = getPreviewSurfaceForSession(id);
+
+                if(id == getMainCameraId()) {
+                    mFrameProcessor.setOutputSurface(surface);
+                }
             }
 
             if(isClearSightOn()) {
-                mPreviewRequestBuilder[id].addTarget(surface);
-                list.add(surface);
+                if (surface != null) {
+                    mPreviewRequestBuilder[id].addTarget(surface);
+                    list.add(surface);
+                }
                 ClearSightImageProcessor.getInstance().createCaptureSession(
                         id == BAYER_ID, mCameraDevice[id], list, captureSessionCallback);
             } else if (id == getMainCameraId()) {
-                if(mFrameProcessor.isFrameFilterEnabled()) {
+                if(mFrameProcessor.isFrameFilterEnabled() && !mDeepPortraitMode) {
                     mActivity.runOnUiThread(new Runnable() {
                         public void run() {
                             mUI.getSurfaceHolder().setFixedSize(mPreviewSize.getHeight(), mPreviewSize.getWidth());
@@ -1246,8 +1277,10 @@ public class CaptureModule implements CameraModule, PhotoController,
                     mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null);
                 }
             } else {
-                mPreviewRequestBuilder[id].addTarget(surface);
-                list.add(surface);
+                if (surface != null) {
+                    mPreviewRequestBuilder[id].addTarget(surface);
+                    list.add(surface);
+                }
                 list.add(mImageReader[id].getSurface());
                 // Here, we create a CameraCaptureSession for camera preview.
                 mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null);
@@ -2556,8 +2589,9 @@ public class CaptureModule implements CameraModule, PhotoController,
             ClearSightImageProcessor.getInstance().close();
         }
         closeCamera();
-        resetAudioMute();
         mUI.showPreviewCover();
+        GLCameraPreview glPreview = mUI.getGLCameraPreview();
+        if (glPreview != null) glPreview.onPause();
         mUI.hideSurfaceView();
         mFirstPreviewLoaded = false;
         stopBackgroundThread();
@@ -2567,7 +2601,6 @@ public class CaptureModule implements CameraModule, PhotoController,
         closeVideoFileDescriptor();
     }
 
-    @Override
     public void onResumeBeforeSuper() {
         // must change cameraId before "mPaused = false;"
         int intentCameraId = CameraUtil.getCameraFacingIntentExtras(mActivity);
@@ -2605,6 +2638,11 @@ public class CaptureModule implements CameraModule, PhotoController,
     private ArrayList<Integer> getFrameProcFilterId() {
         ArrayList<Integer> filters = new ArrayList<Integer>();
 
+        if(mDeepPortraitMode) {
+            filters.add(FrameProcessor.FILTER_DEEP_PORTRAIT);
+            return filters;
+        }
+
         String scene = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP);
         if(scene != null && !scene.equalsIgnoreCase("0")) {
             filters.add(FrameProcessor.FILTER_MAKEUP);
@@ -2612,7 +2650,6 @@ public class CaptureModule implements CameraModule, PhotoController,
         if(isTrackingFocusSettingOn()) {
             filters.add(FrameProcessor.LISTENER_TRACKING_FOCUS);
         }
-
         return filters;
     }
 
@@ -2687,7 +2724,9 @@ public class CaptureModule implements CameraModule, PhotoController,
         Log.d(TAG, "updatePreviewSize final preview size = " + width + ", " + height);
 
         mPreviewSize = new Size(width, height);
-        mUI.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+        if (!mDeepPortraitMode) {
+            mUI.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+        }
     }
 
     private void openProcessors() {
@@ -2725,11 +2764,11 @@ public class CaptureModule implements CameraModule, PhotoController,
                 Log.d(TAG, "Chosen postproc filter id : " + getPostProcFilterId(mode));
                 mPostProcessor.onOpen(getPostProcFilterId(mode), isFlashOn,
                         isTrackingFocusSettingOn(), isMakeupOn, isSelfieMirrorOn,
-                        mSaveRaw, mIsSupportedQcfa);
+                        mSaveRaw, mIsSupportedQcfa, mDeepPortraitMode);
             } else {
                 mPostProcessor.onOpen(PostProcessor.FILTER_NONE, isFlashOn,
                         isTrackingFocusSettingOn(), isMakeupOn, isSelfieMirrorOn,
-                        mSaveRaw, mIsSupportedQcfa);
+                        mSaveRaw, mIsSupportedQcfa, mDeepPortraitMode);
             }
         }
         if(mFrameProcessor != null) {
@@ -2751,6 +2790,7 @@ public class CaptureModule implements CameraModule, PhotoController,
     public void onResumeAfterSuper() {
         Log.d(TAG, "onResume " + getCameraMode());
         reinit();
+        mDeepPortraitMode = isDeepPortraitMode();
         initializeValues();
         updatePreviewSize();
         mCameraIdList = new ArrayList<>();
@@ -2786,7 +2826,15 @@ public class CaptureModule implements CameraModule, PhotoController,
             msg.arg1 = cameraId;
             mCameraHandler.sendMessage(msg);
         }
-        mUI.showSurfaceView();
+        if (!mDeepPortraitMode) {
+            mUI.showSurfaceView();
+            mUI.stopDeepPortraitMode();
+        } else {
+            mUI.startDeepPortraitMode(mPreviewSize);
+            GLCameraPreview glPreview = mUI.getGLCameraPreview();
+            if (glPreview != null) glPreview.onResume();
+        }
+
         if (!mFirstTimeInitialized) {
             initializeFirstTime();
         } else {
@@ -3324,6 +3372,15 @@ public class CaptureModule implements CameraModule, PhotoController,
         return mOrientation;
     }
 
+    /** Returns the main camera's sensor orientation in degrees, or 0 if unknown. */
+    public int getSensorOrientation() {
+        CameraCharacteristics characteristics = getMainCameraCharacteristics();
+        if (characteristics == null) return 0;
+        Integer degree =
+                characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+        return degree != null ? degree : 0;
+    }
+
     @Override
     public void onShowSwitcherPopup() {
 
@@ -5825,6 +5882,39 @@ public class CaptureModule implements CameraModule, PhotoController,
     boolean checkSessionAndBuilder(CameraCaptureSession session, CaptureRequest.Builder builder) {
         return session != null && builder != null;
     }
+
+    public void onRenderComplete(DPImage dpimage, boolean isError) {
+        dpimage.mImage.close();
+        if(isError) {
+            getGLCameraPreview().requestRender();
+        }
+    }
+
+    public void onRenderSurfaceCreated() {
+        updatePreviewSurfaceReadyState(true);
+        mUI.initThumbnail();
+        if (getFrameFilters().size() == 0) {
+            Toast.makeText(mActivity, "DeepPortrait is not supported",
+                    Toast.LENGTH_LONG).show();
+            return;
+        }
+        mRenderer = getGLCameraPreview().getRendererInstance();
+        DeepPortraitFilter filter = (DeepPortraitFilter)getFrameFilters().get(0);
+        if (filter != null) {
+            if (filter.getDPInitialized()) {
+                int degree = getSensorOrientation();
+                int adjustedRotation = ( degree - getDisplayOrientation() + 360 ) % 360;
+                int surfaceRotation =
+                        90 * mActivity.getWindowManager().getDefaultDisplay().getRotation();
+                mRenderer.setMaskResolution(filter.getDpMaskWidth(),filter.getDpMaskHieght());
+                mRenderer.setRotationDegree(
+                        adjustedRotation, (degree - surfaceRotation + 360) % 360);
+            }
+        }
+    }
+    public void onRenderSurfaceDestroyed() {
+        mRenderer = null;
+    }
 }
 
 class Camera2GraphView extends View {
diff --git a/src/com/android/camera/CaptureUI.java b/src/com/android/camera/CaptureUI.java
index 950820fcc..617d9ef71 100755
--- a/src/com/android/camera/CaptureUI.java
+++ b/src/com/android/camera/CaptureUI.java
@@ -46,6 +46,7 @@ import android.renderscript.Type;
 import android.text.TextUtils;
 import android.util.DisplayMetrics;
 import android.util.Log;
+import android.util.Size;
 import android.view.Display;
 import android.view.Gravity;
 import android.view.LayoutInflater;
@@ -65,6 +66,8 @@ import android.widget.LinearLayout;
 import android.widget.RelativeLayout;
 import android.widget.SeekBar;
 import android.widget.TextView;
+
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.ui.AutoFitSurfaceView;
 import com.android.camera.ui.Camera2FaceView;
 import com.android.camera.ui.CameraControls;
@@ -82,7 +85,8 @@ import com.android.camera.ui.SelfieFlashView;
 import com.android.camera.ui.TrackingFocusRenderer;
 import com.android.camera.ui.ZoomRenderer;
 import com.android.camera.util.CameraUtil;
-
+import com.android.camera.deepportrait.CamGLRenderer;
+import com.android.camera.deepportrait.GLCameraPreview;
 import org.codeaurora.snapcam.R;
 
 import java.util.List;
@@ -110,6 +114,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
     private AutoFitSurfaceView mSurfaceViewMono;
     private SurfaceHolder mSurfaceHolder;
     private SurfaceHolder mSurfaceHolderMono;
+    private GLCameraPreview mGLSurfaceView = null;
     private int mOrientation;
     private int mFilterMenuStatus;
     private PreviewGestures mGestures;
@@ -188,7 +193,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
     private View mSceneModeSwitcher;
     private View mFrontBackSwitcher;
     private ImageView mMakeupButton;
+    private ImageView mDeepportraitSwitcher;
     private SeekBar mMakeupSeekBar;
+    private SeekBar mDeepportraitSeekBar;
     private View mMakeupSeekBarLayout;
     private View mSeekbarBody;
     private TextView mRecordingTimeView;
@@ -199,6 +206,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
     private ImageView mSeekbarToggleButton;
     private View mProModeCloseButton;
     private RotateLayout mSceneModeLabelRect;
+    private LinearLayout mSceneModeLabelView;
     private TextView mSceneModeName;
     private ImageView mExitBestMode;
     private RotateLayout mDeepZoomModeRect;
@@ -241,6 +249,12 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         }
     }
 
+    public void initThumbnail() {
+        if (mThumbnail == null)
+            mThumbnail = (ImageView) mRootView.findViewById(R.id.preview_thumb);
+        mActivity.updateThumbnail(mThumbnail);
+    }
+
     private void previewUIDestroyed() {
         mModule.onPreviewUIDestroyed();
     }
@@ -293,6 +307,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         mFrontBackSwitcher = mRootView.findViewById(R.id.front_back_switcher);
         mMakeupButton = (ImageView) mRootView.findViewById(R.id.ts_makeup_switcher);
         mMakeupSeekBarLayout = mRootView.findViewById(R.id.makeup_seekbar_layout);
+        mDeepportraitSwitcher = (ImageView) mRootView.findViewById(R.id.deepportrait_switcher);
         mSeekbarBody = mRootView.findViewById(R.id.seekbar_body);
         mSeekbarToggleButton = (ImageView) mRootView.findViewById(R.id.seekbar_toggle);
         mSeekbarToggleButton.setOnClickListener(new View.OnClickListener() {
@@ -323,6 +338,29 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
             public void onStopTrackingTouch(SeekBar seekBar) {
             }
         });
+        mDeepportraitSeekBar = (SeekBar)mRootView.findViewById(R.id.deepportrait_seekbar);
+        mDeepportraitSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
+            @Override
+            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+                 if (mModule.getCamGLRender() != null) {
+                     mModule.getCamGLRender().setBlurLevel(progress);
+                 }
+            }
+
+            @Override
+            public void onStartTrackingTouch(SeekBar seekBar) {
+
+            }
+
+            @Override
+            public void onStopTrackingTouch(SeekBar seekBar) {
+                final SharedPreferences prefs =
+                        PreferenceManager.getDefaultSharedPreferences(mActivity);
+                SharedPreferences.Editor editor = prefs.edit();
+                editor.putInt(SettingsManager.KEY_DEEPPORTRAIT_VALUE, seekBar.getProgress());
+                editor.apply();
+            }
+        });
         mMakeupButton.setOnClickListener(new View.OnClickListener(){
             @Override
             public void onClick(View v) {
@@ -333,6 +371,26 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
             }
         });
         setMakeupButtonIcon();
+
+        mDeepportraitSwitcher.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                if (module != null && !module.isAllSessionClosed()) {
+                    String value = mSettingsManager.getValue(SettingsManager.KEY_SCENE_MODE);
+                    if(value == null ||
+                            Integer.valueOf(value) != SettingsManager.SCENE_MODE_DEEPPORTRAIT_INT) {
+                        mSettingsManager.setValue(SettingsManager.KEY_SCENE_MODE,""+
+                                SettingsManager.SCENE_MODE_DEEPPORTRAIT_INT);
+                    } else {
+                        mSettingsManager.setValue(SettingsManager.KEY_SCENE_MODE,
+                                ""+SettingsManager.SCENE_MODE_AUTO_INT);
+                    }
+                }
+                setDeepportraitButtonIcon();
+            }
+        });
+        setDeepportraitButtonIcon();
+
         mFlashButton = (FlashToggleButton) mRootView.findViewById(R.id.flash_button);
         mProModeCloseButton = mRootView.findViewById(R.id.promode_close_button);
         mProModeCloseButton.setOnClickListener(new View.OnClickListener() {
@@ -584,6 +642,20 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         return mDeepZoomValue;
     }
 
+    private void setDeepportraitButtonIcon() {
+        final boolean enable = DeepPortraitFilter.isSupportedStatic();
+        mActivity.runOnUiThread(new Runnable() {
+            public void run() {
+                mDeepportraitSwitcher.setEnabled(enable);
+                if (mModule.isDeepPortraitMode()) {
+                    mDeepportraitSwitcher.setImageResource(R.drawable.deep_portrait_on);
+                } else {
+                    mDeepportraitSwitcher.setImageResource(R.drawable.deep_portrait);
+                }
+            }
+        });
+    }
+
     public void onCameraOpened(List<Integer> cameraIds) {
         mGestures.setCaptureUI(this);
         if (mModule.isDeepZoom()) {
@@ -599,6 +671,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         initFilterModeButton();
         initFlashButton();
         setMakeupButtonIcon();
+        setDeepportraitButtonIcon();
         showSceneModeLabel();
         updateMenus();
         if(mModule.isTrackingFocusSettingOn()) {
@@ -912,7 +985,12 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         mIsSceneModeLabelClose = false;
         int index = mSettingsManager.getValueIndex(SettingsManager.KEY_SCENE_MODE);
         CharSequence sceneModeNameArray[] = mSettingsManager.getEntries(SettingsManager.KEY_SCENE_MODE);
-        if ( index > 0 && index < sceneModeNameArray.length ) {
+        if (mModule.isDeepPortraitMode()) {
+            mSceneModeLabelRect.setVisibility(View.GONE);
+            mExitBestMode.setVisibility(View.GONE);
+            return;
+        }
+        if ( index > 0 && index < sceneModeNameArray.length) {
             mSceneModeName.setText(sceneModeNameArray[index]);
             mSceneModeLabelRect.setVisibility(View.VISIBLE);
             mExitBestMode.setVisibility(View.VISIBLE);
@@ -947,6 +1025,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         if(value != null && value.equals("0")) {
             mMakeupButton.setVisibility(View.INVISIBLE);
         }
+        mDeepportraitSwitcher.setVisibility(View.INVISIBLE);
         mIsVideoUI = true;
         mPauseButton.setVisibility(View.VISIBLE);
     }
@@ -1182,6 +1261,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         if (mFilterModeSwitcher != null) mFilterModeSwitcher.setVisibility(status);
         if (mFilterModeSwitcher != null) mFilterModeSwitcher.setVisibility(status);
         if (mMakeupButton != null) mMakeupButton.setVisibility(status);
+        if (mDeepportraitSwitcher != null) mDeepportraitSwitcher.setVisibility(status);
     }
 
     public void initializeControlByIntent() {
@@ -1227,6 +1307,38 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         mActivity.setSystemBarsVisibility(false);
     }
 
+    public void startDeepPortraitMode(Size preview) {
+        mSurfaceView.setVisibility(View.GONE);
+        mSurfaceViewMono.setVisibility(View.GONE);
+        mGLSurfaceView = new GLCameraPreview(
+                    mActivity, preview.getWidth(), preview.getHeight(), mModule);
+        FrameLayout layout = (FrameLayout) mActivity.findViewById(R.id.camera_glpreview);
+        layout.addView(mGLSurfaceView);
+        mGLSurfaceView.setVisibility(View.VISIBLE);
+        mRootView.requestLayout();
+        final SharedPreferences prefs =
+                PreferenceManager.getDefaultSharedPreferences(mActivity);
+        int progress = prefs.getInt(SettingsManager.KEY_DEEPPORTRAIT_VALUE,50);
+        mDeepportraitSeekBar.setProgress(progress);
+        mDeepportraitSeekBar.setVisibility(View.VISIBLE);
+        mRenderOverlay.setVisibility(View.GONE);
+    }
+
+    public void stopDeepPortraitMode() {
+        FrameLayout layout = (FrameLayout)mActivity.findViewById(R.id.camera_glpreview);
+        if (mGLSurfaceView != null) {
+            mGLSurfaceView.setVisibility(View.GONE);
+            layout.removeView(mGLSurfaceView);
+        }
+        mGLSurfaceView = null;
+        mDeepportraitSeekBar.setVisibility(View.GONE);
+        mRenderOverlay.setVisibility(View.VISIBLE);
+    }
+
+    public GLCameraPreview getGLCameraPreview() {
+        return mGLSurfaceView;
+    }
+
     public void updateMenus() {
         boolean enableMakeupMenu = true;
         boolean enableFilterMenu = true;
@@ -1652,7 +1764,7 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
         if (mGestures != null) {
             mGestures.setEnabled(previewFocused);
         }
-        if (mRenderOverlay != null) {
+        if (mRenderOverlay != null && !mModule.isDeepPortraitMode()) {
             // this can not happen in capture mode
             mRenderOverlay.setVisibility(previewFocused ? View.VISIBLE : View.GONE);
         }
diff --git a/src/com/android/camera/SettingsManager.java b/src/com/android/camera/SettingsManager.java
index 37376715d..d17df06cb 100755
--- a/src/com/android/camera/SettingsManager.java
+++ b/src/com/android/camera/SettingsManager.java
@@ -52,6 +52,7 @@ import com.android.camera.imageprocessor.filter.BeautificationFilter;
 import com.android.camera.imageprocessor.filter.BestpictureFilter;
 import com.android.camera.imageprocessor.filter.BlurbusterFilter;
 import com.android.camera.imageprocessor.filter.ChromaflashFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.OptizoomFilter;
 import com.android.camera.imageprocessor.filter.SharpshooterFilter;
 import com.android.camera.imageprocessor.filter.StillmoreFilter;
@@ -100,6 +101,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
     public static final int SCENE_MODE_TRACKINGFOCUS_INT = SCENE_MODE_CUSTOM_START + 8;
     public static final int SCENE_MODE_PROMODE_INT = SCENE_MODE_CUSTOM_START + 9;
     public static final int SCENE_MODE_DEEPZOOM_INT = SCENE_MODE_CUSTOM_START + 10;
+    public static final int SCENE_MODE_DEEPPORTRAIT_INT = SCENE_MODE_CUSTOM_START + 11;
     public static final String SCENE_MODE_DUAL_STRING = "100";
     public static final String KEY_CAMERA_SAVEPATH = "pref_camera2_savepath_key";
     public static final String KEY_RECORD_LOCATION = "pref_camera2_recordlocation_key";
@@ -160,6 +162,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
     public static final String KEY_QCFA = "pref_camera2_qcfa_key";
     public static final String KEY_EIS_VALUE = "pref_camera2_eis_key";
     public static final String KEY_FOVC_VALUE = "pref_camera2_fovc_key";
+    public static final String KEY_DEEPPORTRAIT_VALUE = "pref_camera2_deepportrait_key";
 
     public static final HashMap<String, Integer> KEY_ISO_INDEX = new HashMap<String, Integer>();
     public static final String KEY_BSGC_DETECTION = "pref_camera2_bsgc_key";
@@ -1229,12 +1232,20 @@ public class SettingsManager implements ListMenu.SettingsListener {
         Size[] sizes = map.getOutputSizes(ImageFormat.JPEG);
         List<String> res = new ArrayList<>();
 
+        boolean isDeepportrait = getDeepportraitEnabled();
+
         if (getQcfaPrefEnabled() && getIsSupportedQcfa(cameraId)) {
             res.add(getSupportedQcfaDimension(cameraId));
         }
 
         if (sizes != null) {
             for (int i = 0; i < sizes.length; i++) {
+                if (isDeepportrait &&
+                        (Math.min(sizes[i].getWidth(),sizes[i].getHeight()) < 720 ||
+                        Math.max(sizes[i].getWidth(),sizes[i].getHeight()) <= 1024)) {
+                    // some resolutions are not supported in deepportrait
+                    continue;
+                }
                 res.add(sizes[i].toString());
             }
         }
@@ -1352,7 +1363,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
         if (BlurbusterFilter.isSupportedStatic()) modes.add(SCENE_MODE_BLURBUSTER_INT + "");
         if (SharpshooterFilter.isSupportedStatic()) modes.add(SCENE_MODE_SHARPSHOOTER_INT + "");
         if (TrackingFocusFrameListener.isSupportedStatic()) modes.add(SCENE_MODE_TRACKINGFOCUS_INT + "");
-        if (DeepZoomFilter.isSupportedStatic()) modes.add(SCENE_MODE_DEEPZOOM_INT + "");
+        if (DeepPortraitFilter.isSupportedStatic()) modes.add(SCENE_MODE_DEEPPORTRAIT_INT+"");
         modes.add("" + SCENE_MODE_PROMODE_INT);
         for (int mode : sceneModes) {
             modes.add("" + mode);
@@ -1523,14 +1534,21 @@ public class SettingsManager implements ListMenu.SettingsListener {
     }
 
     public boolean getQcfaPrefEnabled() {
-        ListPreference qcfaPref = mPreferenceGroup.findPreference(KEY_QCFA);
-        String qcfa = qcfaPref.getValue();
+        String qcfa = getValue(KEY_QCFA);
         if(qcfa != null && qcfa.equals("enable")) {
             return true;
         }
         return false;
     }
 
+    public boolean getDeepportraitEnabled() {
+        String dp = getValue(KEY_SCENE_MODE);
+        if (dp != null && Integer.parseInt(dp) == SCENE_MODE_DEEPPORTRAIT_INT) {
+            return true;
+        }
+        return false;
+    }
+
     public boolean getIsSupportedQcfa (int cameraId) {
         byte isSupportQcfa = 0;
         try {
diff --git a/src/com/android/camera/deepportrait/CamGLRenderObserver.java b/src/com/android/camera/deepportrait/CamGLRenderObserver.java
new file mode 100755
index 000000000..53faa4543
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamGLRenderObserver.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package com.android.camera.deepportrait;
+
+// Wrapper for native library
+
+public interface CamGLRenderObserver
+{
+    public void onRenderComplete(DPImage dpimage, boolean isError);
+    public void onRenderSurfaceCreated();
+    public void onRenderSurfaceDestroyed();
+}
diff --git a/src/com/android/camera/deepportrait/CamGLRenderer.java b/src/com/android/camera/deepportrait/CamGLRenderer.java
new file mode 100755
index 000000000..ef8e3b1f4
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamGLRenderer.java
@@ -0,0 +1,695 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.deepportrait;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.nio.ShortBuffer;
+import java.util.Vector;
+import java.lang.Thread;
+import android.media.Image;
+import android.media.Image.Plane;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.content.Context;
+import android.opengl.GLES30;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLSurfaceView.Renderer;
+import android.opengl.Matrix;
+import android.util.Log;
+
+
+public class CamGLRenderer implements Renderer
+{
    // Model-view-projection matrices (column major, as used by android.opengl.Matrix).
    private final float[] mtrxProjection        = new float[16];
    private final float[] mtrxView              = new float[16];
    private final float[] mtrxProjectionAndView = new float[16];
    // Vertex shader points

    public FloatBuffer mSquareVertices;
    public FloatBuffer[] mSquareTextureCoordinates = new FloatBuffer[4]; // none / H-flip / V-flip / HV-flip
    // Pending input frames; guarded by synchronized(mFrameQueue) in sendFrame,
    // consumed on the GL thread in onDrawFrame.
    private Vector<DPImage> mFrameQueue;

    // NOTE(review): not referenced in this file's visible code — logging below
    // is unconditional; confirm whether this flag is still wanted.
    private final boolean SHOW_LOGS = false;

    /** GL program handles: YUV->RGB convert, blur, and blend. */
    private int mConvertProgramHandle;
    private int mBlurProgramHandle;
    private int mProgramHandle;
    // True between onSurfaceCreated/onResume and onPause; gates frame intake
    // and rendering.
    private Boolean mActive = false;

    // NOTE(review): appears unused in this file — verify before relying on it.
    Boolean mBokehEffect = true;

    // Full screen (surface) resolution in pixels.
    float mScreenWidth = 0;
    float mScreenHeight = 0;

    // Letterboxed region of interest within the screen that preserves the
    // frame aspect ratio (computed in onSurfaceChanged).
    int mScreenROIX      = 0;
    int mScreenROIY      = 0;
    int mScreenROIWidth  = 0;
    int mScreenROIHeight = 0;

    // Input (camera) frame resolution.
    int mFrameWidth = 0;
    int mFrameHeight = 0;

    // Misc
    Context mContext;
    long mLastTime;
    int mProgram;
    private CamRenderTexture mCamTexture;

    private ByteBuffer scratchRGB;
    private CamGLRenderObserver mObserver;

    // Attribute/uniform locations indexed per program: 0=convert, 1=blend, 2=blur.
    private final int NUM_PROGRAMS = 3;
    private int[] mVerticesHandle    = new int[NUM_PROGRAMS];
    private int[] mTexCoordLocHandle = new int[NUM_PROGRAMS];
    private int[] mMVPMtrxhandle     = new int[NUM_PROGRAMS];
    private int mRotMtrxhandle;
    private int mSurfaceRotMtrxhandle;
    private int mFlipMtrxhandle;
    private int mInYHandle;
    private int mInCHandle;
    private int mPositionConv;
    private int[] mInRGBHandle = new int[8]; // one sampler slot per blur mip level
    private int mForegroundRGBHandle;
    private int mBackGroundRGBHandle;
    private int mMaskHandle;
    private int mXPixelOffsetUniform;
    private int mYPixelOffsetUniform;
    private int mMipLevelUniform;
    // Blur strength 0..100; mapped to a mip level (0..7) in onDrawFrame.
    private int mBlurLevel = 50;
    private int mRotationDegree = 90;
    // Segmentation mask resolution (set via setMaskResolution).
    private int mMaskWidth = 0;
    private int mMaskHeight = 0;
    private boolean mTexturesCreated = false;
    private static final String TAG = "<dp><app><CamGLRenderer>";
    // Frame pacing: minFrameDelta of 33 ms caps rendering near 30 fps.
    private long prevTime, currentTime;
    private long minFrameDelta = 33;
    private int mFrameRotation = 0;

    private final CamRenderTexture.BlurType blurType = CamRenderTexture.BlurType.BlurTypeGaussianDilated;

    private final boolean ROTATE_MASK = false;

    // Texture-coordinate flip vectors; layout is x, 1-x, y, 1-y.
    private final float[] flipNone = new float[] { 1.0f, 0.0f, 1.0f, 0.0f }; // x, 1-x, y, 1-y
    private final float[] flipX    = new float[] { 0.0f, 1.0f, 1.0f, 0.0f }; // x, 1-x, y, 1-y
    private final float[] flipY    = new float[] { 1.0f, 0.0f, 0.0f, 1.0f }; // x, 1-x, y, 1-y
    private final float[] flipXY   = new float[] { 0.0f, 1.0f, 0.0f, 1.0f }; // x, 1-x, y, 1-y

    // clockwise rotations. All in column major
    private final float[] rotNone  = new float[] {  1.0f,  0.0f, 0.0f,  0.0f,  1.0f, 0.0f, 0.0f, 0.0f, 1.0f };
    // rotmatrix of 90 + move to 1st quadrant
    private final float[] rot90    = new float[] {  0.0f, -1.0f, 1.0f,  1.0f,  0.0f, 0.0f, 0.0f, 0.0f, 1.0f }; // 1-y, x
    // rotmatrix of 180 + move to 1st quadrant
    private final float[] rot180   = new float[] { -1.0f,  0.0f, 1.0f,  0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f };
    // rotmatrix of 270 + move to 1st quadrant
    private final float[] rot270   = new float[] {  0.0f,  1.0f, 0.0f, -1.0f,  0.0f, 1.0f, 0.0f, 0.0f, 1.0f }; // y, 1-x

    private float[] mRotationMatrix        = new float[9];
    private float[] mSurfaceRotationMatrix = new float[9];
    private GLSurfaceView mSurfaceView;
+
+    public void sendFrame( DPImage dpimage )
+    {
+        if ( !mActive ) return;
+        synchronized ( mFrameQueue ) {
+            if ( mFrameQueue.size() > 3 ) {
+                DPImage oldImage = mFrameQueue.get( 0 );
+                mFrameQueue.removeElementAt( 0 );
+                mObserver.onRenderComplete( oldImage, true );
+            }
+            mFrameQueue.add( dpimage );
+        }
+    }
+
+    public void setBlurLevel( int level )
+    {
+        mBlurLevel = level;
+        Log.e( TAG, "Blur Level " + mBlurLevel );
+    }
+
    /**
     * Derives the blend-shader rotation state from the camera sensor rotation
     * and the display/frame rotation.  Stores the frame rotation as the
     * surface matrix and a combined (cam + frame) rotation — with flip
     * corrections per sensor orientation — as the main rotation matrix.
     * Matrices are 3x3 column major, translated into the first quadrant of
     * texture space.
     */
    public void setRotationDegree( int camRotation, int frameRotation )
    {
        System.arraycopy( getRotationMatrix3x3( frameRotation ), 0, mSurfaceRotationMatrix, 0, 9 );

        mFrameRotation = frameRotation;
        mRotationDegree = ( camRotation + frameRotation ) % 360 ;
        System.arraycopy( getRotationMatrix3x3( mRotationDegree ), 0, mRotationMatrix, 0, 9 );
        switch ( camRotation ) {
        case  90:
            // transpose applied. apply H flip for 90 degree rotation - 1st column
            mRotationMatrix[0] *= -1;
            mRotationMatrix[1] *= -1;
            // Flip the translation component between 0 and 1 to stay in quadrant 1.
            mRotationMatrix[2] = mRotationMatrix[2] > 0.0f ? 0.0f : 1.0f;
            break;
        case 180:
            // V flip applied. apply H flip for 180 degree rotation.
            mRotationMatrix[0] *= -1;
            mRotationMatrix[1] *= -1;
            mRotationMatrix[2] = mRotationMatrix[2] > 0.0f ? 0.0f : 1.0f;
            break;
        case 270:
            // transpose + H flip applied. correct rotation. No op
            break;
        }
        Log.e( TAG, "setRotationDegree cam " + camRotation + " adjusted " + mRotationDegree +
               " frame " + frameRotation );
    }
+
    /**
     * Per-frame variant of setRotationDegree: rebuilds mRotationMatrix from the
     * stored frame rotation only.  The {@code camRotation} flip corrections are
     * applied solely when ROTATE_MASK is true; since ROTATE_MASK is a
     * compile-time false in this file, the parameter is currently a no-op.
     */
    public void prepareRotationMatrix( int camRotation )
    {
        mRotationDegree = mFrameRotation;
        System.arraycopy( getRotationMatrix3x3( mRotationDegree ), 0, mRotationMatrix, 0, 9 );
        if ( ROTATE_MASK ) {
            switch ( camRotation ) {
            case  90:
                // H flip applied. apply V flip for 90 degree rotation - 1st column
                mRotationMatrix[0] *= -1;
                mRotationMatrix[1] *= -1;
                mRotationMatrix[2] = mRotationMatrix[2] > 0.0f ? 0.0f : 1.0f;
                break;
            case 180:
                // V flip applied. apply V flip for 180 degree rotation.
                mRotationMatrix[3] *= -1;
                mRotationMatrix[4] *= -1;
                mRotationMatrix[5] = mRotationMatrix[5] > 0.0f ? 0.0f : 1.0f;
                break;
            }
        }
        Log.e( TAG, "setRotationDegree per frame single cam " + camRotation + " adjusted " + mRotationDegree +
               " frame " + mFrameRotation );
    }
+
    /**
     * Records the segmentation-mask resolution.  Texture allocation is
     * deferred: onDrawFrame creates the GL textures once both dimensions are
     * non-zero.
     */
    public void setMaskResolution( int width, int height )
    {
        mMaskWidth  = width;
        mMaskHeight = height;
        Log.e( TAG, "setMaskResolution width " + width + " height " + height );
    }
+
+    public float[] getRotationMatrix( int rotationDegree )
+    {
+        float[] rotMat   = new float[4];
+        float cosTheta = (float)Math.cos( Math.toRadians( rotationDegree ) );
+        float sinTheta = (float)Math.sin( Math.toRadians( rotationDegree ) );
+        rotMat[0] = cosTheta;
+        rotMat[1] = -sinTheta;
+        rotMat[2] = sinTheta;
+        rotMat[3] = cosTheta;
+        return rotMat;
+    }
+
+    public float[] getRotationMatrix3x3( int rotationDegree )
+    {
+        switch ( rotationDegree ) {
+            case  90: return rot90;
+            case 180: return rot180;
+            case 270: return rot270;
+        }
+        return rotNone;
+    }
+
+    public float[] getFlipMatrix( int rotationDegree )
+    {
+        switch ( rotationDegree ) {
+        case  90: return flipX;
+        case 180: return flipY;
+        case 270: return flipXY;
+        }
+        return flipNone;
+    }
+
    /**
     * Creates the renderer.
     *
     * @param c             Android context (stored; not used in visible code)
     * @param textureWidth  input frame width in pixels
     * @param textureHeight input frame height in pixels
     * @param observer      receiver of render-complete / surface callbacks
     * @param surfaceView   the GLSurfaceView this renderer draws into
     *
     * Allocates direct native-order buffers for the full-screen quad and its
     * four texture-coordinate variants (none / H / V / HV flip).  GL objects
     * themselves are created later on the GL thread (onSurfaceCreated,
     * onDrawFrame).
     */
    public CamGLRenderer( Context c, int textureWidth, int textureHeight,
                          CamGLRenderObserver observer, GLSurfaceView surfaceView )
    {
        mObserver = observer;
        mContext = c;
        mCamTexture = new CamRenderTexture();
        mFrameQueue = new Vector<DPImage>(5);
        mSurfaceView = surfaceView;

        // UV coordinates for the quad, one set per flip variant.
        float[] squareTextureCoordinateData = new float[] {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f
        };
        float[] squareTextureCoordinateDataHFlip = new float[] {
            1.0f, 0.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f
        };
        float[] squareTextureCoordinateDataVFlip = new float[] {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f
        };
        float[] squareTextureCoordinateDataHVFlip = new float[] {
            1.0f, 1.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 0.0f
        };
        // Full-screen quad in normalized device coordinates, laid out for
        // GL_TRIANGLE_STRIP rendering.
        float[] squareVerticesData = new float[] {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f,
        };

        // Direct, native-order buffers are required for GL vertex uploads.
        for ( int i = 0; i < 4; ++i ) {
            mSquareTextureCoordinates[i] = ByteBuffer.allocateDirect(
                                               squareTextureCoordinateData.length * 4 ).order(
                                               ByteOrder.nativeOrder() ).asFloatBuffer();
        }
        mSquareTextureCoordinates[0].put( squareTextureCoordinateData ).position( 0 );
        mSquareTextureCoordinates[1].put( squareTextureCoordinateDataHFlip ).position( 0 );
        mSquareTextureCoordinates[2].put( squareTextureCoordinateDataVFlip ).position( 0 );
        mSquareTextureCoordinates[3].put( squareTextureCoordinateDataHVFlip ).position( 0 );

        // The vertex buffer.
        mSquareVertices = ByteBuffer.allocateDirect( squareVerticesData.length * 4 ).order(
                          ByteOrder.nativeOrder() ).asFloatBuffer();
        mSquareVertices.put( squareVerticesData ).position(0);

        // initialize bytebuffer for the draw list
        // short[] drawIndicesData = new short[] {0, 1, 2, 0, 2, 3}; // The order of vertex rendering.
        // mSquareDrawIndices = ByteBuffer.allocateDirect( drawIndicesData.length * 2).order(
        //                                                 ByteOrder.nativeOrder() ).asShortBuffer();
        // mSquareDrawIndices.put( drawIndicesData ).position(0);

        mFrameHeight = textureHeight;
        mFrameWidth  = textureWidth;
        // mRotationMatrix = getRotationMatrix( 90 );
        // Textures are created lazily on the GL thread once mask dims are known.
        mTexturesCreated = false;
        prevTime = System.currentTimeMillis();
    }
+
    /** Stops frame intake and rendering; queued frames are left untouched. */
    public void onPause()
    {
        mActive = false;
    }

    /** Re-enables frame intake and rendering. */
    public void onResume()
    {
        mActive = true;
    }

    /** No-op; kept for lifecycle symmetry with close(). */
    public void open()
    {
    }
+
+    public void close()
+    {
+        mFrameHeight   = 0;
+        mFrameWidth    = 0;
+        mCamTexture.deleteTextures();
+        mCamTexture = null;
+    }
+
    /**
     * GL-thread initialization: compiles the shader sources supplied by the
     * native library (getShaderByName), links the three programs (YUV->RGB
     * convert, blur, blend), and caches every attribute/uniform location.
     * Handle arrays are indexed 0=convert, 1=blend, 2=blur.
     */
    @Override
    public void onSurfaceCreated( GL10 gl, EGLConfig config )
    {
        // Prioritize the render thread to reduce frame-pacing jitter.
        Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
        // Set the clear color to black
        GLES30.glClearColor( 0.0f, 0.0f, 0.0f, 1 );

        // Set the camera position (View matrix)
        Matrix.setLookAtM( mtrxView, 0, 0f, 0f, 1f, 0f, 0f, 0f, 0f, 1.0f, 0.0f );

        // Shader sources come from the native library by name.
        int convertVertexShaderHandle = CamRenderShader.compileShader(
            GLES30.GL_VERTEX_SHADER, getShaderByName("convVertexShaderSource"));
        int normalVertexShaderHandle = CamRenderShader.compileShader(
                GLES30.GL_VERTEX_SHADER, getShaderByName("norVertexShaderSource"));
        int vertexShaderHandle = CamRenderShader.compileShader(
            GLES30.GL_VERTEX_SHADER, getShaderByName("vertexShaderSource"));
        int convertShaderHandle = CamRenderShader.compileShader(
            GLES30.GL_FRAGMENT_SHADER, getShaderByName("convertShaderSource"));
        int blurShaderHandle = CamRenderShader.compileShader(
            GLES30.GL_FRAGMENT_SHADER, getShaderByName("blurShaderRGBSource"));
        int fragmentShaderHandle = CamRenderShader.compileShader(
            GLES30.GL_FRAGMENT_SHADER, getShaderByName("blendFragShaderRGBSource"));

        //----------------  Convert shader program (YUV -> RGB) ----------------------------------------
        mConvertProgramHandle = CamRenderShader.createAndLinkProgram( convertVertexShaderHandle, convertShaderHandle );
        mVerticesHandle[0]    = GLES30.glGetAttribLocation(  mConvertProgramHandle, "vPosition"   );
        mTexCoordLocHandle[0] = GLES30.glGetAttribLocation(  mConvertProgramHandle, "a_texCoord"  );
        mMVPMtrxhandle[0]     = GLES30.glGetUniformLocation( mConvertProgramHandle, "uMVPMatrix"  );
        mPositionConv         = GLES30.glGetUniformLocation( mConvertProgramHandle, "positionConv"  );
        mInYHandle            = GLES30.glGetUniformLocation( mConvertProgramHandle, "y_texture"   );
        mInCHandle            = GLES30.glGetUniformLocation( mConvertProgramHandle, "uv_texture"  );
        //----------------------------------------------------------------------------------------------

        //----------------  Blur + Blend shader program (superseded by separate programs below) --------
        // mProgramHandle        = CamRenderShader.createAndLinkProgram( vertexShaderHandle, fragmentShaderHandle );
        // mVerticesHandle[1]    = GLES30.glGetAttribLocation(  mProgramHandle, "vPosition"        );
        // mTexCoordLocHandle[1] = GLES30.glGetAttribLocation(  mProgramHandle, "a_texCoord"       );
        // mMVPMtrxhandle[1]     = GLES30.glGetUniformLocation( mProgramHandle, "uMVPMatrix"       );
        // mInRGBHandle          = GLES30.glGetUniformLocation( mProgramHandle, "rgb_texture"      );
        // mBackGroundRGBHandle  = GLES30.glGetUniformLocation( mProgramHandle, "bg_rgb_texture"   );
        // mMaskHandle           = GLES30.glGetUniformLocation( mProgramHandle, "mask_texture"     );
        // mXPixelOffsetUniform  = GLES30.glGetUniformLocation( mProgramHandle, "xPixelBaseOffset" );
        // mYPixelOffsetUniform  = GLES30.glGetUniformLocation( mProgramHandle, "yPixelBaseOffset" );
        // mMipLevelUniform      = GLES30.glGetUniformLocation( mProgramHandle, "mipLevel"         );
        //----------------------------------------------------------------------------------------------

        //----------------  Blur shader program --------------------------------------------------------
        mBlurProgramHandle    = CamRenderShader.createAndLinkProgram( normalVertexShaderHandle, blurShaderHandle );
        mVerticesHandle[2]    = GLES30.glGetAttribLocation(  mBlurProgramHandle, "vPosition"        );
        mTexCoordLocHandle[2] = GLES30.glGetAttribLocation(  mBlurProgramHandle, "a_texCoord"       );
        mMVPMtrxhandle[2]     = GLES30.glGetUniformLocation( mBlurProgramHandle, "uMVPMatrix"       );
        // All 8 slots resolve the same "rgb_texture" uniform, so every entry of
        // mInRGBHandle holds an identical location.
        for ( int i = 0; i < 8; ++i ) {
            mInRGBHandle[i] = GLES30.glGetUniformLocation( mBlurProgramHandle, "rgb_texture");
        }
        mXPixelOffsetUniform  = GLES30.glGetUniformLocation( mBlurProgramHandle, "xPixelBaseOffset" );
        mYPixelOffsetUniform  = GLES30.glGetUniformLocation( mBlurProgramHandle, "yPixelBaseOffset" );
        mMipLevelUniform      = GLES30.glGetUniformLocation( mBlurProgramHandle, "mipLevel"         );
        //----------------------------------------------------------------------------------------------

        //----------------  Blend shader program --------------------------------------------------------
        mProgramHandle        = CamRenderShader.createAndLinkProgram( vertexShaderHandle, fragmentShaderHandle );
        mVerticesHandle[1]    = GLES30.glGetAttribLocation(  mProgramHandle, "vPosition"      );
        mTexCoordLocHandle[1] = GLES30.glGetAttribLocation(  mProgramHandle, "a_texCoord"     );
        mMVPMtrxhandle[1]     = GLES30.glGetUniformLocation( mProgramHandle, "uMVPMatrix"     );
        mForegroundRGBHandle  = GLES30.glGetUniformLocation( mProgramHandle, "rgb_texture"    );
        mBackGroundRGBHandle  = GLES30.glGetUniformLocation( mProgramHandle, "bg_rgb_texture" );
        mMaskHandle           = GLES30.glGetUniformLocation( mProgramHandle, "mask_texture"   );
        mRotMtrxhandle        = GLES30.glGetUniformLocation( mProgramHandle, "rotMat"         );
        mSurfaceRotMtrxhandle = GLES30.glGetUniformLocation( mProgramHandle, "surfaceRotMat"  );
        mFlipMtrxhandle       = GLES30.glGetUniformLocation( mProgramHandle, "flipMat"        );
        //----------------------------------------------------------------------------------------------

        mActive = true;
    }
+
    /**
     * Recomputes the letterboxed ROI and the orthographic projection whenever
     * the surface size changes.  The ROI preserves the frame aspect ratio and
     * is centered on the surface.
     */
    @Override
    public void onSurfaceChanged( GL10 gl, int width, int height )
    {

        // We need to know the current width and height.
        mScreenWidth = width;
        mScreenHeight = height;
        float aspectRatio = (float)mFrameWidth/mFrameHeight;
        float screenAspectRatio = (float)mScreenWidth/mScreenHeight;
        Log.d(TAG,"onSurfaceChanged aspectRatio="+aspectRatio+" screenAspectRatio="+screenAspectRatio+" w="+width+" h="+height);

        if ( screenAspectRatio > aspectRatio ) {
            // Screen is wider than the frame: pillarbox (shrink width).
            mScreenROIWidth  = (int)Math.min( mScreenWidth,  mScreenWidth * aspectRatio / screenAspectRatio );
            mScreenROIHeight = (int)mScreenHeight;
        } else {
            // Screen is taller than the frame: letterbox (shrink height).
            // NOTE(review): an aspect-preserving height here would normally be
            // mScreenWidth / aspectRatio; this multiplies instead — confirm
            // whether frame dimensions are pre-rotated at the call site.
            mScreenROIWidth = (int) mScreenWidth;
            mScreenROIHeight = (int) Math.min( mScreenHeight,  mScreenWidth * aspectRatio);
        }
        // Center the ROI on the surface.
        mScreenROIX = (  (int)mScreenWidth -  mScreenROIWidth )/2;
        mScreenROIY = ( (int)mScreenHeight - mScreenROIHeight )/2;

        // Clear our matrices
        for ( int i = 0; i < 16; i++ ) {
            mtrxProjection[i]        = 0.0f;
            mtrxProjectionAndView[i] = 0.0f;
        }

        Log.e( TAG, "onSurfaceChanged Frame_dim " + mFrameWidth + " x " + mFrameHeight +
                    " ROI ( " + mScreenROIX + " " + mScreenROIY +
                    "  " + mScreenROIWidth + " " + mScreenROIHeight + ")" );
        // Setup our screen width and height for normal sprite translation.
        Matrix.orthoM( mtrxProjection, 0, -aspectRatio, aspectRatio, -1, 1, 0, 50 );

        // Calculate the projection and view transformation
        Matrix.multiplyMM( mtrxProjectionAndView, 0, mtrxProjection, 0, mtrxView, 0 );
    }
+
    /**
     * Runs the YUV->RGB convert pass.  Uploads the Y plane and the interleaved
     * chroma plane (as LUMINANCE / LUMINANCE_ALPHA textures) and draws a
     * full-screen quad with the convert program.
     *
     * @param bufferY semi-planar luma plane, mFrameWidth x mFrameHeight
     * @param bufferC interleaved chroma plane at half resolution
     * @param offline true: render into FBO level 0 for the blur/blend chain;
     *                false: render directly to the screen (preview passthrough)
     */
    public void executeConverter( ByteBuffer bufferY, ByteBuffer bufferC, boolean offline )
    {
        // clear Screen and Depth Buffer, we have set the clear color as black.
        GLES30.glClear( GLES30.GL_COLOR_BUFFER_BIT );

        if ( offline ) {
            GLES30.glViewport( 0, 0, ( int )mFrameWidth, ( int )mFrameHeight );
            GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, mCamTexture.getInRGBFBO( 0 ) );
            GLES30.glFramebufferTexture2D( GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
                                           GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( 0 ), 0 );
        } else {
            GLES30.glViewport( 0, 0, ( int )mScreenWidth, ( int )mScreenHeight );
        }

        // Upload luma into texture unit 0.
        GLES30.glActiveTexture( GLES30.GL_TEXTURE0 );
        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInYTex() );
        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth, mFrameHeight,
                                GLES30.GL_LUMINANCE, GLES30.GL_UNSIGNED_BYTE, bufferY );

        // Upload interleaved chroma (half resolution) into texture unit 1.
        GLES30.glActiveTexture( GLES30.GL_TEXTURE1 );
        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInCTex() );
        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth/2, mFrameHeight/2,
                                GLES30.GL_LUMINANCE_ALPHA, GLES30.GL_UNSIGNED_BYTE, bufferC );

        GLES30.glUseProgram( mConvertProgramHandle );
        // positionConv selects the shader's positioning path: 0 for offline
        // (FBO) rendering, 1 for on-screen rendering.
        if (offline) {
            GLES30.glUniform1i(mPositionConv,0);
        } else {
            GLES30.glUniform1i(mPositionConv,1);
        }
        GLES30.glUniform1i ( mInYHandle, 0 );
        GLES30.glUniform1i ( mInCHandle, 1 );
        GLES30.glVertexAttribPointer( mVerticesHandle[0], 2, GLES30.GL_FLOAT, false, 0, mSquareVertices );
        GLES30.glVertexAttribPointer ( mTexCoordLocHandle[0], 2, GLES30.GL_FLOAT, false, 0, mSquareTextureCoordinates[0] );
        GLES30.glUniformMatrix4fv( mMVPMtrxhandle[0], 1, false, mtrxProjectionAndView, 0);
        GLES30.glEnableVertexAttribArray( mVerticesHandle[0] );
        GLES30.glEnableVertexAttribArray ( mTexCoordLocHandle[0] );

        //GLES30.glDrawElements( GLES30.GL_TRIANGLES, 6, GLES30.GL_UNSIGNED_SHORT, mSquareDrawIndices );
        GLES30.glDrawArrays( GLES30.GL_TRIANGLE_STRIP, 0, 4 );

        if ( offline ) {
            // Verify the FBO before the blur chain consumes its attachment.
            int status = GLES30.glCheckFramebufferStatus( GLES30.GL_FRAMEBUFFER );
            if ( status == GLES30.GL_FRAMEBUFFER_COMPLETE ) {
            /// Debug
            ///    GLES30.glReadPixels( 0, 0, mFrameWidth, mFrameHeight, GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, scratchRGB );
            ///    Log.e( TAG, "RGB Buffer " + scratchRGB.get(1000) + " " + scratchRGB.get(1001) +
            ///           "handles "  + mCamTexture.getInRGBFBO() + " " + mCamTexture.getInRGBTex() );
            } else {
                Log.e( TAG, "FBO status " + status + "error " + GLES30.glGetError() );
            }
        }

        // Disable vertex array
        GLES30.glDisableVertexAttribArray( mVerticesHandle[0] );
        GLES30.glDisableVertexAttribArray( mTexCoordLocHandle[0] );
        // Reset FBO
        GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, 0 );
        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, 0 );
    }
+
+    public void executeBlur( int level )
+    {
+        int viewPortScaleFactor = 1;
+        int texScaleFactor = 1;
+        float blurScaleFactor = 1.0f; // 2x->.5
+
+        switch ( blurType )
+        {
+        case BlurTypeGaussianPyramid:
+            viewPortScaleFactor = level + 1;
+            texScaleFactor = level;
+            break;
+        case BlurTypeGaussianDilated:
+            blurScaleFactor = 4.0f;
+            break;
+        case BlurTypeGaussianKernelSize:
+            break;
+        }
+
+        GLES30.glClear( GLES30.GL_COLOR_BUFFER_BIT );
+        //GLES30.glViewport( 0, 0, ( int )mFrameWidth/(level+1), ( int )mFrameHeight/(level+1) );
+        GLES30.glViewport( 0, 0, ( int )mFrameWidth/viewPortScaleFactor,
+                           ( int )mFrameHeight/viewPortScaleFactor );
+
+        // Bind Mask texture to texturename
+        GLES30.glActiveTexture( GLES30.GL_TEXTURE0 );
+        // GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( 0 ) );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( level - 1 ) );
+        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0,
+                                mFrameWidth/texScaleFactor,
+                                mFrameHeight/texScaleFactor,
+                                GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, null );
+        GLES30.glUniform1i ( mInRGBHandle[level-1], 0 );
+
+        GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, mCamTexture.getInRGBFBO( level ) );
+        GLES30.glFramebufferTexture2D( GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
+                                       GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( level ), 0 );
+
+        float xPixelOffset = blurScaleFactor/(float)mFrameWidth;
+        float yPixelOffset = blurScaleFactor/(float)mFrameHeight;
+        float mipLevel = (float)level;
+        GLES30.glUniform1f( mMipLevelUniform, mipLevel );
+        GLES30.glUniform1f( mXPixelOffsetUniform, xPixelOffset );
+        GLES30.glUniform1f( mYPixelOffsetUniform, yPixelOffset );
+
+        GLES30.glUseProgram( mBlurProgramHandle );
+
+        GLES30.glVertexAttribPointer( mVerticesHandle[2], 2, GLES30.GL_FLOAT, false, 0, mSquareVertices );
+        GLES30.glEnableVertexAttribArray( mVerticesHandle[2] );
+        GLES30.glVertexAttribPointer ( mTexCoordLocHandle[2], 2, GLES30.GL_FLOAT, false, 0, mSquareTextureCoordinates[0] );
+        GLES30.glEnableVertexAttribArray ( mTexCoordLocHandle[2] );
+
+        // GLES30.glDrawElements( GLES30.GL_TRIANGLES, 6, GLES30.GL_UNSIGNED_SHORT, mSquareDrawIndices );
+        GLES30.glDrawArrays( GLES30.GL_TRIANGLE_STRIP, 0, 4 );
+
+        // Disable vertex array
+        GLES30.glDisableVertexAttribArray( mVerticesHandle[2] );
+        GLES30.glDisableVertexAttribArray( mTexCoordLocHandle[2] );
+
+        // Reset FBO
+        GLES30.glBindFramebuffer( GLES30.GL_FRAMEBUFFER, 0 );
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, 0 );
+    }
+
    /**
     * Final composite pass: blends the sharp foreground (RGB level 0) with the
     * blurred background (RGB level {@code level}) using the segmentation mask,
     * rendered into the letterboxed screen ROI.
     *
     * @param bufferMask segmentation mask, mMaskWidth x mMaskHeight, one byte/pixel
     * @param level      blur level providing the background texture
     */
    public void executeBlend( ByteBuffer bufferMask, int level )
    {
        GLES30.glClear( GLES30.GL_COLOR_BUFFER_BIT /*| GLES30.GL_DEPTH_BUFFER_BIT*/ );
        GLES30.glViewport( mScreenROIX, mScreenROIY, ( int )mScreenROIWidth, ( int )mScreenROIHeight );
        //GLES30.glEnable( GLES30.GL_DEPTH_TEST );
        GLES30.glUseProgram( mProgramHandle );

        // Unit 0: sharp foreground (converter output, level 0).
        GLES30.glActiveTexture( GLES30.GL_TEXTURE0 );
        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex( 0 ) );
        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth, mFrameHeight,
                                GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, null );
        GLES30.glUniform1i ( mForegroundRGBHandle, 0 );

        // Unit 1: blurred background from the last blur pass.
        GLES30.glActiveTexture( GLES30.GL_TEXTURE1 );
        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getInRGBTex(level));
        //GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth/(level+1), mFrameHeight/(level+1),
        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mFrameWidth, mFrameHeight,
                                GLES30.GL_RGB, GLES30.GL_UNSIGNED_BYTE, null );
        GLES30.glUniform1i ( mBackGroundRGBHandle, 1 );

        // Unit 2: segmentation mask uploaded from the CPU buffer.
        GLES30.glActiveTexture( GLES30.GL_TEXTURE2 );
        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mCamTexture.getMaskTex() );
        GLES30.glTexSubImage2D( GLES30.GL_TEXTURE_2D, 0, 0, 0, mMaskWidth, mMaskHeight,
                                GLES30.GL_LUMINANCE, GLES30.GL_UNSIGNED_BYTE, bufferMask );
        GLES30.glUniform1i ( mMaskHandle, 2 );

        GLES30.glVertexAttribPointer( mVerticesHandle[1], 2, GLES30.GL_FLOAT, false, 0, mSquareVertices );
        GLES30.glEnableVertexAttribArray( mVerticesHandle[1] );
        GLES30.glVertexAttribPointer ( mTexCoordLocHandle[1], 2, GLES30.GL_FLOAT, false, 0, mSquareTextureCoordinates[0] );
        GLES30.glEnableVertexAttribArray ( mTexCoordLocHandle[1] );
        GLES30.glUniformMatrix4fv( mMVPMtrxhandle[1], 1, false, mtrxProjectionAndView, 0 );
        GLES30.glUniformMatrix3fv( mRotMtrxhandle, 1, false, mRotationMatrix, 0 );
        GLES30.glUniformMatrix3fv( mSurfaceRotMtrxhandle, 1, false, mSurfaceRotationMatrix, 0 );
        // Flip handled via the rotation matrices above; flipNone is always passed here.
        GLES30.glUniformMatrix2fv( mFlipMtrxhandle, 1, false, flipNone, 0 );

        // GLES30.glDrawElements( GLES30.GL_TRIANGLES, 6, GLES30.GL_UNSIGNED_SHORT, mSquareDrawIndices );
        GLES30.glDrawArrays( GLES30.GL_TRIANGLE_STRIP, 0, 4 );

        // Disable vertex array
        GLES30.glDisableVertexAttribArray( mVerticesHandle[1] );
        GLES30.glDisableVertexAttribArray( mTexCoordLocHandle[1] );
    }
+
    /**
     * GL-thread frame loop: paces to ~30 fps (minFrameDelta = 33 ms), lazily
     * creates the GL textures once the mask resolution is known, then consumes
     * one frame from the queue.  Frames without a mask are converted straight
     * to screen; frames with a mask go through convert -> blur chain -> blend.
     * Every consumed frame is returned via onRenderComplete.
     */
    @Override
    public void onDrawFrame( GL10 unused )
    {
        if ( !mActive || mFrameQueue.size() == 0 ) {
            return;
        }

        currentTime = System.currentTimeMillis();
        long delta = currentTime - prevTime;
        // NOTE(review): logged unconditionally even though SHOW_LOGS exists.
        Log.d(TAG,"frame delta time = "+delta);
        try {
            // Frame pacing: sleep the render thread to cap the frame rate.
            if ( minFrameDelta > delta )
                Thread.sleep( minFrameDelta - delta );
        } catch ( InterruptedException e ) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        prevTime = System.currentTimeMillis();

        // Deferred texture creation: mask dimensions arrive asynchronously via
        // setMaskResolution; until then frames stay queued.
        if ( !mTexturesCreated && mMaskWidth > 0 && mMaskHeight  > 0 ) {
            Log.d( TAG, "onDrawFrame createTextures " + blurType );
            mCamTexture.createTextures( mFrameWidth, mFrameHeight,
                                        mMaskWidth, mMaskHeight, 8,
                                        blurType );
            mTexturesCreated = true;
        } else if ( !mTexturesCreated ) {
            // No op
            return;
        }

        // Pop the oldest frame (producer only appends; this is the sole consumer).
        DPImage dpimage = mFrameQueue.get( 0 );
        mFrameQueue.removeElementAt( 0 );
        Plane[] planes = dpimage.mImage.getPlanes();
        ByteBuffer bufferY  = planes[0].getBuffer();
        ByteBuffer bufferC = planes[2].getBuffer();

        if ( dpimage.mMask == null) {
            // No segmentation mask: plain YUV->RGB passthrough to screen.
            executeConverter( bufferY, bufferC, false );
            Log.d( TAG, "onDrawFrame no processing" );
        } else {
            // Map blur strength (0..100) onto mip levels 0..7.
            int mipLevel = (int)(( mBlurLevel * 8.0f )/100.0f);
            if ( mipLevel >= 7 )
                mipLevel = 7;// clamp
            Log.d( TAG, "[DP_BUF_DBG] onDrawFrame frame " + dpimage.mSeqNumber + " mipLevel "
                    + mipLevel );
            executeConverter( bufferY, bufferC, true );

            // Each pass blurs level-1 into level.
            for ( int lvl = 1; lvl <= mipLevel; ++lvl ) {
               executeBlur( lvl );
            }

            // Set rotation
            if ( dpimage.mOrientation >= 0 ) {
                prepareRotationMatrix( dpimage.mOrientation );
            }
            executeBlend( dpimage.mMask, mipLevel );
        }
        if ( mActive ) {
            mObserver.onRenderComplete( dpimage, false );
        }
    }
+
    /** Returns the GLSL source for the named shader; implemented in the native library. */
    private native String getShaderByName(String type);
+}
diff --git a/src/com/android/camera/deepportrait/CamRenderShader.java b/src/com/android/camera/deepportrait/CamRenderShader.java
new file mode 100755
index 000000000..5b80ed919
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamRenderShader.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.deepportrait;
+import android.opengl.GLES30;
+import android.util.Log;
+
+/**
+ * Static helpers for compiling GLSL shader stages and linking them into a
+ * GL program.  Failures are reported through the driver's info log and
+ * surfaced to callers as a RuntimeException.
+ */
+public class CamRenderShader
+{
+    public static final String TAG = "<dp><app><CamRenderShader>";
+
+    /**
+     * Compiles a single shader stage.
+     *
+     * @param shaderType   e.g. GLES30.GL_VERTEX_SHADER or GLES30.GL_FRAGMENT_SHADER
+     * @param shaderSource GLSL source text
+     * @return a non-zero shader handle on success
+     * @throws RuntimeException if the shader could not be created or compiled
+     */
+    public static int compileShader( final int shaderType, final String shaderSource )
+    {
+        int shaderHandle = GLES30.glCreateShader( shaderType );
+
+        if ( shaderHandle != 0 )
+        {
+            // Pass in the shader source.
+            GLES30.glShaderSource( shaderHandle, shaderSource );
+
+            // Compile the shader.
+            GLES30.glCompileShader( shaderHandle );
+
+            // Get the compilation status.
+            final int[] compileStatus = new int[1];
+            GLES30.glGetShaderiv( shaderHandle, GLES30.GL_COMPILE_STATUS, compileStatus, 0 );
+
+            // If the compilation failed, log the driver's message and delete the shader.
+            if ( compileStatus[0] == 0 )
+            {
+                Log.e( TAG, "Error compiling shader: " + GLES30.glGetShaderInfoLog( shaderHandle ) );
+                GLES30.glDeleteShader( shaderHandle );
+                shaderHandle = 0;
+            }
+        }
+
+        if ( shaderHandle == 0 )
+        {
+            throw new RuntimeException( "Error creating shader." );
+        }
+
+        return shaderHandle;
+    }
+
+    /**
+     * Links a vertex and a fragment shader into a program object.
+     *
+     * @return a non-zero program handle on success
+     * @throws RuntimeException if the program could not be created or linked
+     */
+    public static int createAndLinkProgram( final int vertexShaderHandle,
+                                            final int fragmentShaderHandle )
+    {
+        int programHandle = GLES30.glCreateProgram();
+
+        if ( programHandle != 0 ) {
+            // Bind the vertex shader to the program.
+            GLES30.glAttachShader( programHandle, vertexShaderHandle );
+
+            // Bind the fragment shader to the program.
+            GLES30.glAttachShader( programHandle, fragmentShaderHandle );
+
+            // Link the two shaders together into a program.
+            GLES30.glLinkProgram( programHandle );
+
+            // Get the link status.
+            final int[] linkStatus = new int[1];
+            GLES30.glGetProgramiv( programHandle, GLES30.GL_LINK_STATUS, linkStatus, 0 );
+
+            // If the link failed, log the driver's message and delete the program.
+            if ( linkStatus[0] == 0 )
+            {
+                // Fixed message: this branch reports a link failure, not a compile failure.
+                Log.e(TAG, "Error linking program: " + GLES30.glGetProgramInfoLog(programHandle));
+                GLES30.glDeleteProgram(programHandle);
+                programHandle = 0;
+            }
+        }
+
+        if ( programHandle == 0 ) {
+            throw new RuntimeException("Error creating program.");
+        }
+
+        return programHandle;
+    }
+}
diff --git a/src/com/android/camera/deepportrait/CamRenderTexture.java b/src/com/android/camera/deepportrait/CamRenderTexture.java
new file mode 100755
index 000000000..c0b4108b3
--- /dev/null
+++ b/src/com/android/camera/deepportrait/CamRenderTexture.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.deepportrait;
+
+import java.nio.ByteBuffer;
+import android.opengl.GLES30;
+
+/**
+ * Owns the GL texture and framebuffer objects used by the deep-portrait
+ * renderer: the input luma/chroma planes, the segmentation mask, and one
+ * RGB working texture (with a matching FBO) per blur level.
+ *
+ * All methods must be called on the GL thread with a current context.
+ */
+public class CamRenderTexture
+{
+    int[] mTextureHandle;
+    int[] mFBO;
+    int[] mRBO;
+    public enum BlurType
+    {
+        BlurTypeGaussianDilated,
+        BlurTypeGaussianPyramid,
+        BlurTypeGaussianKernelSize,
+    }
+
+    // Texture slot layout: [0] = luma, [1] = chroma, [2] = mask, [3..] = RGB levels.
+    public int getInYTex()  { return mTextureHandle[0]; }
+    public int getInCTex()  { return mTextureHandle[1]; }
+    public int getMaskTex() { return mTextureHandle[2]; }
+    public int getInRGBTex( int level ) { return mTextureHandle[3 + level]; }
+    public int getInRGBFBO( int level ) { return mFBO[level]; }
+    public int getInRGBRBO( int level ) { return mRBO[level]; }
+
+    /** Binds a 2D texture and applies the given min/mag filter plus edge clamping. */
+    private static void bindClampedTexture( int handle, int filter )
+    {
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, handle );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, filter );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, filter );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE );
+    }
+
+    /**
+     * Allocates all textures and FBOs for the given frame and mask sizes.
+     *
+     * @param width    luma plane width; the chroma texture is width/2 x height/2
+     * @param height   luma plane height
+     * @param maskW    segmentation mask width
+     * @param maskH    segmentation mask height
+     * @param levels   number of RGB blur levels (one texture + FBO each)
+     * @param blurType pyramid blur downscales level i by 1/(i+1); other
+     *                 types keep every level at full resolution
+     */
+    public void createTextures( int width, int height, int maskW, int maskH,
+                                int levels, BlurType blurType )
+    {
+        mTextureHandle = new int[3 + levels];
+        mFBO = new int[levels];
+        mRBO = new int[levels];
+        GLES30.glGenTextures( mTextureHandle.length, mTextureHandle, 0 );
+
+        // Input luma: single channel, full resolution.
+        bindClampedTexture( mTextureHandle[0], GLES30.GL_LINEAR );
+        GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_LUMINANCE, width, height, 0,
+                             GLES30.GL_LUMINANCE, GLES30.GL_UNSIGNED_BYTE, null );
+
+        // Input chroma: two interleaved channels at half resolution.
+        bindClampedTexture( mTextureHandle[1], GLES30.GL_LINEAR );
+        GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_LUMINANCE_ALPHA, width/2, height/2, 0,
+                             GLES30.GL_LUMINANCE_ALPHA, GLES30.GL_UNSIGNED_BYTE, null );
+
+        // Segmentation mask: NEAREST sampling; wrap modes intentionally left
+        // at the GL default (unchanged from the original setup).
+        GLES30.glBindTexture( GLES30.GL_TEXTURE_2D, mTextureHandle[2] );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_NEAREST );
+        GLES30.glTexParameteri( GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_NEAREST );
+        GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_LUMINANCE, maskW, maskH, 0,
+                             GLES30.GL_LUMINANCE , GLES30.GL_UNSIGNED_BYTE, null );
+
+        // RGB working textures, one per blur level, each with its own FBO.
+        GLES30.glGenFramebuffers( levels, mFBO, 0 );
+
+        for ( int i = 0; i < levels; ++i )
+        {
+            int scaleFactor = ( blurType == BlurType.BlurTypeGaussianPyramid ) ? i + 1 : 1;
+            bindClampedTexture( mTextureHandle[3 + i], GLES30.GL_LINEAR );
+            GLES30.glTexImage2D( GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGB,
+                                 width/scaleFactor, height/scaleFactor, 0,
+                                 GLES30.GL_RGB , GLES30.GL_UNSIGNED_BYTE, null );
+        }
+        //ToDo: move to render buffers
+        //  GLES30.glGenRenderbuffers( 1, mRBO, 0 );
+        //  GLES30.glBindRenderbuffer( GLES30.GL_RENDERBUFFER, mRBO[0]);
+        //  GLES30.glRenderbufferStorage( GLES30.GL_RENDERBUFFER, GLES30.GL_RGB, width, height );
+    }
+
+    /**
+     * Releases all GL objects.  Safe to call when createTextures() was never
+     * invoked, and idempotent on repeated calls (previously this threw a
+     * NullPointerException / double-deleted stale handles).
+     */
+    public void deleteTextures()
+    {
+        if ( mTextureHandle != null )
+        {
+            GLES30.glDeleteTextures( mTextureHandle.length, mTextureHandle, 0 );
+            mTextureHandle = null;
+        }
+        if ( mFBO != null )
+        {
+            GLES30.glDeleteFramebuffers ( mFBO.length, mFBO, 0 );
+            mFBO = null;
+        }
+     //   GLES30.glDeleteRenderbuffers( mRBO.length, mRBO, 0 );
+    }
+}
diff --git a/src/com/android/camera/deepportrait/DPImage.java b/src/com/android/camera/deepportrait/DPImage.java
new file mode 100755
index 000000000..381270682
--- /dev/null
+++ b/src/com/android/camera/deepportrait/DPImage.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package com.android.camera.deepportrait;
+import android.media.Image;
+import android.media.Image.Plane;
+import java.nio.ByteBuffer;
+
+/**
+ * Value holder pairing a camera {@link Image} with an optional deep-portrait
+ * segmentation mask and the orientation the frame was captured at.  Mask
+ * dimensions and sequence number default to 0 and may be filled in later by
+ * the processing pipeline.
+ */
+public class DPImage
+{
+    public Image mImage;
+    public ByteBuffer mMask;
+    public int mMaskWidth = 0;
+    public int mMaskHeight = 0;
+    public int mSeqNumber = 0;
+    public int mOrientation = 0;
+
+    /** Wraps a frame together with a precomputed segmentation mask (may be null). */
+    public DPImage( Image aImage, ByteBuffer aMask, int orientation)
+    {
+        mImage = aImage;
+        mMask = aMask;
+        mOrientation = orientation;
+    }
+
+    /** Wraps a frame that has no segmentation mask yet. */
+    public DPImage(Image aImage, int orientation)
+    {
+        this( aImage, null, orientation );
+    }
+}
diff --git a/src/com/android/camera/deepportrait/GLCameraPreview.java b/src/com/android/camera/deepportrait/GLCameraPreview.java
new file mode 100755
index 000000000..7d62faebf
--- /dev/null
+++ b/src/com/android/camera/deepportrait/GLCameraPreview.java
@@ -0,0 +1,343 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ * Not a Contribution
+ *
+ * Copyright 2008-2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+package com.android.camera.deepportrait;
+
+import android.app.Activity;
+import android.content.Context;
+import android.opengl.GLSurfaceView;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.opengles.GL10;
+import android.opengl.GLES30;
+import android.util.Log;
+
+/**
+ * GLSurfaceView host for the deep-portrait preview renderer.  Installs a
+ * custom EGLContextFactory that creates an OpenGL ES 3.0 context and
+ * forwards surface lifecycle events to the {@link CamGLRenderer} and the
+ * {@link CamGLRenderObserver}.
+ */
+public class GLCameraPreview extends GLSurfaceView
+{
+    private CamGLRenderer mRenderer;
+    private CamGLRenderObserver mObserver;
+    public static String TAG = "<dp><app><GLSurfaceView>";
+
+    /** Creates an ES 3.0 context requesting medium IMG context priority. */
+    private static class ContextFactory implements EGLContextFactory
+    {
+        private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+        public  static final int EGL_CONTEXT_PRIORITY_LEVEL_IMG = 0x3100;
+        public  static final int EGL_CONTEXT_PRIORITY_HIGH_IMG = 0x3101;
+        public  static final int EGL_CONTEXT_PRIORITY_MED_IMG = 0x3102;
+
+        public EGLContext createContext(EGL10 egl, EGLDisplay display,
+                EGLConfig eglConfig)
+        {
+            Log.w(TAG, "creating OpenGL ES 3.0 context");
+            checkEglError("Before eglCreateContext", egl);
+            // Attribute list: client version 3, medium priority, terminated
+            // by EGL_NONE (doubled to keep the array length even).
+            int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 3,
+                                  EGL_CONTEXT_PRIORITY_LEVEL_IMG, EGL_CONTEXT_PRIORITY_MED_IMG,
+                                  EGL10.EGL_NONE, EGL10.EGL_NONE };
+            EGLContext context = egl.eglCreateContext(display, eglConfig,
+                    EGL10.EGL_NO_CONTEXT, attrib_list);
+            checkEglError("After eglCreateContext", egl);
+            return context;
+        }
+
+        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context)
+        {
+            egl.eglDestroyContext(display, context);
+        }
+    }
+
+    /** Logs and drains any pending EGL errors without throwing. */
+    private static void checkEglError(String prompt, EGL10 egl)
+    {
+        int error;
+        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS)
+        {
+            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
+        }
+    }
+
+    /**
+     * EGLConfigChooser that first asks EGL for minimally matching configs,
+     * then picks the one whose RGBA sizes match the requested values exactly
+     * and whose depth/stencil sizes are at least the requested minimums.
+     */
+    private static class ConfigChooser implements EGLConfigChooser
+    {
+
+        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil)
+        {
+            mRedSize = r;
+            mGreenSize = g;
+            mBlueSize = b;
+            mAlphaSize = a;
+            mDepthSize = depth;
+            mStencilSize = stencil;
+        }
+
+        /*
+         * This EGL config specification asks for ES2-renderable configs
+         * (EGL_OPENGL_ES2_BIT) with a minimum of 4 bits for red/green/blue;
+         * actual matching happens in chooseConfig() below.
+         * NOTE(review): the context factory above creates an ES 3.0 context —
+         * presumably ES3-capable configs also report the ES2 bit; confirm on
+         * target hardware.
+         */
+        private static int EGL_OPENGL_ES2_BIT = 4;
+        private static int[] s_configAttribs2 = { EGL10.EGL_RED_SIZE, 4,
+                EGL10.EGL_GREEN_SIZE, 4, EGL10.EGL_BLUE_SIZE, 4,
+                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL10.EGL_NONE };
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display)
+        {
+
+            /*
+             * Get the number of minimally matching EGL configurations
+             */
+            int[] num_config = new int[1];
+            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
+
+            int numConfigs = num_config[0];
+
+            if (numConfigs <= 0)
+            {
+                throw new IllegalArgumentException(
+                        "No configs match configSpec");
+            }
+
+            /*
+             * Allocate then read the array of minimally matching EGL configs
+             */
+            EGLConfig[] configs = new EGLConfig[numConfigs];
+            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs,
+                    num_config);
+
+            printConfigs(egl, display, configs);
+            /*
+             * Now return the "best" one
+             */
+            return chooseConfig(egl, display, configs);
+        }
+
+        // NOTE(review): returns null when no exact RGBA match exists, which
+        // GLSurfaceView will surface as a failure when creating the surface —
+        // confirm this is the intended fallback behavior.
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs)
+        {
+            for (EGLConfig config : configs)
+            {
+                int d = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_DEPTH_SIZE, 0);
+                int s = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_STENCIL_SIZE, 0);
+
+                // We need at least mDepthSize and mStencilSize bits
+                if (d < mDepthSize || s < mStencilSize)
+                    continue;
+
+                // We want an *exact* match for red/green/blue/alpha
+                int r = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_RED_SIZE, 0);
+                int g = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_GREEN_SIZE, 0);
+                int b = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_BLUE_SIZE, 0);
+                int a = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_ALPHA_SIZE, 0);
+
+                if (r == mRedSize && g == mGreenSize && b == mBlueSize
+                        && a == mAlphaSize)
+                    return config;
+            }
+            return null;
+        }
+
+        /** Reads one config attribute, falling back to defaultValue on failure. */
+        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+                EGLConfig config, int attribute, int defaultValue)
+        {
+
+            if (egl.eglGetConfigAttrib(display, config, attribute, mValue))
+            {
+                return mValue[0];
+            }
+            return defaultValue;
+        }
+
+        /** Debug helper: dumps every candidate config to the log. */
+        private void printConfigs(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs)
+        {
+            int numConfigs = configs.length;
+            Log.w(TAG, String.format("%d configurations", numConfigs));
+            for (int i = 0; i < numConfigs; i++)
+            {
+                Log.w(TAG, String.format("Configuration %d:\n", i));
+                printConfig(egl, display, configs[i]);
+            }
+        }
+
+        /** Debug helper: logs each known attribute of a single config. */
+        private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config)
+        {
+            // Attribute ids and names are parallel arrays; hex literals stand
+            // in for constants missing from the EGL10 interface.
+            int[] attributes = { EGL10.EGL_BUFFER_SIZE, EGL10.EGL_ALPHA_SIZE,
+                    EGL10.EGL_BLUE_SIZE,
+                    EGL10.EGL_GREEN_SIZE,
+                    EGL10.EGL_RED_SIZE,
+                    EGL10.EGL_DEPTH_SIZE,
+                    EGL10.EGL_STENCIL_SIZE,
+                    EGL10.EGL_CONFIG_CAVEAT,
+                    EGL10.EGL_CONFIG_ID,
+                    EGL10.EGL_LEVEL,
+                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
+                    EGL10.EGL_MAX_PBUFFER_PIXELS,
+                    EGL10.EGL_MAX_PBUFFER_WIDTH,
+                    EGL10.EGL_NATIVE_RENDERABLE,
+                    EGL10.EGL_NATIVE_VISUAL_ID,
+                    EGL10.EGL_NATIVE_VISUAL_TYPE,
+                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
+                    EGL10.EGL_SAMPLES,
+                    EGL10.EGL_SAMPLE_BUFFERS,
+                    EGL10.EGL_SURFACE_TYPE,
+                    EGL10.EGL_TRANSPARENT_TYPE,
+                    EGL10.EGL_TRANSPARENT_RED_VALUE,
+                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
+                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
+                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
+                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
+                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
+                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
+                    EGL10.EGL_LUMINANCE_SIZE, EGL10.EGL_ALPHA_MASK_SIZE,
+                    EGL10.EGL_COLOR_BUFFER_TYPE, EGL10.EGL_RENDERABLE_TYPE,
+                    0x3042 // EGL10.EGL_CONFORMANT
+            };
+            String[] names = { "EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE",
+                    "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE",
+                    "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT",
+                    "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT",
+                    "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH",
+                    "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID",
+                    "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES",
+                    "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE",
+                    "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE",
+                    "EGL_TRANSPARENT_GREEN_VALUE",
+                    "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB",
+                    "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL",
+                    "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE",
+                    "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE",
+                    "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT" };
+            int[] value = new int[1];
+            for (int i = 0; i < attributes.length; i++)
+            {
+                int attribute = attributes[i];
+                String name = names[i];
+                if (egl.eglGetConfigAttrib(display, config, attribute, value))
+                {
+                    Log.w(TAG, String.format("  %s: %d\n", name, value[0]));
+                } else
+                {
+                    // Log.w(TAG, String.format("  %s: failed\n", name));
+                    // Attribute unsupported by this config: silently drain
+                    // the EGL error queue so later queries start clean.
+                    while (egl.eglGetError() != EGL10.EGL_SUCCESS)
+                        ;
+                }
+            }
+        }
+
+        // Subclasses can adjust these values:
+        protected int mRedSize;
+        protected int mGreenSize;
+        protected int mBlueSize;
+        protected int mAlphaSize;
+        protected int mDepthSize;
+        protected int mStencilSize;
+        private int[] mValue = new int[1];
+    }
+
+    /**
+     * Builds the preview surface and starts the renderer.
+     *
+     * @param textureWidth  width of the camera frames to render
+     * @param textureHeight height of the camera frames to render
+     * @param observer      callback for render/surface lifecycle events
+     */
+    public GLCameraPreview( Context context, int textureWidth,
+                            int textureHeight, CamGLRenderObserver observer )
+    {
+        super( context );
+        mObserver = observer;
+        // Create an OpenGL ES 3.0 context.
+        setEGLContextClientVersion( 3 );
+
+        /*
+         * Setup the context factory for ES 3.0 rendering. See ContextFactory
+         * class definition above.
+         */
+        setEGLContextFactory(new ContextFactory());
+
+        /*
+         * We need to choose an EGLConfig that matches the format of our surface
+         * exactly. This is going to be done in our custom config chooser. See
+         * ConfigChooser class definition below.
+         * NOTE(review): the custom chooser is currently disabled (call below
+         * is commented out), so translucent/depth/stencil are unused and the
+         * default GLSurfaceView chooser applies — confirm intended.
+         */
+        boolean translucent = false;
+        int depth = 0;
+        int stencil = 0;
+        //setEGLConfigChooser(translucent ? new ConfigChooser(8, 8, 8, 8, depth,
+        //        stencil) : new ConfigChooser(5, 6, 5, 0, depth, stencil));
+
+        // Set the Renderer for drawing on the GLSurfaceView
+        mRenderer = new CamGLRenderer( context, textureWidth, textureHeight, observer, this );
+        setRenderer( mRenderer );
+
+        // Render the view only when there is a change in the drawing data
+        setRenderMode( GLSurfaceView.RENDERMODE_WHEN_DIRTY );
+        // setRenderMode( GLSurfaceView.RENDERMODE_CONTINUOUSLY );
+
+        mRenderer.open();
+    }
+
+    @Override
+    public void onPause()
+    {
+        super.onPause();
+        mRenderer.onPause();
+    }
+
+    @Override
+    public void onResume()
+    {
+        super.onResume();
+        mRenderer.onResume();
+    }
+
+    // Pass-through override; kept for symmetry with the other surface callbacks.
+    @Override
+    public void surfaceChanged( SurfaceHolder holder, int format, int w, int h )
+    {
+        super.surfaceChanged( holder, format, w, h );
+    }
+
+    @Override
+    public void surfaceCreated( SurfaceHolder holder )
+    {
+        super.surfaceCreated( holder );
+        mObserver.onRenderSurfaceCreated();
+        // NOTE(review): the renderer is also resumed in onResume(); a resumed
+        // activity whose surface is recreated resumes it twice — verify
+        // CamGLRenderer.onResume() is idempotent.
+        if (mRenderer != null) {
+            mRenderer.onResume();
+        }
+    }
+
+    // Tears down the renderer permanently; the view cannot render again
+    // after this (mRenderer is nulled, unlike the pause/resume path).
+    @Override
+    public void surfaceDestroyed( SurfaceHolder holder )
+    {
+        super.surfaceDestroyed( holder );
+        if ( mRenderer != null ) {
+            mObserver.onRenderSurfaceDestroyed();
+            Log.e( TAG, " surfaceDestroyed Close renderer" );
+            mRenderer.onPause();
+            mRenderer.close();
+            mRenderer = null;
+        }
+    }
+
+    /** @return the active renderer, or null once the surface was destroyed. */
+    public CamGLRenderer getRendererInstance()
+    {
+        return mRenderer;
+    }
+}
diff --git a/src/com/android/camera/imageprocessor/FrameProcessor.java b/src/com/android/camera/imageprocessor/FrameProcessor.java
index 2e7ded169..4eaf7f7ad 100755
--- a/src/com/android/camera/imageprocessor/FrameProcessor.java
+++ b/src/com/android/camera/imageprocessor/FrameProcessor.java
@@ -46,8 +46,12 @@ import android.view.Surface;
 import android.widget.Toast;
 
 import com.android.camera.CaptureModule;
+import com.android.camera.PhotoModule;
 import com.android.camera.SettingsManager;
+import com.android.camera.deepportrait.DPImage;
+import com.android.camera.deepportrait.GLCameraPreview;
 import com.android.camera.imageprocessor.filter.BeautificationFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.ImageFilter;
 import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener;
 import com.android.camera.ui.RotateTextToast;
@@ -90,8 +94,11 @@ public class FrameProcessor {
     public static final int FILTER_NONE = 0;
     public static final int FILTER_MAKEUP = 1;
     public static final int LISTENER_TRACKING_FOCUS = 2;
+    public static final int FILTER_DEEP_PORTRAIT = 3;
     private CaptureModule mModule;
     private boolean mIsVideoOn = false;
+    private boolean mIsDeepPortrait = false;
+    private DeepPortraitFilter mDeepPortraitFilter = null;
 
     public FrameProcessor(Activity activity, CaptureModule module) {
         mActivity = activity;
@@ -101,14 +108,14 @@ public class FrameProcessor {
 
         mRs = RenderScript.create(mActivity);
         mRsYuvToRGB = new ScriptC_YuvToRgb(mRs);
-        mRsRotator = new ScriptC_rotator(mRs);
+        mRsRotator = new ScriptC_rotator(mRs);
     }
 
     private void init(Size previewDim) {
         mIsActive = true;
         mSize = previewDim;
         synchronized (mAllocationLock) {
-            mInputImageReader = ImageReader.newInstance(mSize.getWidth(), mSize.getHeight(), ImageFormat.YUV_420_888, 8);
+            mInputImageReader = ImageReader.newInstance(mSize.getWidth(), mSize.getHeight(), ImageFormat.YUV_420_888, 12);
 
             Type.Builder rgbTypeBuilder = new Type.Builder(mRs, Element.RGBA_8888(mRs));
             rgbTypeBuilder.setX(mSize.getHeight());
@@ -190,12 +197,29 @@ public class FrameProcessor {
 
     public void onOpen(ArrayList<Integer> filterIds, final Size size) {
         cleanFilterSet();
+        boolean hasDeepportraitFilter = false;
         if (filterIds != null) {
             for (Integer i : filterIds) {
                 addFilter(i.intValue());
+                if (i == FILTER_DEEP_PORTRAIT) {
+                    hasDeepportraitFilter = true;
+                }
             }
         }
-        if(isFrameFilterEnabled() || isFrameListnerEnabled()) {
+
+        mIsDeepPortrait = hasDeepportraitFilter;
+        if (mIsDeepPortrait && mPreviewFilters.size() != 0) {
+            mDeepPortraitFilter =
+                    (DeepPortraitFilter)mPreviewFilters.get(0);
+            mDeepPortraitFilter.init(size.getWidth(),size.getHeight(),0,0);
+            if (!mDeepPortraitFilter.getDPInitialized())
+                Toast.makeText(mActivity, "Deepportrait init failed",
+                    Toast.LENGTH_LONG).show();
+        } else {
+            mDeepPortraitFilter = null;
+        }
+
+        if(isFrameFilterEnabled() || isFrameListnerEnabled() || mIsDeepPortrait) {
             init(size);
         }
     }
@@ -206,6 +230,8 @@ public class FrameProcessor {
             filter = new BeautificationFilter(mModule);
         } else if (filterId == LISTENER_TRACKING_FOCUS) {
             filter = new TrackingFocusFrameListener(mModule);
+        } else if (filterId == FILTER_DEEP_PORTRAIT) {
+            filter = new DeepPortraitFilter(mModule,mModule.getCamGLRender());
         }
 
         if (filter != null && filter.isSupported()) {
@@ -292,6 +318,10 @@ public class FrameProcessor {
 
     public List<Surface> getInputSurfaces() {
         List<Surface> surfaces = new ArrayList<Surface>();
+        if (mIsDeepPortrait) {
+            surfaces.add(getReaderSurface());
+            return surfaces;
+        }
         if (mPreviewFilters.size() == 0 && mFinalFilters.size() == 0) {
             surfaces.add(mSurfaceAsItIs);
             if (mIsVideoOn) {
@@ -390,6 +420,20 @@ public class FrameProcessor {
                         image.close();
                         return;
                     }
+                    if (mIsDeepPortrait) {
+                        //render to GLSurfaceView directly
+                        GLCameraPreview preview = mModule.getGLCameraPreview();
+                        if (mDeepPortraitFilter != null && mDeepPortraitFilter.getDPInitialized()
+                                && preview != null) {
+                            DPImage DpImage = new DPImage(image,0);
+                            mDeepPortraitFilter.addImage(null,null,1,DpImage);
+                            preview.getRendererInstance().sendFrame(DpImage);
+                            preview.requestRender();
+                        } else {
+                            image.close();
+                        }
+                        return;
+                    }
                     mIsAllocationEverUsed = true;
                     ByteBuffer bY = image.getPlanes()[0].getBuffer();
                     ByteBuffer bVU = image.getPlanes()[2].getBuffer();
@@ -411,7 +455,7 @@ public class FrameProcessor {
                             filter.init(mSize.getWidth(), mSize.getHeight(), stride, stride);
                             if (filter instanceof BeautificationFilter) {
                                 filter.addImage(bY, bVU, 0, new Boolean(false));
-                            } else {
+                            } else{
                                 filter.addImage(bY, bVU, 0, new Boolean(true));
                             }
                             needToFeedSurface = true;
diff --git a/src/com/android/camera/imageprocessor/PostProcessor.java b/src/com/android/camera/imageprocessor/PostProcessor.java
old mode 100644
new mode 100755
index 72a92b4b3..c227f32b2
--- a/src/com/android/camera/imageprocessor/PostProcessor.java
+++ b/src/com/android/camera/imageprocessor/PostProcessor.java
@@ -30,6 +30,8 @@ package com.android.camera.imageprocessor;
 
 import android.content.ContentResolver;
 import android.content.Context;
+import android.content.SharedPreferences;
+import android.graphics.Bitmap;
 import android.graphics.ImageFormat;
 import android.graphics.Rect;
 import android.graphics.YuvImage;
@@ -50,6 +52,7 @@ import android.media.ImageWriter;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.os.Looper;
+import android.preference.PreferenceManager;
 import android.util.Log;
 import android.widget.Toast;
 
@@ -59,11 +62,13 @@ import com.android.camera.Exif;
 import com.android.camera.MediaSaveService;
 import com.android.camera.PhotoModule;
 import com.android.camera.SettingsManager;
+import com.android.camera.deepportrait.DPImage;
 import com.android.camera.exif.ExifInterface;
 import com.android.camera.exif.Rational;
 import com.android.camera.imageprocessor.filter.BestpictureFilter;
 import com.android.camera.imageprocessor.filter.BlurbusterFilter;
 import com.android.camera.imageprocessor.filter.ChromaflashFilter;
+import com.android.camera.imageprocessor.filter.DeepPortraitFilter;
 import com.android.camera.imageprocessor.filter.OptizoomFilter;
 import com.android.camera.imageprocessor.filter.SharpshooterFilter;
 import com.android.camera.imageprocessor.filter.StillmoreFilter;
@@ -148,6 +153,7 @@ public class PostProcessor{
     private LinkedList<ZSLQueue.ImageItem> mFallOffImages = new LinkedList<ZSLQueue.ImageItem>();
     private int mPendingContinuousRequestCount = 0;
     public int mMaxRequiredImageNum;
+    private boolean mIsDeepPortrait = false;
 
     public int getMaxRequiredImageNum() {
         return mMaxRequiredImageNum;
@@ -439,6 +445,12 @@ public class PostProcessor{
             mZSLReprocessImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(), ImageFormat.JPEG, mMaxRequiredImageNum);
             mZSLReprocessImageReader.setOnImageAvailableListener(processedImageAvailableListener, mHandler);
         }
+        if (mIsDeepPortrait) {
+            ImageFilter imageFilter = mController.getFrameFilters().get(0);
+            DeepPortraitFilter deepPortraitFilter =
+                    (DeepPortraitFilter) imageFilter;
+            deepPortraitFilter.initSnapshot(pictureSize.getWidth(),pictureSize.getHeight());
+        }
     }
 
     public boolean takeZSLPicture() {
@@ -678,9 +690,10 @@ public class PostProcessor{
 
     public void onOpen(int postFilterId, boolean isFlashModeOn, boolean isTrackingFocusOn,
                        boolean isMakeupOn, boolean isSelfieMirrorOn, boolean isSaveRaw,
-                       boolean isSupportedQcfa) {
+                       boolean isSupportedQcfa, boolean isDeepPortrait) {
         mImageHandlerTask = new ImageHandlerTask();
         mSaveRaw = isSaveRaw;
+        mIsDeepPortrait = isDeepPortrait;
         if(setFilter(postFilterId) || isFlashModeOn || isTrackingFocusOn || isMakeupOn || isSelfieMirrorOn
                 || PersistUtil.getCameraZSLDisabled()
                 || !SettingsManager.getInstance().isZSLInAppEnabled()
@@ -690,7 +703,7 @@ public class PostProcessor{
                 || "18".equals(SettingsManager.getInstance().getValue(
                                   SettingsManager.KEY_SCENE_MODE))
                 || mController.getCameraMode() == CaptureModule.DUAL_MODE
-                || isSupportedQcfa) {
+                || isSupportedQcfa || isDeepPortrait) {
             mUseZSL = false;
         } else {
             mUseZSL = true;
@@ -949,6 +962,17 @@ public class PostProcessor{
             }
             mOrientation = CameraUtil.getJpegRotation(mController.getMainCameraId(), mController.getDisplayOrientation());
         }
+        if (mIsDeepPortrait) {
+            ImageFilter imageFilter = mController.getFrameFilters().get(0);
+            DeepPortraitFilter deepPortraitFilter =
+                    (DeepPortraitFilter) imageFilter;
+            if (!deepPortraitFilter.getDPStillInit()) {
+                mStatus = STATUS.BUSY;
+                if(mWatchdog != null) {
+                    mWatchdog.startMonitor();
+                }
+            }
+        }
         if(mFilter != null && mCurrentNumImage >= mFilter.getNumRequiredImage()) {
             return;
         }
@@ -967,10 +991,78 @@ public class PostProcessor{
                         ByteBuffer vuBuf = image.getPlanes()[2].getBuffer();
 
                         if(mFilter == null) {
-                            mDefaultResultImage = new ImageFilter.ResultImage(ByteBuffer.allocateDirect(mStride * mHeight*3/2),
-                                                                    new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
-                            yBuf.get(mDefaultResultImage.outBuffer.array(), 0, yBuf.remaining());
-                            vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
+                            if (mIsDeepPortrait) {
+                                ImageFilter imageFilter = mController.getFrameFilters().get(0);
+                                DeepPortraitFilter deepPortraitFilter =
+                                        (DeepPortraitFilter) imageFilter;
+                                DPImage dpImage = new DPImage(image,0);
+                                long current = System.currentTimeMillis();
+                                deepPortraitFilter.addImage(null,null,0,dpImage);
+                                if (DEBUG_DUMP_FILTER_IMG) {
+                                    ImageFilter.ResultImage debugResultImage = new
+                                            ImageFilter.ResultImage(ByteBuffer.allocateDirect(
+                                            mStride * mHeight * 3 / 2), new Rect(0, 0, mWidth,
+                                            mHeight), mWidth, mHeight, mStride);
+                                    yBuf.get(debugResultImage.outBuffer.array(), 0, yBuf.remaining());
+                                    vuBuf.get(debugResultImage.outBuffer.array(), mStride * mHeight,
+                                            vuBuf.remaining());
+                                    yBuf.rewind();
+                                    vuBuf.rewind();
+
+                                    byte[] bytes = nv21ToJpeg(debugResultImage, mOrientation, null);
+                                    mActivity.getMediaSaveService().addImage(
+                                            bytes, "Debug_beforeApplyingFilter" + numImage, 0L, null,
+                                            debugResultImage.outRoi.width(),
+                                            debugResultImage.outRoi.height(),
+                                            mOrientation, null, mController.getMediaSavedListener(),
+                                            mActivity.getContentResolver(), "jpeg");
+
+                                    if (dpImage.mMask != null) {
+                                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+                                        Bitmap mask = DeepPortraitFilter.DpMaskToImage(
+                                                dpImage.mMask, dpImage.mMaskWidth,dpImage.mMaskHeight);
+                                        mask.compress(Bitmap.CompressFormat.JPEG, 75, baos);
+                                        byte[] data = baos.toByteArray();
+                                        mActivity.getMediaSaveService().addImage(
+                                                data, "DPmask" + System.currentTimeMillis(), 0L, null,
+                                                dpImage.mMaskWidth,
+                                                dpImage.mMaskHeight,
+                                                mOrientation, null, mController.getMediaSavedListener(),
+                                                mActivity.getContentResolver(), "jpeg");
+                                    }
+                                }
+                                if (dpImage.mMask == null) {
+                                    Log.d(TAG,"can't generate deepportrait mask");
+                                    mDefaultResultImage = new ImageFilter.ResultImage(
+                                            ByteBuffer.allocateDirect(mStride * mHeight*3/2),
+                                            new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
+                                    yBuf.get(mDefaultResultImage.outBuffer.array(), 0, yBuf.remaining());
+                                    vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
+                                } else {
+                                    ByteBuffer dstY = ByteBuffer.allocateDirect(yBuf.capacity());
+                                    ByteBuffer dstVU = ByteBuffer.allocateDirect(vuBuf.capacity());
+                                    final SharedPreferences prefs =
+                                            PreferenceManager.getDefaultSharedPreferences(mActivity);
+                                    int level = prefs.getInt(SettingsManager.KEY_DEEPPORTRAIT_VALUE
+                                            ,50);
+                                    deepPortraitFilter.renderDeepportraitImage(
+                                            dpImage,dstY,dstVU,0, level/100f);
+                                    Log.d(TAG,"process Dp snapshot cost time "+ (System.currentTimeMillis() - current));
+                                    mDefaultResultImage = new ImageFilter.ResultImage(
+                                            ByteBuffer.allocateDirect(mStride * mHeight*3/2),
+                                            new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
+                                    dstY.get(mDefaultResultImage.outBuffer.array(), 0,
+                                            dstY.remaining());
+                                    dstVU.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight,
+                                            dstVU.remaining());
+                                }
+                            } else {
+                                mDefaultResultImage = new ImageFilter.ResultImage(
+                                        ByteBuffer.allocateDirect(mStride * mHeight*3/2),
+                                        new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
+                                yBuf.get(mDefaultResultImage.outBuffer.array(), 0, yBuf.remaining());
+                                vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
+                            }
                             image.close();
                         } else {
                             if (DEBUG_DUMP_FILTER_IMG) {
@@ -1070,9 +1162,12 @@ public class PostProcessor{
                     }
                     if(resultImage != null) {
                         //Start processing FrameProcessor filter as well
-                        for (ImageFilter filter : mController.getFrameFilters()) {
-                            filter.init(resultImage.width, resultImage.height, resultImage.stride, resultImage.stride);
-                            filter.addImage(resultImage.outBuffer, null, 0, new Boolean(false));
+                        if (!mIsDeepPortrait) {
+                            for (ImageFilter filter : mController.getFrameFilters()) {
+                                filter.init(resultImage.width, resultImage.height,
+                                        resultImage.stride, resultImage.stride);
+                                filter.addImage(resultImage.outBuffer, null, 0, new Boolean(false));
+                            }
                         }
 
                         if(isSelfieMirrorOn() && !mController.isBackCamera()) {
@@ -1194,7 +1289,7 @@ public class PostProcessor{
         }
     };
 
-    private byte[] nv21ToJpeg(ImageFilter.ResultImage resultImage, int orientation, TotalCaptureResult result) {
+    public byte[] nv21ToJpeg(ImageFilter.ResultImage resultImage, int orientation, TotalCaptureResult result) {
         BitmapOutputStream bos = new BitmapOutputStream(1024);
         YuvImage im = new YuvImage(resultImage.outBuffer.array(), ImageFormat.NV21,
                                     resultImage.width, resultImage.height, new int[]{resultImage.stride, resultImage.stride});
diff --git a/src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java b/src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java
new file mode 100755
index 000000000..9cadcf325
--- /dev/null
+++ b/src/com/android/camera/imageprocessor/filter/DeepPortraitFilter.java
@@ -0,0 +1,325 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above
+ *    copyright notice, this list of conditions and the following
+ *    disclaimer in the documentation and/or other materials provided
+ *    with the distribution.
+ *  * Neither the name of The Linux Foundation nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package com.android.camera.imageprocessor.filter;
+
+import android.graphics.Bitmap;
+import android.graphics.Color;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.media.Image;
+import android.os.AsyncTask;
+import android.os.Environment;
+import android.os.Handler;
+import android.util.Log;
+import android.util.Size;
+
+import com.android.camera.CaptureModule;
+import com.android.camera.deepportrait.CamGLRenderer;
+import com.android.camera.deepportrait.DPImage;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+public class DeepPortraitFilter implements ImageFilter {
+    private static String TAG = "DeepPortraitFilter";
+    private static String VIDEO_DLC = "deepportrait_preview.dlce";
+    private static String SNAPSHOT_DLC = "deepportrait_snapshot.dlce";
+    private static String SD_ROOT_PATH = Environment.getExternalStorageDirectory().toString();
+    private static boolean mIsSupported = false;
+    int mWidth;
+    int mHeight;
+    int mSnapshotWidth;
+    int mSnapshotHeight;
+    int mStrideY;
+    int mStrideVU;
+    private CaptureModule mModule;
+    private CamGLRenderer mRender;
+    private Boolean mDPInitialized = false;
+    private Boolean mDPStillInit = false;
+    private int mVideoMaskSize = 0;
+    private static final int DP_QUEUE_SIZE = 30;
+    private ByteBuffer[] mMaskBufArray = new ByteBuffer[DP_QUEUE_SIZE];
+    private int mSeqNo;
+
+    public DeepPortraitFilter(CaptureModule module, CamGLRenderer render) {
+        mModule = module;
+        mRender = render;
+    }
+
+    @Override
+    public List<CaptureRequest> setRequiredImages(CaptureRequest.Builder builder) {
+        return null;
+    }
+
+    @Override
+    public String getStringName() {
+        return null;
+    }
+
+    @Override
+    public int getNumRequiredImage() {
+        return 0;
+    }
+
+    @Override
+    public void init(int width, int height, int strideY, int strideVU) {
+        mWidth = width;
+        mHeight = height;
+        mStrideY = strideY;
+        mStrideVU = strideVU;
+        mSeqNo = 0;
+        mDPInitialized = initPreview(width, height);
+        if (mDPInitialized) {
+            mVideoMaskSize = getMaskBufferSize();
+            for ( int i = 0; i < mMaskBufArray.length; ++i ) {
+                mMaskBufArray[i]  = ByteBuffer.allocateDirect(mVideoMaskSize);
+            }
+        }
+        Log.d(TAG,"init width = " +width +" height = " + height);
+    }
+
+    public void initSnapshot(int width, int height) {
+        String dlcPath = SD_ROOT_PATH + File.separator + SNAPSHOT_DLC;
+        File dlc = new File(dlcPath);
+        if (!dlc.exists()) {
+            mDPStillInit = false;
+            return;
+        }
+        mSnapshotWidth = width;
+        mSnapshotHeight = height;
+        new InitializeDpSnapShot().execute();
+        Log.d(TAG,"initSnapshot width = " +width +" height = " + height);
+    }
+
+    public boolean initPreview(int width, int height) {
+        String dlcPath = SD_ROOT_PATH + File.separator + VIDEO_DLC;
+        File dlc = new File(dlcPath);
+        if (!dlc.exists()) {
+            return false;
+        }
+        return initVideoDeepPortrait(width, height);
+    }
+
+    public boolean getDPInitialized() {
+        return mDPInitialized;
+    }
+
+    public boolean getDPStillInit() {
+        return mDPStillInit;
+    }
+
+    @Override
+    public void deinit() {
+        mDPInitialized = false;
+        mDPStillInit = false;
+    }
+
+    @Override
+    //inputimage is DPimage, imageNum > 0 preview ; imageNum = 0 snapshot
+    public void addImage(ByteBuffer bY, ByteBuffer bVU, int imageNum, Object inputImage) {
+        DPImage dpImage = (DPImage)inputImage;
+        Image image = dpImage.mImage;
+        Image.Plane[] planes = image.getPlanes();
+        ByteBuffer bufferY = planes[0].getBuffer();
+        ByteBuffer bufferC = planes[2].getBuffer();
+        if (imageNum > 0) {
+            mSeqNo++;
+            ByteBuffer mask = mMaskBufArray[mSeqNo % mMaskBufArray.length];
+            dpImage.mMask = mask;
+            dpImage.mSeqNumber = mSeqNo;
+            int displayOrientation = mModule.getDisplayOrientation() == -1?
+                    0:mModule.getDisplayOrientation();
+            int sensorOrientation = mModule.getSensorOrientation();
+            int adjustedRotation = ( sensorOrientation - displayOrientation + 360 ) % 360;
+            dpImage.mOrientation = adjustedRotation;
+            runDpVideoWarpMask( bufferY, bufferC, planes[0].getRowStride(),
+                    planes[2].getRowStride(),adjustedRotation,mask,getMaskWidth());
+        } else {
+            int[] maskSize = new int[2];
+            boolean success = false;
+            if (mDPStillInit) {
+                success = getSnapshotMaskBufferSize(mSnapshotWidth,mSnapshotHeight,maskSize);
+            }
+            int maskWidth = maskSize[0];
+            int maskHeight = maskSize[1];
+            int size = maskWidth * maskHeight;
+            if (!success || size == 0) {
+                Log.d(TAG,"failed to get SnapshotMaskBufferSize success = "
+                        + success +" size = " + size);
+                return;
+            }
+            ByteBuffer mask = ByteBuffer.allocateDirect(maskWidth * maskHeight);
+            dpImage.mMask = mask;
+            dpImage.mMaskWidth = maskWidth;
+            dpImage.mMaskHeight = maskHeight;
+            int displayOrientation = mModule.getDisplayOrientation() == -1?
+                    0:mModule.getDisplayOrientation();
+            int sensorOrientation = mModule.getSensorOrientation();
+            int adjustedRotation = ( sensorOrientation - displayOrientation + 360 ) % 360;
+            dpImage.mOrientation = adjustedRotation;
+            runDpSnapshotWarpMask(bufferY,bufferC,
+                    planes[0].getRowStride(), planes[2].getRowStride(),
+                    mask,maskWidth,adjustedRotation);
+        }
+    }
+
+    @Override
+    public ResultImage processImage() {
+        return null;
+    }
+
+    public boolean renderDeepportraitImage(DPImage dpImage,ByteBuffer dstY, ByteBuffer dstVU,
+                                        int effect, float intensity) {
+        boolean ret;
+        Image image = dpImage.mImage;
+        Image.Plane[] planes = image.getPlanes();
+        ByteBuffer bufferY = planes[0].getBuffer();
+        ByteBuffer bufferC = planes[2].getBuffer();
+        int width = image.getWidth();
+        int height = image.getHeight();
+        int strideY = planes[0].getRowStride();
+        int strideVU = planes[2].getRowStride();
+        if (dpImage.mMask == null) {
+            return false;
+        }
+        ret = initDpEffect(bufferY,bufferC,width,height,strideY,strideVU,
+                dpImage.mMask,dpImage.mMaskWidth,dpImage.mMaskHeight,dpImage.mMaskWidth);
+        Log.d(TAG,"initDpEffect success = " + ret);
+        if (ret) {
+            ret = renderDpEffect(dstY,dstVU,width,height,strideY,strideVU,effect,intensity,
+                    dpImage.mOrientation);
+            Log.d(TAG,"renderDpEffect  success = " + ret);
+        }
+        return ret;
+    }
+
+    public static Bitmap DpMaskToImage(ByteBuffer maskBuffer, int width, int height) {
+        byte[] maskArray = new byte[width * height];
+        maskBuffer.get(maskArray);
+        int[] rgbArray = new int[maskArray.length];
+        for (int i = 0; i < maskArray.length; i++) {
+            int alpha  = (int) maskArray[i];
+            rgbArray[i] = Color.rgb(alpha,alpha,alpha);
+        }
+        Bitmap maskImage = Bitmap.createBitmap(rgbArray,width,height, Bitmap.Config.ARGB_8888);
+        return maskImage;
+    }
+
+    @Override
+    public boolean isSupported() {
+        return mIsSupported;
+    }
+
+    public static boolean isSupportedStatic(){return mIsSupported;}
+
+    @Override
+    public boolean isFrameListener() {
+        return false;
+    }
+
+    @Override
+    public boolean isManualMode() {
+        return false;
+    }
+
+    @Override
+    public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+                              CameraCaptureSession.CaptureCallback callback,
+                              Handler handler) throws CameraAccessException {
+
+    }
+
+    private class InitializeDpSnapShot extends AsyncTask<Void, Void, Void>
+    {
+
+        @Override
+        protected void onPreExecute()
+        {
+            super.onPreExecute();
+        }
+
+        @Override
+        protected void onPostExecute(Void params)
+        {
+            super.onPostExecute(params);
+        }
+
+        @Override
+        protected Void doInBackground(Void... params)
+        {
+
+            if ( !mDPStillInit ) {
+                mDPStillInit = initSnapshotDeepPortrait(mSnapshotWidth, mSnapshotHeight);
+            }
+            return null;
+        }
+    }
+
+    public int getDpMaskWidth() {
+        return getMaskWidth();
+    }
+
+    public int getDpMaskHieght() {
+        return getMaskHeight();
+    }
+
+
+    private native boolean initVideoDeepPortrait(int width, int height);
+    private native boolean initSnapshotDeepPortrait(int width, int height);
+    private native boolean runDpVideoWarpMask(ByteBuffer yData, ByteBuffer vuData, int yStride,
+                                              int vuStride, int orientation,
+                                              ByteBuffer mask, int maskStride);
+    private native boolean runDpSnapshotWarpMask(ByteBuffer yData, ByteBuffer vuData, int yStride,
+                                                 int vuStride, ByteBuffer mask, int maskStride,
+                                                 int orientation);
+    private native boolean getSnapshotMaskBufferSize(int width, int height, int[] maskSize);
+    private native int getMaskBufferSize( );
+    private native int getMaskWidth( );
+    private native int getMaskHeight( );
+    private native boolean initDpEffect(ByteBuffer yData, ByteBuffer vuData, int width, int height,
+                                        int yStride, int vuStride, ByteBuffer mask, int maskWidth,
+                                        int maskHeight,int maskStride);
+    private native boolean renderDpEffect(ByteBuffer dstYData, ByteBuffer dstVUData,int width,
+                                          int height, int yStride, int vuStride,int effect,
+                                          float intensity, int orientation);
+
+    static {
+        try {
+            System.loadLibrary("jni_deepportrait");
+            mIsSupported = true;
+        }catch(UnsatisfiedLinkError e) {
+            mIsSupported = false;
+            Log.d(TAG,"failed to load jni_deepportrait");
+        }
+    }
+}
diff --git a/src/com/android/camera/ui/OneUICameraControls.java b/src/com/android/camera/ui/OneUICameraControls.java
index 8d156e3fe..504cb2679 100755
--- a/src/com/android/camera/ui/OneUICameraControls.java
+++ b/src/com/android/camera/ui/OneUICameraControls.java
@@ -58,6 +58,7 @@ public class OneUICameraControls extends RotatableLayout {
     private View mPreview;
     private View mSceneModeSwitcher;
     private View mFilterModeSwitcher;
+    private View mDeepportraitSwitcher;
     private View mMakeupSeekBar;
     private View mMakeupSeekBarLowText;
     private View mMakeupSeekBarHighText;
@@ -152,6 +153,7 @@ public class OneUICameraControls extends RotatableLayout {
         mMakeupSeekBarLayout = findViewById(R.id.makeup_seekbar_layout);
         ((SeekBar)mMakeupSeekBar).setMax(100);
         mFlashButton = findViewById(R.id.flash_button);
+        mDeepportraitSwitcher = findViewById(R.id.deepportrait_switcher);
         mMute = findViewById(R.id.mute_button);
         mPreview = findViewById(R.id.preview_thumb);
         mSceneModeSwitcher = findViewById(R.id.scene_mode_switcher);
@@ -229,8 +231,8 @@ public class OneUICameraControls extends RotatableLayout {
 
         mViews = new View[]{
                 mSceneModeSwitcher, mFilterModeSwitcher, mFrontBackSwitcher,
-                mTsMakeupSwitcher, mFlashButton, mShutter, mPreview, mVideoShutter,
-                mPauseButton, mCancelButton
+                mTsMakeupSwitcher,mDeepportraitSwitcher, mFlashButton, mShutter,
+                mPreview, mVideoShutter, mPauseButton, mCancelButton
         };
         mBottomLargeSize = getResources().getDimensionPixelSize(
                 R.dimen.one_ui_bottom_large);
@@ -309,7 +311,12 @@ public class OneUICameraControls extends RotatableLayout {
         } else {
             v.setY(mHeight - mBottom + (mBottom - h) / 2);
         }
-        float bW = mWidth / 5f;
+        float bW;
+        if (top) {
+            bW = mWidth / 6f;
+        } else {
+            bW = mWidth / 5f;
+        }
         v.setX(bW * idx + (bW - w) / 2);
     }
 
@@ -341,6 +348,7 @@ public class OneUICameraControls extends RotatableLayout {
             setLocation(mFrontBackSwitcher, true, 2);
             setLocation(mTsMakeupSwitcher, true, 3);
             setLocation(mFlashButton, true, 4);
+            setLocation(mDeepportraitSwitcher,true,5);
             if (mIntentMode == CaptureModule.INTENT_MODE_CAPTURE) {
                 setLocation(mShutter, false, 2);
                 setLocation(mCancelButton, false, 0.85f);
-- 
GitLab