Selaa lähdekoodia

通过直接导入的方式来加载

zengjiebin 7 vuotta sitten
vanhempi
commit
684a09a724
100 muutettua tiedostoa jossa 17733 lisäystä ja 105 poistoa
  1. 2 0
      .idea/gradle.xml
  2. 1 1
      app/build.gradle
  3. 11 7
      app/src/main/AndroidManifest.xml
  4. 3 2
      app/src/main/java/com/sheep/gamegroup/util/Jump2View.java
  5. 2 0
      app/src/main/java/com/sheep/jiuyan/samllsheep/SheepApp.java
  6. 125 55
      media/app/build.gradle
  7. 4 7
      media/app/src/main/AndroidManifest.xml
  8. 34 33
      media/app/src/main/java/com/kfzs/cfyl/media/MediaApp.java
  9. 1 0
      media/cge_library/.gitignore
  10. 42 0
      media/cge_library/build.gradle
  11. 18 0
      media/cge_library/proguard-rules.pro
  12. 2 0
      media/cge_library/project.properties
  13. 5 0
      media/cge_library/src/main/AndroidManifest.xml
  14. 17 0
      media/cge_library/src/main/java/org/wysaid/algorithm/AlgorithmUtil.java
  15. 51 0
      media/cge_library/src/main/java/org/wysaid/algorithm/Matrix2x2.java
  16. 109 0
      media/cge_library/src/main/java/org/wysaid/algorithm/Matrix3x3.java
  17. 212 0
      media/cge_library/src/main/java/org/wysaid/algorithm/Matrix4x4.java
  18. 17 0
      media/cge_library/src/main/java/org/wysaid/algorithm/Vector2.java
  19. 18 0
      media/cge_library/src/main/java/org/wysaid/algorithm/Vector3.java
  20. 19 0
      media/cge_library/src/main/java/org/wysaid/algorithm/Vector4.java
  21. 418 0
      media/cge_library/src/main/java/org/wysaid/camera/CameraInstance.java
  22. 104 0
      media/cge_library/src/main/java/org/wysaid/common/Common.java
  23. 60 0
      media/cge_library/src/main/java/org/wysaid/common/ConcurrentQueueHelper.java
  24. 37 0
      media/cge_library/src/main/java/org/wysaid/common/FrameBufferObject.java
  25. 218 0
      media/cge_library/src/main/java/org/wysaid/common/ProgramObject.java
  26. 199 0
      media/cge_library/src/main/java/org/wysaid/common/SharedContext.java
  27. 141 0
      media/cge_library/src/main/java/org/wysaid/common/TextureDrawer.java
  28. 117 0
      media/cge_library/src/main/java/org/wysaid/geometryUtils/GeometryRenderer.java
  29. 9 0
      media/cge_library/src/main/java/org/wysaid/geometryUtils/GeometryRendererLine.java
  30. 29 0
      media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerCodec.java
  31. 83 0
      media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerI420ToRGB.java
  32. 42 0
      media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerNV12ToRGB.java
  33. 76 0
      media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerNV21ToRGB.java
  34. 128 0
      media/cge_library/src/main/java/org/wysaid/myUtils/FileUtil.java
  35. 89 0
      media/cge_library/src/main/java/org/wysaid/myUtils/ImageUtil.java
  36. 59 0
      media/cge_library/src/main/java/org/wysaid/myUtils/MsgUtil.java
  37. 136 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGEDeformFilterWrapper.java
  38. 27 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGEFFmpegNativeLibrary.java
  39. 169 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGEFaceTracker.java
  40. 143 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGEFrameRecorder.java
  41. 196 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGEFrameRenderer.java
  42. 155 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGEImageHandler.java
  43. 177 0
      media/cge_library/src/main/java/org/wysaid/nativePort/CGENativeLibrary.java
  44. 15 0
      media/cge_library/src/main/java/org/wysaid/nativePort/NativeLibraryLoader.java
  45. 154 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRenderer.java
  46. 206 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererBlur.java
  47. 84 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererDrawOrigin.java
  48. 82 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererEdge.java
  49. 54 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererEmboss.java
  50. 284 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererLerpBlur.java
  51. 143 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererMask.java
  52. 46 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererThreshold.java
  53. 87 0
      media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererWave.java
  54. 330 0
      media/cge_library/src/main/java/org/wysaid/view/CameraGLSurfaceView.java
  55. 244 0
      media/cge_library/src/main/java/org/wysaid/view/CameraGLSurfaceViewWithBuffer.java
  56. 422 0
      media/cge_library/src/main/java/org/wysaid/view/CameraGLSurfaceViewWithTexture.java
  57. 255 0
      media/cge_library/src/main/java/org/wysaid/view/CameraRecordGLSurfaceView.java
  58. 411 0
      media/cge_library/src/main/java/org/wysaid/view/ImageGLSurfaceView.java
  59. 616 0
      media/cge_library/src/main/java/org/wysaid/view/SimplePlayerGLSurfaceView.java
  60. 88 0
      media/cge_library/src/main/java/org/wysaid/view/TrackingCameraGLSurfaceView.java
  61. 582 0
      media/cge_library/src/main/java/org/wysaid/view/VideoPlayerGLSurfaceView.java
  62. 185 0
      media/cge_library/src/main/jni/Android.mk
  63. 14 0
      media/cge_library/src/main/jni/Application.mk
  64. 6 0
      media/cge_library/src/main/jni/README.md
  65. 70 0
      media/cge_library/src/main/jni/buildJNI
  66. 309 0
      media/cge_library/src/main/jni/cge/common/cgeCommonDefine.cpp
  67. 306 0
      media/cge_library/src/main/jni/cge/common/cgeGLFunctions.cpp
  68. 245 0
      media/cge_library/src/main/jni/cge/common/cgeGlobal.cpp
  69. 283 0
      media/cge_library/src/main/jni/cge/common/cgeImageFilter.cpp
  70. 790 0
      media/cge_library/src/main/jni/cge/common/cgeImageHandler.cpp
  71. 387 0
      media/cge_library/src/main/jni/cge/common/cgeShaderFunctions.cpp
  72. 868 0
      media/cge_library/src/main/jni/cge/common/cgeTextureUtils.cpp
  73. 347 0
      media/cge_library/src/main/jni/cge/extends/cgeThread.cpp
  74. 833 0
      media/cge_library/src/main/jni/cge/filters/CGELiquifyFilter.cpp
  75. 140 0
      media/cge_library/src/main/jni/cge/filters/cgeAdvancedEffects.cpp
  76. 67 0
      media/cge_library/src/main/jni/cge/filters/cgeAdvancedEffectsCommon.cpp
  77. 169 0
      media/cge_library/src/main/jni/cge/filters/cgeBeautifyFilter.cpp
  78. 221 0
      media/cge_library/src/main/jni/cge/filters/cgeBilateralBlurFilter.cpp
  79. 1142 0
      media/cge_library/src/main/jni/cge/filters/cgeBlendFilter.cpp
  80. 98 0
      media/cge_library/src/main/jni/cge/filters/cgeBrightnessAdjust.cpp
  81. 169 0
      media/cge_library/src/main/jni/cge/filters/cgeColorBalanceAdjust.cpp
  82. 65 0
      media/cge_library/src/main/jni/cge/filters/cgeColorLevelAdjust.cpp
  83. 234 0
      media/cge_library/src/main/jni/cge/filters/cgeColorMappingFilter.cpp
  84. 42 0
      media/cge_library/src/main/jni/cge/filters/cgeContrastAdjust.cpp
  85. 87 0
      media/cge_library/src/main/jni/cge/filters/cgeCrosshatchFilter.cpp
  86. 619 0
      media/cge_library/src/main/jni/cge/filters/cgeCurveAdjust.cpp
  87. 1396 0
      media/cge_library/src/main/jni/cge/filters/cgeDataParsingEngine.cpp
  88. 31 0
      media/cge_library/src/main/jni/cge/filters/cgeDynamicFilters.cpp
  89. 110 0
      media/cge_library/src/main/jni/cge/filters/cgeDynamicWaveFilter.cpp
  90. 110 0
      media/cge_library/src/main/jni/cge/filters/cgeEdgeFilter.cpp
  91. 64 0
      media/cge_library/src/main/jni/cge/filters/cgeEmbossFilter.cpp
  92. 37 0
      media/cge_library/src/main/jni/cge/filters/cgeExposureAdjust.cpp
  93. 176 0
      media/cge_library/src/main/jni/cge/filters/cgeFilterBasic.cpp
  94. 77 0
      media/cge_library/src/main/jni/cge/filters/cgeHalftoneFilter.cpp
  95. 65 0
      media/cge_library/src/main/jni/cge/filters/cgeHazeFilter.cpp
  96. 64 0
      media/cge_library/src/main/jni/cge/filters/cgeHueAdjust.cpp
  97. 200 0
      media/cge_library/src/main/jni/cge/filters/cgeLerpblurFilter.cpp
  98. 66 0
      media/cge_library/src/main/jni/cge/filters/cgeLookupFilter.cpp
  99. 33 0
      media/cge_library/src/main/jni/cge/filters/cgeMaxValueFilter.cpp
  100. 0 0
      media/cge_library/src/main/jni/cge/filters/cgeMidValueFilter.cpp

+ 2 - 0
.idea/gradle.xml

@@ -11,6 +11,8 @@
             <option value="$PROJECT_DIR$/WaterWaveProgress" />
             <option value="$PROJECT_DIR$/app" />
             <option value="$PROJECT_DIR$/joevideolib" />
+            <option value="$PROJECT_DIR$/media/app" />
+            <option value="$PROJECT_DIR$/media/cge_library" />
             <option value="$PROJECT_DIR$/media/share_library" />
             <option value="$PROJECT_DIR$/ucrop" />
             <option value="$PROJECT_DIR$/view" />

+ 1 - 1
app/build.gradle

@@ -423,7 +423,7 @@ dependencies {
     implementation('com.qiniu:qiniu-android-sdk:7.3.13') {
         exclude group: 'com.squareup.okhttp3', module: 'okhttp'
     }
-    implementation project(':media_share_lib')
+    api project(':media')
     //https://github.com/didi/VirtualAPK
     implementation 'com.didi.virtualapk:core:0.9.8'
     //fast json

+ 11 - 7
app/src/main/AndroidManifest.xml

@@ -909,7 +909,7 @@
         <activity
             android:name="com.sheep.gamegroup.module.login.LoginAct"
             android:screenOrientation="portrait"
-            android:theme="@style/AppActionTheme"></activity>
+            android:theme="@style/AppActionTheme"/>
         <activity
             android:name="com.sheep.gamegroup.module.login.ChangePasswordAct"
             android:screenOrientation="portrait"
@@ -917,17 +917,21 @@
         <activity
             android:name="com.sheep.gamegroup.module.login.ReLoginNameAct"
             android:screenOrientation="portrait"
-            android:theme="@style/AppActionTheme"></activity>
-        <!-- 小绵羊3.4.7 新增界面 -->
-        <activity
-            android:name="com.sheep.gamegroup.module.user.activity.ActVip"
-            android:screenOrientation="portrait"
             android:theme="@style/AppActionTheme"/>
         <!-- 小绵羊3.4.7 新增界面 -->
         <activity
-            android:name="com.sheep.gamegroup.module.user.activity.ActUserAddrInfo"
+            android:name="com.sheep.gamegroup.module.user.activity.ActVip"
             android:screenOrientation="portrait"
             android:theme="@style/AppActionTheme"/>
+        <!-- media界面 -->
+        <activity android:name="com.kfzs.cfyl.media.activity.ActCutVideo"
+            android:theme="@style/media_AppTheme"
+            android:configChanges="orientation|keyboardHidden|navigation|screenSize"
+            android:screenOrientation="sensor"/>
+        <activity android:name="com.kfzs.cfyl.media.activity.ActEditVideo"
+            android:theme="@style/media_AppTheme"
+            android:configChanges="orientation|keyboardHidden|navigation|screenSize"
+            android:screenOrientation="sensor"/>
     </application>
 
 </manifest>

+ 3 - 2
app/src/main/java/com/sheep/gamegroup/util/Jump2View.java

@@ -2341,8 +2341,9 @@ public class Jump2View {
      * @param data 视频数据,包括地址与时长,宽高等
      */
     public void goActCutVideo(Activity activity, Video data) {
-        SheepPluginUtil.checkAndRunPlugin(activity, Plugin.media)
-                .flatMap((Function<Plugin, ObservableSource<BaseMessage>>) plugin -> SheepApp.getInstance().getNetComponent().getApiService().getVideoTopic())
+//        SheepPluginUtil.checkAndRunPlugin(activity, Plugin.media)
+//                .flatMap((Function<Plugin, ObservableSource<BaseMessage>>) plugin -> SheepApp.getInstance().getNetComponent().getApiService().getVideoTopic())
+        SheepApp.getInstance().getNetComponent().getApiService().getVideoTopic()
                 .subscribeOn(Schedulers.io())
                 .observeOn(AndroidSchedulers.mainThread())
                 .subscribe(new AbsObserver<BaseMessage>() {

+ 2 - 0
app/src/main/java/com/sheep/jiuyan/samllsheep/SheepApp.java

@@ -17,6 +17,7 @@ import com.baidu.location.LocationClientOption;
 import com.bumptech.glide.Glide;
 import com.danikula.videocache.HttpProxyCacheServer;
 import com.didi.virtualapk.PluginManager;
+import com.kfzs.cfyl.media.MediaApp;
 import com.liulishuo.okdownload.OkDownload;
 import com.liulishuo.okdownload.core.dispatcher.DownloadDispatcher;
 import com.sheep.gamegroup.di.components.DaggerNetComponent;
@@ -266,6 +267,7 @@ public class SheepApp extends MultiDexApplication {
 //        RemitStoreOnSQLite.setRemitToDBDelayMillis(3000);
         //webView 缓存优化初始化 https://github.com/yale8848/CacheWebView
         WebViewCacheInterceptorInst.getInstance().init(new WebViewCacheInterceptor.Builder(this).setDebug(BuildConfig.DEBUG).setCacheSize(Long.MAX_VALUE));
+        MediaApp.initCgeLibrary(this);
     }
 
     private void initBdLocationOption() {

+ 125 - 55
media/app/build.gradle

@@ -1,11 +1,129 @@
-apply plugin: 'com.android.application'
-apply plugin: 'com.didi.virtualapk.plugin'
+//apply plugin: 'com.android.application'
+//apply plugin: 'com.didi.virtualapk.plugin'
+//
+//android {
+//    compileSdkVersion ANDROID_COMPILE_SDK_VERSION as int
+//
+//    defaultConfig {
+//        applicationId "com.kfzs.cfyl.media"
+//        minSdkVersion ANDROID_MIN_SDK_VERSION as int
+//        targetSdkVersion ANDORID_TARGET_SDK_VERSION as int
+//        versionCode 1
+//        versionName "1.0"
+//
+//        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
+//
+//        ndk {
+//            abiFilters "armeabi-v7a"
+////            abiFilters 'x86'
+////            abiFilters "armeabi-v7a"//, 'x86'//, 'armeabi-v7a', 'x86_64', 'arm64-v8a'
+//        }
+//    }
+//
+//    signingConfigs {
+//        config {
+//            keyAlias 'sheep'
+//            keyPassword 'zhaoyi2004'
+//            storeFile file('.././../sign.jks')
+//            storePassword 'zhaoyi2004'
+//            v1SigningEnabled true
+//            v2SigningEnabled false
+//        }
+//
+//    }
+//    buildTypes {
+//        release {
+//            minifyEnabled false
+//            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+//            signingConfig signingConfigs.config
+//        }
+//        debug {
+//            signingConfig signingConfigs.config
+//        }
+//    }
+////    splits {
+////        abi {
+////            enable true
+////            reset()
+////            include 'x86', 'armeabi-v7a'//, 'arm64-v8a'
+//////            include 'mips', 'arm64-v8a', 'x86', 'x86_64', 'armeabi', 'armeabi-v7a'
+////            universalApk false
+////        }
+////    }
+//
+//}
+//
+//configurations.all {
+//    resolutionStrategy.eachDependency { DependencyResolveDetails details ->
+//        def requested = details.requested
+//        if (requested.group == 'com.android.support') {
+//            if (!requested.name.startsWith("multidex")) {
+//                details.useVersion "$supportLibVersion"
+//            }
+//        }
+//    }
+//}
+//dependencies {
+//    implementation fileTree(dir: 'libs', include: ['*.jar'])
+//
+//    implementation "com.android.support:appcompat-v7:$supportLibVersion"
+//    testImplementation 'junit:junit:4.12'
+//    androidTestImplementation 'com.android.support.test:runner:1.0.2'
+//    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
+//    //显示图片视频帧,在插件中不兼容x86模拟器,所以x86不考虑
+////    implementation 'com.github.wseemann:FFmpegMediaMetadataRetriever:1.0.14'
+//    implementation project(':share_library')
+//    implementation("com.android.support:recyclerview-v7:$supportLibVersion")
+//    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
+//    implementation 'com.github.ybq:Android-SpinKit:1.1.0'
+//    //All arch: armeabi, armeabi-v7a, arm64-v8a, mips, x86
+//    implementation 'org.wysaid:gpuimage-plus:2.5.1'
+//    //Pure graphics lib without ffmpeg. (all arch for branch 'min')
+////    implementation 'org.wysaid:gpuimage-plus:2.5.1-min'
+//    implementation('com.github.bumptech.glide:glide:4.1.1') {
+//        exclude group: 'com.android.support', module: 'support-fragment'
+////        exclude group: 'com.github.bumptech.glide', module: 'gifdecoder'
+////        exclude group: 'com.github.bumptech.glide', module: 'disklrucache'
+////        exclude group: 'com.github.bumptech.glide', module: 'annotations'
+//    }
+//    annotationProcessor 'com.github.bumptech.glide:compiler:4.1.1'
+//    implementation 'com.github.CymChad:BaseRecyclerViewAdapterHelper:2.9.34'
+//    //fast json
+//    implementation 'com.alibaba:fastjson:1.2.52'
+//    implementation 'com.github.1993hzw:Doodle:5.3'
+//    implementation project(':joevideolib')
+//    implementation project(':cge_library')
+//}
+//
+//
+//virtualApk {
+//    packageId = 0x6f             // The package id of Resources.
+//    targetHost='../app' // The path of application module in host project.
+//    applyHostMapping = true      // [Optional] Default value is true.
+//    forceUseHostDependences = true
+//}
+////static def releaseTime() {
+////    return new Date().format("MMddHHmm", TimeZone.getDefault())
+////}
+////在apk文件后边生成版本号信息
+//android.applicationVariants.all {
+//    variant ->
+//        variant.outputs.all {
+//            output ->
+////                if (buildType.name == "release") {
+//                outputFileName = "media_release.apk"
+////                outputFileName = "media_release_${output.getFilter(com.android.build.OutputFile.ABI)}.apk"
+////                outputFileName = "media_v${versionCode}-${releaseTime()}_${output.getFilter(com.android.build.OutputFile.ABI)}.apk"
+////                }
+//
+//        }
+//}
+apply plugin: 'com.android.library'
 
 android {
     compileSdkVersion ANDROID_COMPILE_SDK_VERSION as int
 
     defaultConfig {
-        applicationId "com.kfzs.cfyl.media"
         minSdkVersion ANDROID_MIN_SDK_VERSION as int
         targetSdkVersion ANDORID_TARGET_SDK_VERSION as int
         versionCode 1
@@ -15,41 +133,15 @@ android {
 
         ndk {
             abiFilters "armeabi-v7a"
-//            abiFilters 'x86'
-//            abiFilters "armeabi-v7a"//, 'x86'//, 'armeabi-v7a', 'x86_64', 'arm64-v8a'
         }
     }
 
-    signingConfigs {
-        config {
-            keyAlias 'sheep'
-            keyPassword 'zhaoyi2004'
-            storeFile file('.././../sign.jks')
-            storePassword 'zhaoyi2004'
-            v1SigningEnabled true
-            v2SigningEnabled false
-        }
-
-    }
     buildTypes {
         release {
             minifyEnabled false
             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
-            signingConfig signingConfigs.config
-        }
-        debug {
-            signingConfig signingConfigs.config
         }
     }
-//    splits {
-//        abi {
-//            enable true
-//            reset()
-//            include 'x86', 'armeabi-v7a'//, 'arm64-v8a'
-////            include 'mips', 'arm64-v8a', 'x86', 'x86_64', 'armeabi', 'armeabi-v7a'
-//            universalApk false
-//        }
-//    }
 
 }
 
@@ -72,12 +164,12 @@ dependencies {
     androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
     //显示图片视频帧,在插件中不兼容x86模拟器,所以x86不考虑
 //    implementation 'com.github.wseemann:FFmpegMediaMetadataRetriever:1.0.14'
-    implementation project(':share_library')
+    api project(':share_library')
     implementation("com.android.support:recyclerview-v7:$supportLibVersion")
     implementation 'com.android.support.constraint:constraint-layout:1.1.3'
     implementation 'com.github.ybq:Android-SpinKit:1.1.0'
     //All arch: armeabi, armeabi-v7a, arm64-v8a, mips, x86
-    implementation 'org.wysaid:gpuimage-plus:2.5.1'
+//    implementation 'org.wysaid:gpuimage-plus:2.5.1'
     //Pure graphics lib without ffmpeg. (all arch for branch 'min')
 //    implementation 'org.wysaid:gpuimage-plus:2.5.1-min'
     implementation('com.github.bumptech.glide:glide:4.1.1') {
@@ -91,28 +183,6 @@ dependencies {
     //fast json
     implementation 'com.alibaba:fastjson:1.2.52'
     implementation 'com.github.1993hzw:Doodle:5.3'
+    api project(':joevideolib')
+    api project(':cge_library')
 }
-
-
-virtualApk {
-    packageId = 0x6f             // The package id of Resources.
-    targetHost='../app' // The path of application module in host project.
-    applyHostMapping = true      // [Optional] Default value is true.
-    forceUseHostDependences = true
-}
-//static def releaseTime() {
-//    return new Date().format("MMddHHmm", TimeZone.getDefault())
-//}
-//在apk文件后边生成版本号信息
-android.applicationVariants.all {
-    variant ->
-        variant.outputs.all {
-            output ->
-//                if (buildType.name == "release") {
-                outputFileName = "media_release.apk"
-//                outputFileName = "media_release_${output.getFilter(com.android.build.OutputFile.ABI)}.apk"
-//                outputFileName = "media_v${versionCode}-${releaseTime()}_${output.getFilter(com.android.build.OutputFile.ABI)}.apk"
-//                }
-
-        }
-}

+ 4 - 7
media/app/src/main/AndroidManifest.xml

@@ -86,14 +86,11 @@
     <uses-permission android:name="android.permission.INTERNET" />
     <application
         android:allowBackup="true"
-        android:name=".MediaApp"
-        android:icon="@mipmap/ic_launcher"
         android:label="@string/app_name"
         android:roundIcon="@mipmap/ic_launcher_round"
-        android:supportsRtl="true"
-        android:theme="@style/media_AppTheme">
+        android:supportsRtl="true">
 
-        <activity android:name=".activity.MainActivity">
+        <activity android:name="com.kfzs.cfyl.media.activity.MainActivity">
 
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
@@ -102,11 +99,11 @@
                 <action android:name="android.intent.action.VIEW" />
             </intent-filter>
         </activity>
-        <activity android:name=".activity.ActCutVideo"
+        <activity android:name="com.kfzs.cfyl.media.activity.ActCutVideo"
             android:theme="@style/media_AppTheme"
             android:configChanges="orientation|keyboardHidden|navigation|screenSize"
             android:screenOrientation="sensor"/>
-        <activity android:name=".activity.ActEditVideo"
+        <activity android:name="com.kfzs.cfyl.media.activity.ActEditVideo"
             android:theme="@style/media_AppTheme"
             android:configChanges="orientation|keyboardHidden|navigation|screenSize"
             android:screenOrientation="sensor"/>

+ 34 - 33
media/app/src/main/java/com/kfzs/cfyl/media/MediaApp.java

@@ -1,6 +1,7 @@
 package com.kfzs.cfyl.media;
 
 import android.app.Application;
+import android.content.Context;
 import android.content.res.AssetManager;
 import android.graphics.Bitmap;
 import android.graphics.BitmapFactory;
@@ -21,50 +22,50 @@ public class MediaApp extends Application {
     public void onCreate() {
         super.onCreate();
 //        initGlide(getApplicationContext());
-        initCgeLibrary();
+        initCgeLibrary(this);
     }
 
-    private void initCgeLibrary() {
+    public static void initCgeLibrary(final Context context) {
         //The second param will be passed as the second arg of the callback function.
         //第二个参数根据自身需要设置, 将作为 loadImage 第二个参数回传
-        try {
-            CGENativeLibrary.setLoadImageCallback(mLoadImageCallback, null);
-        } catch (Exception e){
-            e.printStackTrace();
-        }
-    }
+        CGENativeLibrary.LoadImageCallback mLoadImageCallback = new CGENativeLibrary.LoadImageCallback() {
 
-    public CGENativeLibrary.LoadImageCallback mLoadImageCallback = new CGENativeLibrary.LoadImageCallback() {
+            //Notice: the 'name' passed in is just what you write in the rule, e.g: 1.jpg
+            //注意, 这里回传的name不包含任何路径名, 仅为具体的图片文件名如 1.jpg
+            @Override
+            public Bitmap loadImage(String name, Object arg) {
 
-        //Notice: the 'name' passed in is just what you write in the rule, e.g: 1.jpg
-        //注意, 这里回传的name不包含任何路径名, 仅为具体的图片文件名如 1.jpg
-        @Override
-        public Bitmap loadImage(String name, Object arg) {
+                Log.i(Common.LOG_TAG, "Loading file: " + name);
+                AssetManager am = context.getAssets();
+                InputStream is;
+                try {
+                    is = am.open(name);
+                } catch (IOException e) {
+                    Log.e(Common.LOG_TAG, "Can not open file " + name);
+                    return null;
+                }
 
-            Log.i(Common.LOG_TAG, "Loading file: " + name);
-            AssetManager am = getAssets();
-            InputStream is;
-            try {
-                is = am.open(name);
-            } catch (IOException e) {
-                Log.e(Common.LOG_TAG, "Can not open file " + name);
-                return null;
+                return BitmapFactory.decodeStream(is);
             }
 
-            return BitmapFactory.decodeStream(is);
-        }
-
-        @Override
-        public void loadImageOK(Bitmap bmp, Object arg) {
-            Log.i(Common.LOG_TAG, "Loading bitmap over, you can choose to recycle or cache");
+            @Override
+            public void loadImageOK(Bitmap bmp, Object arg) {
+                Log.i(Common.LOG_TAG, "Loading bitmap over, you can choose to recycle or cache");
 
-            //The bitmap is which you returned at 'loadImage'.
-            //You can call recycle when this function is called, or just keep it for further usage.
-            //唯一不需要马上recycle的应用场景为 多个不同的滤镜都使用到相同的bitmap
-            //那么可以选择缓存起来。
-            bmp.recycle();
+                //The bitmap is which you returned at 'loadImage'.
+                //You can call recycle when this function is called, or just keep it for further usage.
+                //唯一不需要马上recycle的应用场景为 多个不同的滤镜都使用到相同的bitmap
+                //那么可以选择缓存起来。
+                bmp.recycle();
+            }
+        };
+        try {
+            CGENativeLibrary.setLoadImageCallback(mLoadImageCallback, null);
+        } catch (Exception e){
+            e.printStackTrace();
         }
-    };
+    }
+
 }
 //    public static void initGlide(Registry registry){
 //        LogUtil.println("VideoUtil", "initGlide");

+ 1 - 0
media/cge_library/.gitignore

@@ -0,0 +1 @@
+/build

+ 42 - 0
media/cge_library/build.gradle

@@ -0,0 +1,42 @@
+apply plugin: 'com.android.library'
+
+android {
+    compileSdkVersion ANDROID_COMPILE_SDK_VERSION as int
+    buildToolsVersion ANDROID_BUILD_TOOLS_VERSION
+
+    defaultConfig {
+        minSdkVersion ANDROID_MIN_SDK_VERSION as int
+        targetSdkVersion ANDORID_TARGET_SDK_VERSION as int
+        versionCode 1
+        versionName "2.4.6"
+
+        ndk {
+            moduleName "CGE"
+//            cFlags "-std=c++11 -DANDROID_NDK -DDEBUG -D_CGE_ONLY_FILTERS_ -D_CGE_STATIC_ASSERT_ -DCGE_TEXTURE_PREMULTIPLIED=1 -DCGE_LOG_TAG=\\\"libCGE\\\" -I${project.buildDir}/../src/main/jni/include -I${project.buildDir}/../src/main/jni/include/filters"
+//            stl "gnustl_shared"
+////            abiFilters "all"
+//            abiFilters "armeabi", "armeabi-v7a"
+//            ldLibs "log", "android", "EGL", "GLESv2", "jnigraphics"
+        }
+
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
+    }
+
+    ////////////////////////////////////////
+
+    sourceSets.main {
+        jniLibs.srcDir 'src/main/libs' //set libs as .so's location instead of jni
+        jni.srcDirs = [] //disable automatic ndk-build call with auto-generated Android.mk file
+    }
+
+}
+
+dependencies {
+    implementation fileTree(dir: 'libs', include: ['*.jar'])
+    implementation "com.android.support:appcompat-v7:$supportLibVersion"
+}

+ 18 - 0
media/cge_library/proguard-rules.pro

@@ -0,0 +1,18 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in /Users/wysaid/android_develop/android-sdk-macosx/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+-keep class org.wysaid.nativePort.** { *; }

+ 2 - 0
media/cge_library/project.properties

@@ -0,0 +1,2 @@
+target=android-21
+android.library=true

+ 5 - 0
media/cge_library/src/main/AndroidManifest.xml

@@ -0,0 +1,5 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="org.wysaid.library">
+    <application android:allowBackup="true" android:label="@string/app_name">
+    </application>
+
+</manifest>

+ 17 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/AlgorithmUtil.java

@@ -0,0 +1,17 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wysaid on 16/3/9.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ * Description: algorithm目录的所有类都是为了OpenGL 辅助使用, 所以 float 使用较多.
+ */
+public class AlgorithmUtil {
+    public static float getNormalizeScaling(final float x, final float y, final float z) {
+        return (float)(1.0 / Math.sqrt(x*x + y*y + z*z));
+    }
+
+    public static float getNormalizeScaling(final float x, final float y, final float z, final float w) {
+        return (float)(1.0 / Math.sqrt(x*x + y*y + z*z + w*w));
+    }
+}

+ 51 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/Matrix2x2.java

@@ -0,0 +1,51 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wangyang on 15/11/27.
+ */
+public class Matrix2x2 {
+    public float[] data;
+
+    protected Matrix2x2() {
+        data = new float[4];
+    }
+
+    protected Matrix2x2(float[] _data) {
+        data = _data;
+    }
+
+    public static Matrix2x2 makeIdentity() {
+        return new Matrix2x2(new float[]{1.0f, 0.0f, 0.0f, 1.0f});
+    }
+
+    public static Matrix2x2 makeRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix2x2(new float[]{cosRad, sinRad, -sinRad, cosRad});
+    }
+
+    protected static float[] _mul(float[] d1, float[] d2) {
+        return new float[] {
+                d1[0] * d2[0] + d1[2] * d2[1],
+                d1[1] * d2[0] + d1[3] * d2[1],
+                d1[0] * d2[2] + d1[2] * d2[3],
+                d1[1] * d2[2] + d1[3] * d2[3]
+        };
+    }
+
+    public Matrix2x2 multiply(Matrix2x2 mat) {
+        return new Matrix2x2(_mul(this.data, mat.data));
+    }
+
+    public Matrix2x2 multiplyBy(Matrix2x2 mat) {
+        this.data = _mul(this.data, mat.data);
+        return this;
+    }
+
+    @Override
+    public Matrix2x2 clone() {
+        return new Matrix2x2(this.data.clone());
+    }
+
+
+}

+ 109 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/Matrix3x3.java

@@ -0,0 +1,109 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wangyang on 15/11/27.
+ *
+ * 3x3 float matrix stored column-major in a flat 9-element array
+ * (OpenGL convention: data[0..2] = column 0, data[3..5] = column 1,
+ * data[6..8] = column 2).
+ */
+public class Matrix3x3 {
+    // Column-major backing storage, length 9.
+    public float[] data;
+
+    Matrix3x3() {
+        data = new float[9];
+    }
+
+    // Wraps the given array directly (no defensive copy).
+    Matrix3x3(float[] _data) {
+        data = _data;
+    }
+
+    /** @return a new identity matrix. */
+    public static Matrix3x3 makeIdentity() {
+        return new Matrix3x3(new float[]{
+            1.0f, 0.0f, 0.0f,
+            0.0f, 1.0f, 0.0f,
+            0.0f, 0.0f, 1.0f
+        });
+    }
+
+    /**
+     * Axis-angle rotation. The axis (x, y, z) is normalized internally, so it
+     * need not be unit length (but must not be the zero vector).
+     *
+     * @param rad rotation angle in radians
+     */
+    public static Matrix3x3 makeRotation(float rad, float x, float y, float z) {
+        final float normScaling = AlgorithmUtil.getNormalizeScaling(x, y, z);
+
+        x *= normScaling;
+        y *= normScaling;
+        z *= normScaling;
+
+        final float cosRad = (float)Math.cos(rad);
+        final float cosp = 1.0f - cosRad;
+        final float sinRad = (float)Math.sin(rad);
+
+        return new Matrix3x3(new float[]{
+                cosRad + cosp * x * x,
+                cosp * x * y + z * sinRad,
+                cosp * x * z - y * sinRad,
+                cosp * x * y - z * sinRad,
+                cosRad + cosp * y * y,
+                cosp * y * z + x * sinRad,
+                cosp * x * z + y * sinRad,
+                cosp * y * z - x * sinRad,
+                cosRad + cosp * z * z
+        });
+    }
+
+    /** Rotation about the X axis by rad radians. */
+    public static Matrix3x3 makeXRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix3x3(new float[] {
+                1.0f, 0.0f, 0.0f,
+                0.0f, cosRad, sinRad,
+                0.0f, -sinRad, cosRad
+        });
+    }
+
+    /** Rotation about the Y axis by rad radians. */
+    public static Matrix3x3 makeYRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix3x3(new float[] {
+                cosRad, 0.0f, -sinRad,
+                0.0f, 1.0f, 0.0f,
+                sinRad, 0.0f, cosRad
+        });
+    }
+
+    /** Rotation about the Z axis by rad radians. */
+    public static Matrix3x3 makeZRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix3x3(new float[] {
+                cosRad, sinRad, 0.0f,
+                -sinRad, cosRad, 0.0f,
+                0.0f, 0.0f, 1.0f
+        });
+    }
+
+    // Column-major matrix product d1 * d2 on the raw arrays.
+    protected static float[] _mul(float[] d1, float[] d2) {
+        return new float[] {
+                d1[0] * d2[0] + d1[3] * d2[1] + d1[6] * d2[2],
+                d1[1] * d2[0] + d1[4] * d2[1] + d1[7] * d2[2],
+                d1[2] * d2[0] + d1[5] * d2[1] + d1[8] * d2[2],
+
+                d1[0] * d2[3] + d1[3] * d2[4] + d1[6] * d2[5],
+                d1[1] * d2[3] + d1[4] * d2[4] + d1[7] * d2[5],
+                d1[2] * d2[3] + d1[5] * d2[4] + d1[8] * d2[5],
+
+                d1[0] * d2[6] + d1[3] * d2[7] + d1[6] * d2[8],
+                d1[1] * d2[6] + d1[4] * d2[7] + d1[7] * d2[8],
+                d1[2] * d2[6] + d1[5] * d2[7] + d1[8] * d2[8]
+        };
+    }
+
+    /** @return a new matrix equal to (this * mat); neither operand is modified. */
+    public Matrix3x3 multiply(Matrix3x3 mat) {
+        return new Matrix3x3(_mul(this.data, mat.data));
+    }
+
+    /** In-place variant: this = this * mat. @return this, for chaining. */
+    public Matrix3x3 multiplyBy(Matrix3x3 mat) {
+        this.data = _mul(this.data, mat.data);
+        return this;
+    }
+
+    /** Deep copy: the backing array is cloned as well. */
+    @Override
+    public Matrix3x3 clone() {
+        return new Matrix3x3(this.data.clone());
+    }
+
+}

+ 212 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/Matrix4x4.java

@@ -0,0 +1,212 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wangyang on 15/11/27.
+ *
+ * 4x4 float matrix stored column-major in a flat 16-element array (OpenGL
+ * convention), with factory methods mirroring the classic GL matrix helpers
+ * (rotation, translation, scaling, perspective/frustum/ortho projection).
+ */
+public class Matrix4x4 {
+    // Column-major backing storage, length 16.
+    public float[] data;
+
+    Matrix4x4() {
+        data = new float[16];
+    }
+
+    // Wraps the given array directly (no defensive copy).
+    Matrix4x4(float[] _data) {
+        data = _data;
+    }
+
+    /** @return a new identity matrix. */
+    public static Matrix4x4 makeIdentity() {
+        return new Matrix4x4(new float[]{
+                1.0f, 0.0f, 0.0f, 0.0f,
+                0.0f, 1.0f, 0.0f, 0.0f,
+                0.0f, 0.0f, 1.0f, 0.0f,
+                0.0f, 0.0f, 0.0f, 1.0f
+        });
+    }
+
+    /**
+     * Axis-angle rotation. The axis (x, y, z) is normalized internally, so it
+     * need not be unit length (but must not be the zero vector).
+     *
+     * @param rad rotation angle in radians
+     */
+    public static Matrix4x4 makeRotation(float rad, float x, float y, float z) {
+        final float normScaling = AlgorithmUtil.getNormalizeScaling(x, y, z);
+
+        x *= normScaling;
+        y *= normScaling;
+        z *= normScaling;
+
+        final float cosRad = (float)Math.cos(rad);
+        final float cosp = 1.0f - cosRad;
+        final float sinRad = (float)Math.sin(rad);
+
+        return new Matrix4x4(new float[]{
+                cosRad + cosp * x * x,
+                cosp * x * y + z * sinRad,
+                cosp * x * z - y * sinRad,
+                0.0f,
+                cosp * x * y - z * sinRad,
+                cosRad + cosp * y * y,
+                cosp * y * z + x * sinRad,
+                0.0f,
+                cosp * x * z + y * sinRad,
+                cosp * y * z - x * sinRad,
+                cosRad + cosp * z * z,
+                0.0f,
+                0.0f,
+                0.0f,
+                0.0f,
+                1.0f
+        });
+    }
+
+    /** Rotation about the X axis by rad radians. */
+    public static Matrix4x4 makeXRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix4x4(new float[]{
+                1.0f, 0.0f, 0.0f, 0.0f,
+                0.0f, cosRad, sinRad, 0.0f,
+                0.0f, -sinRad, cosRad, 0.0f,
+                0.0f, 0.0f, 0.0f, 1.0f
+        });
+    }
+
+    /** Rotation about the Y axis by rad radians. */
+    public static Matrix4x4 makeYRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix4x4(new float[]{
+                cosRad, 0.0f, -sinRad, 0.0f,
+                0.0f, 1.0f, 0.0f, 0.0f,
+                sinRad, 0.0f, cosRad, 0.0f,
+                0.0f, 0.0f, 0.0f, 1.0f
+        });
+    }
+
+    /** Rotation about the Z axis by rad radians. */
+    public static Matrix4x4 makeZRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+        return new Matrix4x4(new float[]{
+                cosRad, sinRad, 0.0f, 0.0f,
+                -sinRad, cosRad, 0.0f, 0.0f,
+                0.0f, 0.0f, 1.0f, 0.0f,
+                0.0f, 0.0f, 0.0f, 1.0f
+        });
+    }
+
+    /** Translation matrix: (x, y, z) goes into the fourth column (elements 12..14). */
+    public static Matrix4x4 makeTranslation(float x, float y, float z) {
+        return new Matrix4x4(new float[]{
+                1.0f, 0.0f, 0.0f, 0.0f,
+                0.0f, 1.0f, 0.0f, 0.0f,
+                0.0f, 0.0f, 1.0f, 0.0f,
+                x, y, z, 1.0f
+        });
+    }
+
+    /** Axis-aligned scaling matrix. */
+    public static Matrix4x4 makeScaling(float x, float y, float z) {
+        return new Matrix4x4(new float[]{
+                x, 0.0f, 0.0f, 0.0f,
+                0.0f, y, 0.0f, 0.0f,
+                0.0f, 0.0f, z, 0.0f,
+                0.0f, 0.0f, 0.0f, 1.0f
+        });
+    }
+
+    /**
+     * Perspective projection (gluPerspective-style).
+     *
+     * @param fovyRad vertical field of view in radians
+     * @param aspect  width / height aspect ratio
+     * @param nearZ   near clipping plane distance (must be > 0)
+     * @param farZ    far clipping plane distance
+     */
+    public static Matrix4x4 makePerspective(float fovyRad, float aspect, float nearZ, float farZ) {
+
+        final float cotan = 1.0f / (float)Math.tan(fovyRad / 2.0f);
+
+        return new Matrix4x4(new float[]{
+                cotan / aspect, 0.0f, 0.0f, 0.0f,
+                0.0f, cotan, 0.0f, 0.0f,
+                0.0f, 0.0f, (farZ + nearZ) / (nearZ - farZ), -1.0f,
+                0.0f, 0.0f, (2.0f * farZ * nearZ) / (nearZ - farZ), 0.0f
+        });
+    }
+
+    /** Perspective projection from an explicit view volume (glFrustum equivalent). */
+    public static Matrix4x4 makeFrustum(float left, float right, float bottom, float top, float nearZ, float farZ) {
+
+        final float ral = right + left;
+        final float rsl = right - left;
+        final float tsb = top - bottom;
+        final float tab = top + bottom;
+        final float fan = farZ + nearZ;
+        final float fsn = farZ - nearZ;
+
+        return new Matrix4x4(new float[]{
+                2.0f * nearZ / rsl, 0.0f, 0.0f, 0.0f,
+                0.0f, 2.0f * nearZ / tsb, 0.0f, 0.0f,
+                ral / rsl, tab / tsb, -fan / fsn, -1.0f,
+                0.0f, 0.0f, (-2.0f * farZ * nearZ) / fsn, 0.0f
+        });
+    }
+
+    /** Orthographic projection (glOrtho equivalent). */
+    public static Matrix4x4 makeOrtho(float left, float right, float bottom, float top, float nearZ, float farZ)
+    {
+        final float ral = right + left;
+        final float rsl = right - left;
+        final float tsb = top - bottom;
+        final float tab = top + bottom;
+        final float fan = farZ + nearZ;
+        final float fsn = farZ - nearZ;
+
+        return new Matrix4x4(new float[]{
+                2.0f / rsl, 0.0f, 0.0f, 0.0f,
+                0.0f, 2.0f / tsb, 0.0f, 0.0f,
+                0.0f, 0.0f, -2.0f / fsn, 0.0f,
+                -ral / rsl, -tab / tsb, -fan / fsn, 1.0f
+        });
+    }
+
+    // Unported C++ makeLookAt implementation kept for reference:
+//    public static Matrix4x4 makeLookAt(float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ,	float upX, float upY, float upZ)
+//    {
+//        Vec3f ev(eyeX, eyeY, eyeZ);
+//        Vec3f cv(centerX, centerY, centerZ);
+//        Vec3f uv(upX, upY, upZ);
+//        return makeLookAt(ev, cv, uv);
+//    }
+//
+//    static inline Mat4 makeLookAt(const Vec3f& eye, const Vec3f& center, const Vec3f& up)
+//    {
+//        Vec3f forward((eye - center).normalize());
+//        Vec3f side(crossV3f(up, forward).normalize());
+//        Vec3f upVector(crossV3f(forward, side));
+//
+//        return Mat4(side[0], upVector[0], forward[0], 0.0f,
+//                side[1], upVector[1], forward[1], 0.0f,
+//                side[2], upVector[2], forward[2], 0.0f,
+//                -side.dot(eye),
+//                -upVector.dot(eye),
+//                -forward.dot(eye),
+//                1.0f);
+//    }
+
+    // Column-major matrix product d1 * d2 on the raw arrays.
+    protected static float[] _mul(float[] d1, float[] d2) {
+        return new float[]{
+                d1[0] * d2[0] + d1[4] * d2[1] + d1[8] * d2[2] + d1[12] * d2[3],
+                d1[1] * d2[0] + d1[5] * d2[1] + d1[9] * d2[2] + d1[13] * d2[3],
+                d1[2] * d2[0] + d1[6] * d2[1] + d1[10] * d2[2] + d1[14] * d2[3],
+                d1[3] * d2[0] + d1[7] * d2[1] + d1[11] * d2[2] + d1[15] * d2[3],
+                d1[0] * d2[4] + d1[4] * d2[5] + d1[8] * d2[6] + d1[12] * d2[7],
+                d1[1] * d2[4] + d1[5] * d2[5] + d1[9] * d2[6] + d1[13] * d2[7],
+                d1[2] * d2[4] + d1[6] * d2[5] + d1[10] * d2[6] + d1[14] * d2[7],
+                d1[3] * d2[4] + d1[7] * d2[5] + d1[11] * d2[6] + d1[15] * d2[7],
+                d1[0] * d2[8] + d1[4] * d2[9] + d1[8] * d2[10] + d1[12] * d2[11],
+                d1[1] * d2[8] + d1[5] * d2[9] + d1[9] * d2[10] + d1[13] * d2[11],
+                d1[2] * d2[8] + d1[6] * d2[9] + d1[10] * d2[10] + d1[14] * d2[11],
+                d1[3] * d2[8] + d1[7] * d2[9] + d1[11] * d2[10] + d1[15] * d2[11],
+                d1[0] * d2[12] + d1[4] * d2[13] + d1[8] * d2[14] + d1[12] * d2[15],
+                d1[1] * d2[12] + d1[5] * d2[13] + d1[9] * d2[14] + d1[13] * d2[15],
+                d1[2] * d2[12] + d1[6] * d2[13] + d1[10] * d2[14] + d1[14] * d2[15],
+                d1[3] * d2[12] + d1[7] * d2[13] + d1[11] * d2[14] + d1[15] * d2[15]
+        };
+    }
+
+    /** @return a new matrix equal to (this * mat); neither operand is modified. */
+    public Matrix4x4 multiply(Matrix4x4 mat) {
+        return new Matrix4x4(_mul(this.data, mat.data));
+    }
+
+    /** In-place variant: this = this * mat. @return this, for chaining. */
+    public Matrix4x4 multiplyBy(Matrix4x4 mat) {
+        this.data = _mul(this.data, mat.data);
+        return this;
+    }
+
+    /** Deep copy: the backing array is cloned as well. */
+    @Override
+    public Matrix4x4 clone() {
+        return new Matrix4x4(this.data.clone());
+    }
+
+}

+ 17 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/Vector2.java

@@ -0,0 +1,17 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wangyang on 15/11/27.
+ *
+ * Simple mutable 2-component float vector (plain data holder, no math ops).
+ */
+public class Vector2 {
+    public float x, y;
+
+    // Components default to 0.
+    public Vector2() {
+
+    }
+
+    public Vector2(float _x, float _y) {
+        x = _x;
+        y = _y;
+    }
+}

+ 18 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/Vector3.java

@@ -0,0 +1,18 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wangyang on 15/11/27.
+ *
+ * Simple mutable 3-component float vector (plain data holder, no math ops).
+ */
+public class Vector3 {
+    public float x, y, z;
+
+    // Components default to 0.
+    public Vector3() {
+
+    }
+
+    public Vector3(float _x, float _y, float _z) {
+        x = _x;
+        y = _y;
+        z = _z;
+    }
+}

+ 19 - 0
media/cge_library/src/main/java/org/wysaid/algorithm/Vector4.java

@@ -0,0 +1,19 @@
+package org.wysaid.algorithm;
+
+/**
+ * Created by wangyang on 15/11/27.
+ *
+ * Simple mutable 4-component float vector (plain data holder, no math ops).
+ */
+public class Vector4 {
+    public float x, y, z, w;
+
+    // Components default to 0.
+    public Vector4() {
+
+    }
+
+    public Vector4(float _x, float _y, float _z, float _w) {
+        x = _x;
+        y = _y;
+        z = _z;
+        w = _w;
+    }
+}

+ 418 - 0
media/cge_library/src/main/java/org/wysaid/camera/CameraInstance.java

@@ -0,0 +1,418 @@
+package org.wysaid.camera;
+
+import android.graphics.PixelFormat;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.os.Build;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * Created by wangyang on 15/7/27.
+ *
+ * Singleton wrapper around the legacy android.hardware.Camera API:
+ * opening/releasing the device, preview control, picture/preview size
+ * selection and tap-to-focus.
+ */
+
+
+// Camera is only meant to be used as a singleton.
+public class CameraInstance {
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    // Assertion message (Chinese): "CameraDevice is null! Please check."
+    private static final String ASSERT_MSG = "检测到CameraDevice 为 null! 请检查";
+
+    private Camera mCameraDevice;
+    private Camera.Parameters mParams;
+
+    public static final int DEFAULT_PREVIEW_RATE = 30;
+
+
+    private boolean mIsPreviewing = false;
+
+    // Index of the camera matching the requested facing; -1 until found.
+    private int mDefaultCameraID = -1;
+
+    private static CameraInstance mThisInstance;
+    // Actual preview size chosen by initCamera().
+    private int mPreviewWidth;
+    private int mPreviewHeight;
+
+    // Requested picture size; overwritten with the actual size by initCamera().
+    private int mPictureWidth = 1000;
+    private int mPictureHeight = 1000;
+
+    // Minimum preferred preview size; initCamera() picks a supported size >= this.
+    private int mPreferPreviewWidth = 640;
+    private int mPreferPreviewHeight = 640;
+
+    // Camera.CameraInfo.CAMERA_FACING_* of the currently opened camera.
+    private int mFacing = 0;
+
+    private CameraInstance() {}
+
+    /** Lazily-created process-wide singleton. */
+    public static synchronized CameraInstance getInstance() {
+        if(mThisInstance == null) {
+            mThisInstance = new CameraInstance();
+        }
+        return mThisInstance;
+    }
+
+    public boolean isPreviewing() { return mIsPreviewing; }
+
+    public int previewWidth() { return mPreviewWidth; }
+    public int previewHeight() { return mPreviewHeight; }
+    public int pictureWidth() { return mPictureWidth; }
+    public int pictureHeight() { return mPictureHeight; }
+
+    // NOTE(review): w is stored into mPreferPreviewHeight and h into
+    // mPreferPreviewWidth — presumably intentional because preview frames
+    // arrive rotated relative to the display; confirm with callers.
+    public void setPreferPreviewSize(int w, int h) {
+        mPreferPreviewHeight = w;
+        mPreferPreviewWidth = h;
+    }
+
+    /** Notified (synchronously) once the camera has been opened and configured. */
+    public interface CameraOpenCallback {
+        void cameraReady();
+    }
+
+    /** Opens the back-facing camera. @return true on success. */
+    public boolean tryOpenCamera(CameraOpenCallback callback) {
+        return tryOpenCamera(callback, Camera.CameraInfo.CAMERA_FACING_BACK);
+    }
+
+    public int getFacing() {
+        return mFacing;
+    }
+
+    /**
+     * Opens the camera with the given facing (Camera.CameraInfo.CAMERA_FACING_*),
+     * stopping any running preview and releasing any previously opened device
+     * first, then configures it via initCamera().
+     *
+     * @param callback invoked after a successful open; may be null
+     * @return true if the camera was opened and configured successfully
+     */
+    public synchronized boolean tryOpenCamera(CameraOpenCallback callback, int facing) {
+        Log.i(LOG_TAG, "try open camera...");
+
+        try
+        {
+            if(Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO)
+            {
+                int numberOfCameras = Camera.getNumberOfCameras();
+
+                Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+                for (int i = 0; i < numberOfCameras; i++) {
+                    Camera.getCameraInfo(i, cameraInfo);
+                    if (cameraInfo.facing == facing) {
+                        mDefaultCameraID = i;
+                        mFacing = facing;
+                    }
+                }
+            }
+            stopPreview();
+            if(mCameraDevice != null)
+                mCameraDevice.release();
+
+            if(mDefaultCameraID >= 0) {
+                mCameraDevice = Camera.open(mDefaultCameraID);
+            }
+            else {
+                // No camera matched the requested facing; fall back to the default one.
+                mCameraDevice = Camera.open();
+                mFacing = Camera.CameraInfo.CAMERA_FACING_BACK; //default: back facing
+            }
+        }
+        catch(Exception e)
+        {
+            Log.e(LOG_TAG, "Open Camera Failed!");
+            e.printStackTrace();
+            mCameraDevice = null;
+            return false;
+        }
+
+        if(mCameraDevice != null) {
+            Log.i(LOG_TAG, "Camera opened!");
+
+            try {
+                initCamera(DEFAULT_PREVIEW_RATE);
+            } catch (Exception e) {
+                mCameraDevice.release();
+                mCameraDevice = null;
+                return false;
+            }
+
+            if (callback != null) {
+                callback.cameraReady();
+            }
+
+            return true;
+        }
+
+        return false;
+    }
+
+    /** Stops preview, clears the preview callback, and releases the device. */
+    public synchronized void stopCamera() {
+        if(mCameraDevice != null) {
+            mIsPreviewing = false;
+            mCameraDevice.stopPreview();
+            mCameraDevice.setPreviewCallback(null);
+            mCameraDevice.release();
+            mCameraDevice = null;
+        }
+    }
+
+    public boolean isCameraOpened() {
+        return mCameraDevice != null;
+    }
+
+    /**
+     * Starts preview rendering into the given SurfaceTexture. No-op (with an
+     * error log) if a preview is already running or the camera is closed.
+     * NOTE(review): a non-null callback is registered with
+     * setPreviewCallbackWithBuffer, but no buffer is ever supplied via
+     * addCallbackBuffer here, so the callback will not fire unless callers
+     * add buffers themselves — confirm this is intended.
+     */
+    public synchronized void startPreview(SurfaceTexture texture, Camera.PreviewCallback callback) {
+        Log.i(LOG_TAG, "Camera startPreview...");
+        if(mIsPreviewing) {
+            Log.e(LOG_TAG, "Err: camera is previewing...");
+            return ;
+        }
+
+        if(mCameraDevice != null) {
+            try {
+                mCameraDevice.setPreviewTexture(texture);
+//                mCameraDevice.addCallbackBuffer(callbackBuffer);
+//                mCameraDevice.setPreviewCallbackWithBuffer(callback);
+                mCameraDevice.setPreviewCallbackWithBuffer(callback);
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+
+            mCameraDevice.startPreview();
+            mIsPreviewing = true;
+        }
+    }
+
+    /** Convenience overload: preview into a SurfaceTexture, no frame callback. */
+    public void startPreview(SurfaceTexture texture) {
+        startPreview(texture, null);
+    }
+
+    /** Convenience overload: frame callback only, no SurfaceTexture. */
+    public void startPreview(Camera.PreviewCallback callback) {
+        startPreview(null, callback);
+    }
+
+    /** Stops a running preview without releasing the camera. */
+    public synchronized void stopPreview() {
+        if(mIsPreviewing && mCameraDevice != null) {
+            Log.i(LOG_TAG, "Camera stopPreview...");
+            mIsPreviewing = false;
+            mCameraDevice.stopPreview();
+        }
+    }
+
+    /** @return the device's current parameters, or null (after an assert) when closed. */
+    public synchronized Camera.Parameters getParams() {
+        if(mCameraDevice != null)
+            return mCameraDevice.getParameters();
+        assert mCameraDevice != null : ASSERT_MSG;
+        return null;
+    }
+
+    /** Applies the given parameters to the device (no-op when closed). */
+    public synchronized void setParams(Camera.Parameters param) {
+        if(mCameraDevice != null) {
+            mParams = param;
+            mCameraDevice.setParameters(mParams);
+        }
+        assert mCameraDevice != null : ASSERT_MSG;
+    }
+
+    public Camera getCameraDevice() {
+        return mCameraDevice;
+    }
+
+    // Sorts sizes in descending order (largest first).
+    private Comparator<Camera.Size> comparatorBigger = new Comparator<Camera.Size>() {
+        @Override
+        public int compare(Camera.Size lhs, Camera.Size rhs) {
+            int w = rhs.width - lhs.width;
+            if(w == 0)
+                return rhs.height - lhs.height;
+            return w;
+        }
+    };
+
+    // Sorts sizes in ascending order (smallest first).
+    private Comparator<Camera.Size> comparatorSmaller= new Comparator<Camera.Size>() {
+        @Override
+        public int compare(Camera.Size lhs, Camera.Size rhs) {
+            int w = lhs.width - rhs.width;
+            if(w == 0)
+                return lhs.height - rhs.height;
+            return w;
+        }
+    };
+
+    /**
+     * Configures picture format, picture/preview size, frame rate and focus
+     * mode, then reads back the values the driver actually accepted into the
+     * mPreview*/mPicture* fields.
+     * Note: the previewRate parameter is effectively ignored — it is
+     * overwritten below with the maximum supported frame rate.
+     */
+    public void initCamera(int previewRate) {
+        if(mCameraDevice == null) {
+            Log.e(LOG_TAG, "initCamera: Camera is not opened!");
+            return;
+        }
+
+        mParams = mCameraDevice.getParameters();
+        List<Integer> supportedPictureFormats = mParams.getSupportedPictureFormats();
+
+        for(int fmt : supportedPictureFormats) {
+            Log.i(LOG_TAG, String.format("Picture Format: %x", fmt));
+        }
+
+        mParams.setPictureFormat(PixelFormat.JPEG);
+
+        List<Camera.Size> picSizes = mParams.getSupportedPictureSizes();
+        Camera.Size picSz = null;
+
+        Collections.sort(picSizes, comparatorBigger);
+
+        // The list is descending, so the last size still >= the request wins,
+        // i.e. the smallest supported size covering (mPictureWidth, mPictureHeight).
+        for(Camera.Size sz : picSizes) {
+            Log.i(LOG_TAG, String.format("Supported picture size: %d x %d", sz.width, sz.height));
+            if(picSz == null || (sz.width >= mPictureWidth && sz.height >= mPictureHeight)) {
+                picSz = sz;
+            }
+        }
+
+        List<Camera.Size> prevSizes = mParams.getSupportedPreviewSizes();
+        Camera.Size prevSz = null;
+
+        Collections.sort(prevSizes, comparatorBigger);
+
+        // Same strategy: smallest supported preview size covering the preferred size.
+        for(Camera.Size sz : prevSizes) {
+            Log.i(LOG_TAG, String.format("Supported preview size: %d x %d", sz.width, sz.height));
+            if(prevSz == null || (sz.width >= mPreferPreviewWidth && sz.height >= mPreferPreviewHeight)) {
+                prevSz = sz;
+            }
+        }
+
+        List<Integer> frameRates = mParams.getSupportedPreviewFrameRates();
+
+        int fpsMax = 0;
+
+        for(Integer n : frameRates) {
+            Log.i(LOG_TAG, "Supported frame rate: " + n);
+            if(fpsMax < n) {
+                fpsMax = n;
+            }
+        }
+
+        mParams.setPreviewSize(prevSz.width, prevSz.height);
+        mParams.setPictureSize(picSz.width, picSz.height);
+
+        List<String> focusModes = mParams.getSupportedFocusModes();
+        if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)){
+            mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+        }
+
+        previewRate = fpsMax;
+        mParams.setPreviewFrameRate(previewRate); // set the camera preview frame rate
+//        mParams.setPreviewFpsRange(20, 60);
+
+        try {
+            mCameraDevice.setParameters(mParams);
+        }catch (Exception e) {
+            e.printStackTrace();
+        }
+
+
+        // Read back what the driver actually applied.
+        mParams = mCameraDevice.getParameters();
+
+        Camera.Size szPic = mParams.getPictureSize();
+        Camera.Size szPrev = mParams.getPreviewSize();
+
+        mPreviewWidth = szPrev.width;
+        mPreviewHeight = szPrev.height;
+
+        mPictureWidth = szPic.width;
+        mPictureHeight = szPic.height;
+
+        Log.i(LOG_TAG, String.format("Camera Picture Size: %d x %d", szPic.width, szPic.height));
+        Log.i(LOG_TAG, String.format("Camera Preview Size: %d x %d", szPrev.width, szPrev.height));
+    }
+
+    /**
+     * Records the focus mode into mParams if the device supports it.
+     * NOTE(review): the modified mParams are never written back via
+     * setParameters here — confirm callers apply them afterwards.
+     */
+    public synchronized void setFocusMode(String focusMode) {
+
+        if(mCameraDevice == null)
+            return;
+
+        mParams = mCameraDevice.getParameters();
+        List<String> focusModes = mParams.getSupportedFocusModes();
+        if(focusModes.contains(focusMode)){
+            mParams.setFocusMode(focusMode);
+        }
+    }
+
+    /**
+     * Chooses and applies a supported picture size close to width x height:
+     * the smallest supported size >= the request when isBigger is true,
+     * otherwise the largest supported size <= the request. When the camera is
+     * closed, only records the requested size for later use.
+     */
+    public synchronized void setPictureSize(int width, int height, boolean isBigger) {
+
+        if(mCameraDevice == null) {
+            mPictureWidth = width;
+            mPictureHeight = height;
+            return;
+        }
+
+        mParams = mCameraDevice.getParameters();
+
+
+        List<Camera.Size> picSizes = mParams.getSupportedPictureSizes();
+        Camera.Size picSz = null;
+
+        if(isBigger) {
+            Collections.sort(picSizes, comparatorBigger);
+            for(Camera.Size sz : picSizes) {
+                if(picSz == null || (sz.width >= width && sz.height >= height)) {
+                    picSz = sz;
+                }
+            }
+        } else {
+            Collections.sort(picSizes, comparatorSmaller);
+            for(Camera.Size sz : picSizes) {
+                if(picSz == null || (sz.width <= width && sz.height <= height)) {
+                    picSz = sz;
+                }
+            }
+        }
+
+        mPictureWidth = picSz.width;
+        mPictureHeight= picSz.height;
+
+        try {
+            mParams.setPictureSize(mPictureWidth, mPictureHeight);
+            mCameraDevice.setParameters(mParams);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    /** Tap-to-focus at the normalized point (x, y) with a default radius of 0.2. */
+    public void focusAtPoint(float x, float y, final Camera.AutoFocusCallback callback) {
+        focusAtPoint(x, y, 0.2f, callback);
+    }
+
+    /**
+     * Focuses at the normalized point (x, y), each in [0, 1], mapped to the
+     * camera metering space [-1000, 1000]. Falls back to a plain autofocus
+     * when the device does not support metering areas.
+     * NOTE(review): the focus Rect spans only focusRadius (right/bottom are
+     * left/top + focusRadius), not 2 * focusRadius — confirm this is intended.
+     */
+    public synchronized void focusAtPoint(float x, float y, float radius, final Camera.AutoFocusCallback callback) {
+        if(mCameraDevice == null) {
+            Log.e(LOG_TAG, "Error: focus after release.");
+            return;
+        }
+
+        mParams = mCameraDevice.getParameters();
+
+        if(mParams.getMaxNumMeteringAreas() > 0) {
+
+            // Convert normalized coordinates to the [-1000, 1000] metering space.
+            int focusRadius = (int) (radius * 1000.0f);
+            int left = (int) (x * 2000.0f - 1000.0f) - focusRadius;
+            int top = (int) (y * 2000.0f - 1000.0f) - focusRadius;
+
+            Rect focusArea = new Rect();
+            focusArea.left = Math.max(left, -1000);
+            focusArea.top = Math.max(top, -1000);
+            focusArea.right = Math.min(left + focusRadius, 1000);
+            focusArea.bottom = Math.min(top + focusRadius, 1000);
+            List<Camera.Area> meteringAreas = new ArrayList<Camera.Area>();
+            meteringAreas.add(new Camera.Area(focusArea, 800));
+
+            try {
+                mCameraDevice.cancelAutoFocus();
+                mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+                mParams.setFocusAreas(meteringAreas);
+                mCameraDevice.setParameters(mParams);
+                mCameraDevice.autoFocus(callback);
+            } catch (Exception e) {
+                Log.e(LOG_TAG, "Error: focusAtPoint failed: " + e.toString());
+            }
+        } else {
+            Log.i(LOG_TAG, "The device does not support metering areas...");
+            try {
+                mCameraDevice.autoFocus(callback);
+            } catch (Exception e) {
+                Log.e(LOG_TAG, "Error: focusAtPoint failed: " + e.toString());
+            }
+        }
+
+    }
+}

+ 104 - 0
media/cge_library/src/main/java/org/wysaid/common/Common.java

@@ -0,0 +1,104 @@
+package org.wysaid.common;
+
+import android.graphics.Bitmap;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.GLUtils;
+import android.util.Log;
+
+import java.nio.FloatBuffer;
+
+/**
+ * Created by wangyang on 15/7/27.
+ *
+ * Miscellaneous OpenGL ES 2.0 helpers: error logging, texture creation and
+ * vertex-buffer creation. All methods must run on a thread that has a
+ * current GL context.
+ */
+
+public class Common {
+
+    public static final boolean DEBUG = true;
+    public static final String LOG_TAG = "libCGE_java";
+    // Full-screen quad in normalized device coordinates (4 xy pairs).
+    public static final float[] FULLSCREEN_VERTICES = {-1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f};
+
+    /**
+     * Drains and logs all pending GL errors, labelling each with the given
+     * tag. GL_FALSE (0) equals GL_NO_ERROR, so the loop ends when the error
+     * queue is empty; the 32-iteration cap guards against endless loops.
+     */
+    public static void checkGLError(final String tag) {
+        int loopCnt = 0;
+        for(int err = GLES20.glGetError(); loopCnt < 32 && err != GLES20.GL_FALSE; err = GLES20.glGetError(), ++loopCnt)
+        {
+            String msg;
+            switch (err)
+            {
+                case GLES20.GL_INVALID_ENUM:
+                    msg = "invalid enum"; break;
+                case GLES20.GL_INVALID_FRAMEBUFFER_OPERATION:
+                    msg = "invalid framebuffer operation"; break;
+                case GLES20.GL_INVALID_OPERATION:
+                    msg = "invalid operation";break;
+                case GLES20.GL_INVALID_VALUE:
+                    msg = "invalid value";break;
+                case GLES20.GL_OUT_OF_MEMORY:
+                    msg = "out of memory"; break;
+                default: msg = "unknown error";
+            }
+            Log.e(LOG_TAG, String.format("After tag \"%s\" glGetError %s(0x%x) ", tag, msg, err));
+        }
+    }
+
+    /** Applies min/mag filter and S/T wrap mode to the currently bound texture of the given target. */
+    public static void texParamHelper(int type, int filter, int wrap) {
+        GLES20.glTexParameterf(type, GLES20.GL_TEXTURE_MIN_FILTER, filter);
+        GLES20.glTexParameterf(type, GLES20.GL_TEXTURE_MAG_FILTER, filter);
+        GLES20.glTexParameteri(type, GLES20.GL_TEXTURE_WRAP_S, wrap);
+        GLES20.glTexParameteri(type, GLES20.GL_TEXTURE_WRAP_T, wrap);
+    }
+
+    /** Creates an uninitialized RGBA texture with linear filtering and clamp-to-edge wrapping. */
+    public static int genBlankTextureID(int width, int height) {
+        return genBlankTextureID(width, height, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE);
+    }
+
+    /** Creates an uninitialized width x height RGBA texture. @return the GL texture id (left bound). */
+    public static int genBlankTextureID(int width, int height, int filter, int wrap) {
+        int[] texID = new int[1];
+        GLES20.glGenTextures(1, texID, 0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID[0]);
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+        texParamHelper(GLES20.GL_TEXTURE_2D, filter, wrap);
+        return texID[0];
+    }
+
+    /** Uploads the bitmap into a new texture with linear filtering and clamp-to-edge wrapping. */
+    public static int genNormalTextureID(Bitmap bmp) {
+        return genNormalTextureID(bmp, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE);
+    }
+
+    /** Uploads the bitmap into a new GL_TEXTURE_2D texture. @return the GL texture id (left bound). */
+    public static int genNormalTextureID(Bitmap bmp, int filter, int wrap) {
+        int[] texID = new int[1];
+        GLES20.glGenTextures(1, texID, 0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID[0]);
+        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
+        texParamHelper(GLES20.GL_TEXTURE_2D, filter, wrap);
+        return texID[0];
+    }
+
+    /** Creates a GL_TEXTURE_EXTERNAL_OES texture suitable for use with a SurfaceTexture. */
+    public static int genSurfaceTextureID() {
+        int[] texID = new int[1];
+        GLES20.glGenTextures(1, texID, 0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texID[0]);
+        texParamHelper(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE);
+        return texID[0];
+    }
+
+    /** Deletes a single texture by id. */
+    public static void deleteTextureID(int texID) {
+        GLES20.glDeleteTextures(1, new int[]{texID}, 0);
+    }
+
+    /**
+     * Creates a static VBO holding FULLSCREEN_VERTICES (32 bytes = 8 floats * 4).
+     * @return the buffer id, or 0 when called without a current GL context.
+     */
+    public static int genFullscreenVertexArrayBuffer() {
+        int[] vertexBuffer = new int[1];
+        GLES20.glGenBuffers(1, vertexBuffer, 0);
+
+        if(vertexBuffer[0] == 0) {
+            Log.e(LOG_TAG, "Invalid VertexBuffer! You must call this within an OpenGL thread!");
+            return 0;
+        }
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBuffer[0]);
+        // NOTE(review): FloatBuffer.allocate() yields a non-direct buffer;
+        // Android's GL bindings generally expect direct NIO buffers — confirm
+        // this upload works on all target devices.
+        FloatBuffer buffer = FloatBuffer.allocate(FULLSCREEN_VERTICES.length);
+        buffer.put(FULLSCREEN_VERTICES).position(0);
+        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, 32, buffer, GLES20.GL_STATIC_DRAW);
+        return vertexBuffer[0];
+    }
+}

+ 60 - 0
media/cge_library/src/main/java/org/wysaid/common/ConcurrentQueueHelper.java

@@ -0,0 +1,60 @@
+package org.wysaid.common;
+
+import android.util.Log;
+
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+/**
+ * Author: wangyang
+ * Mail: admin@wysaid.org
+ * Date: 2017/6/16
+ * Description: Thread-safe task queue: producers offer() Runnables from any
+ * thread and a consumer thread drains them with consume().
+ */
+
+public class ConcurrentQueueHelper {
+    protected ConcurrentLinkedQueue<Runnable> mQueue = new ConcurrentLinkedQueue<>();
+
+    public ConcurrentQueueHelper() {
+
+    }
+
+    /** Enqueues a task; safe to call from any thread. */
+    public void offer(Runnable runnable) {
+        mQueue.offer(runnable);
+    }
+
+    /**
+     * Runs every queued Runnable in FIFO order until the queue is empty.
+     * Any Throwable thrown by a task aborts the drain and is logged.
+     */
+    public void consume() {
+
+        try {
+
+            Runnable runnable = null;
+
+            do {
+                runnable = mQueue.poll();
+                if(runnable != null)
+                    runnable.run();
+            } while (runnable != null);
+        } catch (Throwable throwable) {
+            Log.e(Common.LOG_TAG, "ConcurrentQueueHelper: " + throwable.getLocalizedMessage());
+        }
+    }
+
+    public boolean isEmpty() {
+        return mQueue.isEmpty();
+    }
+
+    // NOTE(review): this body is identical to consume(); despite the name it
+    // runs every queued task, not only the last one — confirm whether the
+    // intent was to discard all but the most recent Runnable.
+    public void consumeLast() {
+
+        try {
+            Runnable runnable = null;
+
+            do {
+                runnable = mQueue.poll();
+                if(runnable != null)
+                    runnable.run();
+            } while (runnable != null);
+        } catch (Throwable throwable) {
+            Log.e(Common.LOG_TAG, "ConcurrentQueueHelper: " + throwable.getLocalizedMessage());
+        }
+    }
+
+}

+ 37 - 0
media/cge_library/src/main/java/org/wysaid/common/FrameBufferObject.java

@@ -0,0 +1,37 @@
+package org.wysaid.common;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+/**
+ * Created by wangyang on 15/7/27.
+ * Minimal wrapper around a single OpenGL ES framebuffer object (FBO).
+ * All methods must be called on a thread with a current GL context.
+ */
+
+public class FrameBufferObject {
+    // GL framebuffer name generated in the constructor.
+    private int mFramebufferID;
+
+    // Generates the framebuffer immediately; requires a current GL context.
+    public FrameBufferObject() {
+        int[] buf = new int[1];
+        GLES20.glGenFramebuffers(1, buf, 0);
+        mFramebufferID = buf[0];
+    }
+
+    // Deletes the framebuffer. No finalizer exists, so callers must invoke
+    // this explicitly on the GL thread to avoid leaking the GL object.
+    public void release() {
+        GLES20.glDeleteFramebuffers(1, new int[]{mFramebufferID}, 0);
+    }
+
+    // Makes this framebuffer the current GL_FRAMEBUFFER target.
+    public void bind() {
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferID);
+    }
+
+    // Binds this framebuffer and attaches 'texID' to its GL_COLOR_ATTACHMENT0,
+    // logging an error if the framebuffer is not complete afterwards.
+    public void bindTexture(int texID) {
+        bind();
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texID, 0);
+        if(GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE)
+        {
+            Log.e(Common.LOG_TAG, "CGE::FrameBuffer::bindTexture2D - Frame buffer is not valid!");
+        }
+    }
+
+}

+ 218 - 0
media/cge_library/src/main/java/org/wysaid/common/ProgramObject.java

@@ -0,0 +1,218 @@
+package org.wysaid.common;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+/**
+ * Created by wangyang on 15/7/27.
+ * Wrapper around a GL shader program: compiles and links a vertex/fragment
+ * shader pair and offers by-name uniform/attribute convenience setters.
+ * All methods require a current GL context on the calling thread.
+ */
+
+
+public class ProgramObject {
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    private int mProgramID;
+    private ShaderObject mVertexShader, mFragmentShader;
+
+    // After constructing with this no-arg constructor you may perform some
+    // attribute-location bindings (bindAttribLocation), then call init().
+    public ProgramObject() {
+        mProgramID = GLES20.glCreateProgram();
+    }
+
+    // Convenience constructor: compile and link immediately.
+    // NOTE(review): the boolean result of init() is discarded here; callers
+    // that care about failure should call init() themselves.
+    public ProgramObject(final String vsh, final String fsh) {
+        init(vsh, fsh);
+    }
+
+    public int programID() {
+        return mProgramID;
+    }
+
+    // Deletes the GL program; safe to call more than once.
+    public final void release() {
+        if(mProgramID != 0)
+        {
+            GLES20.glDeleteProgram(mProgramID);
+            mProgramID = 0;
+        }
+    }
+
+    public boolean init(final String vsh, final String fsh) {
+        return init(vsh, fsh, mProgramID);
+    }
+
+    // Compiles both shaders, attaches them to 'programID' (creating a new
+    // program when 0) and links. On success the previous program (if any and
+    // different) is deleted and mProgramID is replaced; returns false on
+    // compile/link failure.
+    // NOTE(review): on link failure a freshly created programID is never
+    // deleted - a small GL object leak; confirm whether callers retry.
+    public boolean init(final String vsh, final String fsh, int programID) {
+        if(programID == 0) {
+            programID = GLES20.glCreateProgram();
+
+            if(programID == 0) {
+                Log.e(LOG_TAG, "Invalid Program ID! Check if the context is bound!");
+                return false;
+            }
+        }
+
+        if(mVertexShader != null)
+            mVertexShader.release();
+        if(mFragmentShader != null)
+            mFragmentShader.release();
+
+        mVertexShader = new ShaderObject(vsh, GLES20.GL_VERTEX_SHADER);
+        mFragmentShader = new ShaderObject(fsh, GLES20.GL_FRAGMENT_SHADER);
+
+        GLES20.glAttachShader(programID, mVertexShader.shaderID());
+        GLES20.glAttachShader(programID, mFragmentShader.shaderID());
+        Common.checkGLError("AttachShaders...");
+        GLES20.glLinkProgram(programID);
+
+        int[] programStatus = {0};
+        GLES20.glGetProgramiv(programID, GLES20.GL_LINK_STATUS, programStatus, 0);
+
+        //Shader objects can be released as soon as the program is linked.
+        mVertexShader.release();
+        mFragmentShader.release();
+        mVertexShader = null;
+        mFragmentShader = null;
+
+        if(programStatus[0] != GLES20.GL_TRUE) {
+            String msg = GLES20.glGetProgramInfoLog(programID);
+            Log.e(LOG_TAG, msg);
+            return false;
+        }
+
+        if(mProgramID != programID && mProgramID != 0) {
+            GLES20.glDeleteProgram(mProgramID);
+        }
+
+        mProgramID = programID;
+        return true;
+    }
+
+    // Makes this program current (glUseProgram). The sendUniform* helpers
+    // below require the program to be bound first.
+    public void bind() {
+        GLES20.glUseProgram(mProgramID);
+    }
+
+    // Looks up a uniform location by name on every call (no caching);
+    // logs when the name is missing and Common.DEBUG is set.
+    public int getUniformLoc(final String name) {
+        int uniform = GLES20.glGetUniformLocation(mProgramID, name);
+        if(Common.DEBUG) {
+            if(uniform < 0)
+                Log.e(LOG_TAG, String.format("uniform name %s does not exist", name));
+        }
+        return uniform;
+    }
+
+    public void sendUniformf(final String name, float x) {
+        GLES20.glUniform1f(getUniformLoc(name), x);
+    }
+
+    public void sendUniformf(final String name, float x, float y) {
+        GLES20.glUniform2f(getUniformLoc(name), x, y);
+    }
+
+    public void sendUniformf(final String name, float x, float y, float z) {
+        GLES20.glUniform3f(getUniformLoc(name), x, y, z);
+    }
+
+    public void sendUniformf(final String name, float x, float y, float z, float w) {
+        GLES20.glUniform4f(getUniformLoc(name), x, y, z, w);
+    }
+
+    public void sendUniformi(final String name, int x) {
+        GLES20.glUniform1i(getUniformLoc(name), x);
+    }
+
+    public void sendUniformi(final String name, int x, int y) {
+        GLES20.glUniform2i(getUniformLoc(name), x, y);
+    }
+
+    public void sendUniformi(final String name, int x, int y, int z) {
+        GLES20.glUniform3i(getUniformLoc(name), x, y, z);
+    }
+
+    public void sendUniformi(final String name, int x, int y, int z, int w) {
+        GLES20.glUniform4i(getUniformLoc(name), x, y, z, w);
+    }
+
+    public void sendUniformMat2(final String name, int count, boolean transpose, float[] matrix) {
+        GLES20.glUniformMatrix2fv(getUniformLoc(name), count, transpose, matrix, 0);
+    }
+
+    public void sendUniformMat3(final String name, int count, boolean transpose, float[] matrix) {
+        GLES20.glUniformMatrix3fv(getUniformLoc(name), count, transpose, matrix, 0);
+    }
+
+    public void sendUniformMat4(final String name, int count, boolean transpose, float[] matrix) {
+        GLES20.glUniformMatrix4fv(getUniformLoc(name), count, transpose, matrix, 0);
+    }
+
+    public int attributeLocation(final String name) {
+        return GLES20.glGetAttribLocation(mProgramID, name);
+    }
+
+    // Per GL semantics, glBindAttribLocation only takes effect at the next
+    // link - call this before init().
+    public void bindAttribLocation(final String name, int index) {
+        GLES20.glBindAttribLocation(mProgramID, index, name);
+    }
+
+    /**
+     * Created by wangyang on 15/7/18.
+     * Holds one compiled shader stage; released after the program links.
+     */
+    public static class ShaderObject {
+
+        private int mShaderType;
+        private int mShaderID;
+
+
+        public int shaderID() {
+            return mShaderID;
+        }
+
+        public ShaderObject() {
+            mShaderType = 0;
+            mShaderID = 0;
+        }
+
+        public ShaderObject(final String shaderCode, final int shaderType) {
+            init(shaderCode, shaderType);
+        }
+
+        // Compiles 'shaderCode' as the given stage; returns false on failure.
+        public boolean init(final String shaderCode, final int shaderType) {
+            mShaderType = shaderType;
+            mShaderID = loadShader(shaderType, shaderCode);
+
+            //Debug Only
+            assert mShaderID != 0 : "Shader Create Failed!";
+
+            if(mShaderID == 0) {
+                Log.e(LOG_TAG, "glCreateShader Failed!...");
+                return false;
+            }
+
+            return true;
+        }
+
+        public final void release() {
+            if(mShaderID == 0)
+                return;
+            GLES20.glDeleteShader(mShaderID);
+            mShaderID = 0;
+        }
+
+        // Creates, sources and compiles one shader; on compile error logs the
+        // info log, deletes the shader and returns 0.
+        public static int loadShader(int type, final String code) {
+            int shaderID = GLES20.glCreateShader(type);
+
+            if(shaderID != 0) {
+                GLES20.glShaderSource(shaderID, code);
+                GLES20.glCompileShader(shaderID);
+                int[] compiled = {0};
+                GLES20.glGetShaderiv(shaderID, GLES20.GL_COMPILE_STATUS, compiled, 0);
+                if(compiled[0] != GLES20.GL_TRUE)
+                {
+                    String errMsg = GLES20.glGetShaderInfoLog(shaderID);
+                    Log.e(LOG_TAG, errMsg);
+                    GLES20.glDeleteShader(shaderID);
+                    return 0;
+                }
+            }
+            return shaderID;
+        }
+
+    }
+}

+ 199 - 0
media/cge_library/src/main/java/org/wysaid/common/SharedContext.java

@@ -0,0 +1,199 @@
+package org.wysaid.common;
+
+import android.annotation.SuppressLint;
+import android.opengl.EGL14;
+import android.util.Log;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wangyang on 15/12/13.
+ * Creates an off-screen (or window/pixmap) EGL context + surface, optionally
+ * sharing GL objects with an existing EGLContext. Used to run GL work on
+ * background threads. All methods after create() must run on the thread that
+ * owns the context.
+ */
+
+@SuppressLint("InlinedApi")
+public class SharedContext {
+    public static final String LOG_TAG = Common.LOG_TAG;
+    public static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+    private EGLContext mContext;
+    private EGLConfig mConfig;
+    private EGLDisplay mDisplay;
+    private EGLSurface mSurface;
+    private EGL10 mEgl;
+    private GL10 mGl;
+
+    private static int mBitsR = 8, mBitsG = 8, mBitsB = 8, mBitsA = 8;
+
+    //Note: once set, this affects all subsequent context creations.
+    //NOTE(review): these fields are never read - initEGL() hardcodes 8-bit
+    //RGBA in configSpec below; confirm whether this setter should feed it.
+    static public void setContextColorBits(int r, int g, int b, int a) {
+        mBitsR = r;
+        mBitsG = g;
+        mBitsB = b;
+        mBitsA = a;
+    }
+
+    // Creates a stand-alone 64x64 pbuffer context (no sharing).
+    public static SharedContext create() {
+        return create(EGL10.EGL_NO_CONTEXT, 64, 64, EGL10.EGL_PBUFFER_BIT, null);
+    }
+
+    public static SharedContext create(int width, int height) {
+        return create(EGL10.EGL_NO_CONTEXT, width, height, EGL10.EGL_PBUFFER_BIT, null);
+    }
+
+    public static SharedContext create(EGLContext context, int width, int height) {
+        return create(context, width, height, EGL10.EGL_PBUFFER_BIT, null);
+    }
+
+    //contextType: EGL10.EGL_PBUFFER_BIT
+    //             EGL10.EGL_WINDOW_BIT
+    //             EGL10.EGL_PIXMAP_BIT
+    //             EGL_RECORDABLE_ANDROID ( = 0x3142 )
+    //             etc.
+    //Returns null when EGL initialization fails; 'obj' is the native window
+    //or pixmap for the window/pixmap surface types (unused for pbuffer).
+    public static SharedContext create(EGLContext context, int width, int height, int contextType, Object obj) {
+
+        SharedContext sharedContext = new SharedContext();
+        if(!sharedContext.initEGL(context, width, height, contextType, obj)) {
+            sharedContext.release();
+            sharedContext = null;
+        }
+        return sharedContext;
+    }
+
+    public EGLContext getContext() {
+        return mContext;
+    }
+
+    public EGLDisplay getDisplay() {
+        return mDisplay;
+    }
+
+    public EGLSurface getSurface() {
+        return mSurface;
+    }
+
+    public EGL10 getEGL() {
+        return mEgl;
+    }
+
+    public GL10 getGL() {
+        return mGl;
+    }
+
+    SharedContext() {}
+
+    // Unbinds and destroys the context, surface and display connection.
+    public void release() {
+        Log.i(LOG_TAG, "#### CGESharedGLContext Destroying context... ####");
+        if(mDisplay != EGL10.EGL_NO_DISPLAY) {
+            mEgl.eglMakeCurrent(mDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
+            mEgl.eglDestroyContext(mDisplay, mContext);
+            mEgl.eglDestroySurface(mDisplay, mSurface);
+            mEgl.eglTerminate(mDisplay);
+        }
+
+        mDisplay = EGL10.EGL_NO_DISPLAY;
+        mSurface = EGL10.EGL_NO_SURFACE;
+        mContext = EGL10.EGL_NO_CONTEXT;
+    }
+
+    // Binds this context/surface to the calling thread.
+    public void makeCurrent() {
+        if(!mEgl.eglMakeCurrent(mDisplay, mSurface, mSurface, mContext)) {
+            Log.e(LOG_TAG, "eglMakeCurrent failed:" + mEgl.eglGetError());
+        }
+    }
+
+    public boolean swapBuffers() {
+        return mEgl.eglSwapBuffers(mDisplay, mSurface);
+    }
+
+    // Performs the full EGL bring-up: display -> config -> context (ES2,
+    // optionally shared) -> surface -> makeCurrent. The order of these calls
+    // is mandated by EGL and must not be changed. Returns false on any step
+    // failing (caller is expected to invoke release()).
+    private boolean initEGL(EGLContext context, int width, int height, int contextType, Object obj) {
+
+        int[] contextAttribList = {
+                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+                EGL10.EGL_NONE
+        };
+
+        int[] configSpec = {
+                EGL10.EGL_SURFACE_TYPE, contextType,
+                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+                EGL10.EGL_RED_SIZE, 8, EGL10.EGL_GREEN_SIZE, 8,
+                EGL10.EGL_BLUE_SIZE, 8, EGL10.EGL_ALPHA_SIZE, 8,
+                EGL10.EGL_NONE
+        };
+
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] numConfig = new int[1];
+        int[] version = new int[2];
+
+        int surfaceAttribList[] = {
+                EGL10.EGL_WIDTH, width,
+                EGL10.EGL_HEIGHT, height,
+                EGL10.EGL_NONE
+        };
+
+        mEgl = (EGL10) EGLContext.getEGL();
+
+        if((mDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY)) == EGL10.EGL_NO_DISPLAY) {
+            Log.e(LOG_TAG, String.format("eglGetDisplay() returned error 0x%x", mEgl.eglGetError()));
+            return false;
+        }
+
+        if(!mEgl.eglInitialize(mDisplay, version)) {
+            Log.e(LOG_TAG, String.format("eglInitialize() returned error 0x%x", mEgl.eglGetError()));
+            return false;
+        }
+
+        Log.i(LOG_TAG, String.format("eglInitialize - major: %d, minor: %d", version[0], version[1]));
+
+        if(!mEgl.eglChooseConfig(mDisplay, configSpec, configs, 1, numConfig)) {
+            Log.e(LOG_TAG, String.format("eglChooseConfig() returned error 0x%x", mEgl.eglGetError()));
+            return false;
+        }
+
+        Log.i(LOG_TAG, String.format("Config num: %d, has sharedContext: %s", numConfig[0], context == EGL10.EGL_NO_CONTEXT ? "NO" : "YES"));
+
+        mConfig = configs[0];
+
+        mContext = mEgl.eglCreateContext(mDisplay, mConfig,
+                context, contextAttribList);
+        if (mContext == EGL10.EGL_NO_CONTEXT) {
+            Log.e(LOG_TAG, "eglCreateContext Failed!");
+            return false;
+        }
+
+        // EGL_RECORDABLE_ANDROID falls through to the pbuffer path on
+        // purpose; both cases end the switch, so no break is needed.
+        switch (contextType) {
+            case EGL10.EGL_PIXMAP_BIT:
+                mSurface = mEgl.eglCreatePixmapSurface(mDisplay, mConfig, obj, surfaceAttribList);
+                break;
+            case EGL10.EGL_WINDOW_BIT:
+                mSurface = mEgl.eglCreateWindowSurface(mDisplay, mConfig, obj, surfaceAttribList);
+                break;
+            case EGL10.EGL_PBUFFER_BIT:
+            case EGL_RECORDABLE_ANDROID:
+                mSurface = mEgl.eglCreatePbufferSurface(mDisplay, mConfig,
+                        surfaceAttribList);
+        }
+
+        if (mSurface == EGL10.EGL_NO_SURFACE) {
+            Log.e(LOG_TAG, "eglCreatePbufferSurface Failed!");
+            return false;
+        }
+
+        if (!mEgl.eglMakeCurrent(mDisplay, mSurface, mSurface, mContext)) {
+            Log.e(LOG_TAG, "eglMakeCurrent failed:" + mEgl.eglGetError());
+            return false;
+        }
+
+        int[] clientVersion = new int[1];
+        mEgl.eglQueryContext(mDisplay, mContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, clientVersion);
+        Log.i(LOG_TAG, "EGLContext created, client version " + clientVersion[0]);
+
+        mGl = (GL10) mContext.getGL();
+
+        return true;
+    }
+}

+ 141 - 0
media/cge_library/src/main/java/org/wysaid/common/TextureDrawer.java

@@ -0,0 +1,141 @@
+package org.wysaid.common;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Created by wangyang on 15/8/8.
+ * A simple direct drawer with flip, scale & rotate.
+ * Draws a texture as a full-screen quad; rotation and flip/scale are applied
+ * to the texture coordinates in the vertex shader. Create via create() on a
+ * GL thread and free with release().
+ */
+public class TextureDrawer {
+
+    protected static final String vshDrawer = "" +
+            "attribute vec2 vPosition;\n"+
+            "varying vec2 texCoord;\n"+
+            "uniform mat2 rotation;\n"+
+            "uniform vec2 flipScale;\n"+
+            "void main()\n"+
+            "{\n"+
+            "   gl_Position = vec4(vPosition, 0.0, 1.0);\n"+
+            "   texCoord = flipScale * (vPosition / 2.0 * rotation) + 0.5;\n"+
+            "}";
+
+    protected static final String fshDrawer = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform sampler2D inputImageTexture;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_FragColor = texture2D(inputImageTexture, texCoord);\n" +
+            "}";
+
+    // Full-screen quad in clip space, drawn as a 4-vertex triangle fan.
+    public static final float[] vertices = {-1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f};
+    public static final int DRAW_FUNCTION = GLES20.GL_TRIANGLE_FAN;
+
+    protected ProgramObject mProgram;
+    protected int mVertBuffer;  // VBO holding 'vertices'
+    protected int mRotLoc, mFlipScaleLoc;  // cached uniform locations
+
+    public ProgramObject getProgram() {
+        return mProgram;
+    }
+
+    // Use the static create() factory instead; init() must still be called.
+    protected TextureDrawer() {
+    }
+
+    // Builds the program (vPosition pre-bound to attribute 0), caches the
+    // uniform locations, uploads the quad VBO and sets identity transform.
+    // Returns false and cleans up the program on shader failure.
+    protected boolean init(final String vsh, final String fsh) {
+        mProgram = new ProgramObject();
+        mProgram.bindAttribLocation("vPosition", 0);
+        if(!mProgram.init(vsh, fsh)) {
+            mProgram.release();
+            mProgram = null;
+            return false;
+        }
+
+        mProgram.bind();
+
+        mRotLoc = mProgram.getUniformLoc("rotation");
+        mFlipScaleLoc = mProgram.getUniformLoc("flipScale");
+
+        int[] vertBuffer = new int[1];
+        GLES20.glGenBuffers(1, vertBuffer, 0);
+        mVertBuffer = vertBuffer[0];
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertBuffer);
+        FloatBuffer buffer = ByteBuffer.allocateDirect(vertices.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
+        buffer.put(vertices).position(0);
+
+        // 32 bytes == vertices.length (8 floats) * 4 bytes per float.
+        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, 32, buffer, GLES20.GL_STATIC_DRAW);
+        setRotation(0.0f);
+        setFlipScale(1.0f, 1.0f);
+        return true;
+    }
+
+
+    // Factory: returns a ready-to-use drawer, or null on GL failure.
+    public static TextureDrawer create() {
+        TextureDrawer drawer = new TextureDrawer();
+        if(!drawer.init(vshDrawer, fshDrawer))
+        {
+            Log.e(Common.LOG_TAG, "TextureDrawer create failed!");
+            drawer.release();
+            drawer = null;
+        }
+        return drawer;
+    }
+
+    // Frees the program and VBO; call on the GL thread.
+    public void release() {
+        if(mProgram != null) {
+            mProgram.release();
+            mProgram = null;
+        }
+        GLES20.glDeleteBuffers(1, new int[]{mVertBuffer}, 0);
+        mVertBuffer = 0;
+    }
+
+    public void drawTexture(int texID) {
+        drawTexture(texID, GLES20.GL_TEXTURE_2D);
+    }
+
+    // Draws 'texID' (bound to texture unit 0 with the given target, e.g.
+    // GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES) as a full-screen quad.
+    public void drawTexture(int texID, int type) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(type, texID);
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+        mProgram.bind();
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+    }
+
+    //Special external helper: expose the internal quad VBO for callers that
+    //want to reuse it for their own draws.
+    public void bindVertexBuffer() {
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertBuffer);
+    }
+
+    // Sets the texture-coordinate rotation, in radians.
+    public void setRotation(float rad) {
+        _rotate(mRotLoc, rad);
+    }
+
+    // Sets per-axis flip/scale factors (e.g. -1 to mirror an axis).
+    public void setFlipScale(float x, float y) {
+        mProgram.bind();
+        GLES20.glUniform2f(mFlipScaleLoc, x, y);
+    }
+
+    // Uploads a 2x2 rotation matrix (column-major) to 'location'.
+    private void _rotate(int location, float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+
+        final float[] rotation = new float[] {
+                cosRad, sinRad,
+                -sinRad, cosRad
+        };
+
+        mProgram.bind();
+        GLES20.glUniformMatrix2fv(location, 1, false, rotation, 0);
+    }
+
+}

+ 117 - 0
media/cge_library/src/main/java/org/wysaid/geometryUtils/GeometryRenderer.java

@@ -0,0 +1,117 @@
+package org.wysaid.geometryUtils;
+
+import android.opengl.GLES20;
+
+import org.wysaid.common.ProgramObject;
+
+/**
+ * Created by wysaid on 16/3/1.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ * Tips: GeometryRenderer needs no resource(such as textures)
+ * Renders flat-colored geometry from an externally supplied vertex buffer.
+ * Vertices are given in canvas coordinates and mapped to clip space using
+ * the 'canvasSize' uniform.
+ */
+public class GeometryRenderer {
+
+    protected static final String vshDrawDefault = "" +
+            "attribute vec2 vPosition;\n" +
+            "uniform vec2 canvasSize;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_Position = vec4((vPosition / canvasSize) * 2.0 - 1.0, 0.0, 1.0);\n" +
+            "}";
+
+    private static final String fshDrawOrigin = "" +
+            "precision mediump float;\n" +
+            "uniform vec4 color;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_FragColor = color;\n" +
+            "}";
+
+    protected static final String POSITION_NAME = "vPosition";
+    protected static final String COLOR_NAME = "color";
+    protected static final String CANVAS_SIZE = "canvasSize";
+//    protected int mColorLoc;
+
+    protected ProgramObject mProgram;
+    // NOTE: init() does not create this buffer - it must be supplied via
+    // setVertexBuffer(); once set, release() takes ownership and deletes it.
+    protected int mVertexBuffer;
+
+    protected float mCanvasWidth;
+    protected float mCanvasHeight;
+
+    //////////////////////////////////////////////////////
+
+    // Use the static create() factory instead.
+    GeometryRenderer() {
+
+    }
+
+    // Builds the program (vPosition pre-bound to attribute 0) and sets
+    // default color (opaque white) and canvas size (1x1).
+    protected boolean init() {
+        mProgram = new ProgramObject();
+        mProgram.bindAttribLocation(POSITION_NAME, 0);
+        if(!mProgram.init(vshDrawDefault, fshDrawOrigin)) {
+            release();
+            return false;
+        }
+        setColor(1.0f, 1.0f, 1.0f, 1.0f);
+        setCanvasSize(1.0f, 1.0f);
+        return true;
+    }
+
+    // Frees the program and (if set) the vertex buffer; call on GL thread.
+    public void release() {
+        if(mProgram != null) {
+            mProgram.release();
+            mProgram = null;
+        }
+
+        if(mVertexBuffer != 0) {
+            GLES20.glDeleteBuffers(1, new int[]{mVertexBuffer}, 0);
+            mVertexBuffer = 0;
+        }
+    }
+
+    // Factory: returns a ready renderer, or null on GL failure.
+    public static GeometryRenderer create() {
+        GeometryRenderer renderer = new GeometryRenderer();
+        if(!renderer.init()) {
+            renderer.release();
+            renderer = null;
+        }
+        return renderer;
+    }
+
+    // Sets the flat draw color (RGBA, 0..1).
+    public void setColor(float r, float g, float b, float a) {
+        mProgram.bind();
+        mProgram.sendUniformf(COLOR_NAME, r, g, b, a);
+    }
+
+    public int getVertexBuffer() {
+        return mVertexBuffer;
+    }
+
+    // Transfers ownership of 'buffer' to this renderer (deleted in release()).
+    public void setVertexBuffer(int buffer) {
+        mVertexBuffer = buffer;
+    }
+
+    // Sets the logical canvas size used to map vertices to clip space.
+    public void setCanvasSize(float w, float h) {
+        mCanvasWidth = w;
+        mCanvasHeight = h;
+        mProgram.bind();
+        mProgram.sendUniformf(CANVAS_SIZE, w, h);
+    }
+
+    public ProgramObject getProgram() {
+        return mProgram;
+    }
+
+    // Binds the vertex buffer and sets up attribute 0 as vec2 positions.
+    public void bindBufferAttrib() {
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+    }
+
+    // Draws 'count' vertices starting at 'first' with the given GL primitive
+    // mode (e.g. GL_LINES, GL_TRIANGLES).
+    public void render(int mode, int first, int count) {
+
+        bindBufferAttrib();
+        mProgram.bind();
+        GLES20.glDrawArrays(mode, first, count);
+    }
+}

+ 9 - 0
media/cge_library/src/main/java/org/wysaid/geometryUtils/GeometryRendererLine.java

@@ -0,0 +1,9 @@
+package org.wysaid.geometryUtils;
+
+/**
+ * Created by wysaid on 16/3/1.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ * Placeholder subclass for line rendering; currently adds nothing over
+ * GeometryRenderer (use render() with GL_LINES / GL_LINE_STRIP).
+ */
+public class GeometryRendererLine extends GeometryRenderer {
+}

+ 29 - 0
media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerCodec.java

@@ -0,0 +1,29 @@
+package org.wysaid.gpuCodec;
+
+import org.wysaid.common.TextureDrawer;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 2017/5/3
+ * @Description: Base class for the GPU YUV<->RGB conversion drawers. Holds
+ * the shared color-conversion matrices (stored column-major for GLSL mat3).
+ */
+
+public class TextureDrawerCodec extends TextureDrawer {
+
+    // YUV -> RGB conversion matrix; the 1.5748/1.8556 coefficients look like
+    // BT.709 full-range - TODO confirm against the video source color space.
+    static final float[] MATRIX_YUV2RGB = new float[] {
+            1.0f, 1.0f, 1.0f,
+            0.0f, -0.18732f, 1.8556f,
+            1.57481f, -0.46813f, 0.0f
+    };
+
+    //Invert matrix for 'MATRIX_YUV2RGB'
+    static final float[] MATRIX_RGB2YUV = new float[] {
+            0.21260134f, -0.11457283f,  0.49999598f,
+            0.71520028f, -0.38542805f, -0.4541502f,
+            0.07219838f,  0.50000087f, -0.04584577f,
+    };
+
+    // Name of the mat3 uniform the subclasses upload a matrix into.
+    public static final String COLOR_CONVERSION_NAME = "colorConversion";
+
+}

+ 83 - 0
media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerI420ToRGB.java

@@ -0,0 +1,83 @@
+package org.wysaid.gpuCodec;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 2017/5/3
+ * @Description: Draws three planar I420 textures (Y, U, V) as RGB using the
+ * shared YUV->RGB matrix. Texture units: Y on 0 (sampler default), U on 1,
+ * V on 2.
+ */
+
+public class TextureDrawerI420ToRGB extends TextureDrawerCodec {
+
+    protected static final String fshI420ToRGB = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform sampler2D textureY;\n" +
+            "uniform sampler2D textureU;\n" +
+            "uniform sampler2D textureV;\n" +
+            "uniform mat3 colorConversion;\n" +
+            "void main()\n" +
+            "{\n" +
+            "    vec3 yuv;\n" +
+            "    yuv.x = texture2D(textureY, texCoord).r;\n" +
+            "    yuv.y = texture2D(textureU, texCoord).r - 0.5;\n" +
+            "    yuv.z = texture2D(textureV, texCoord).r - 0.5;\n" +
+            "    vec3 rgb = colorConversion * yuv;\n" +
+            "    gl_FragColor = vec4(rgb, 1.0);\n" +
+            "}";
+
+    // Factory: returns a ready drawer, or null on GL failure.
+    public static TextureDrawerI420ToRGB create() {
+        TextureDrawerI420ToRGB drawer = new TextureDrawerI420ToRGB();
+        if(!drawer.init(vshDrawer, fshI420ToRGB))
+        {
+            Log.e(Common.LOG_TAG, "TextureDrawerI420ToRGB create failed!");
+            drawer.release();
+            drawer = null;
+        }
+        return drawer;
+    }
+
+    // Assigns the U/V samplers to texture units 1/2 (Y stays on the default
+    // unit 0) and uploads the YUV->RGB matrix once at init time.
+    @Override
+    protected boolean init(String vsh, String fsh) {
+
+        if(super.init(vsh, fsh)) {
+            mProgram.bind();
+            mProgram.sendUniformi("textureU", 1);
+            mProgram.sendUniformi("textureV", 2);
+            mProgram.sendUniformMat3(COLOR_CONVERSION_NAME, 1, false, MATRIX_YUV2RGB);
+            return true;
+        }
+
+        return false;
+    }
+
+    //Just draw. Please ensure the textures are bound to the right target.
+    public void drawTextures() {
+        mProgram.bind();
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 4);
+    }
+
+    // Binds the three planes to units 0/1/2 and draws a full-screen quad.
+    public void drawTextures(int texY, int texU, int texV) {
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texY);
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texU);
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texV);
+
+        drawTextures();
+    }
+
+
+}

+ 42 - 0
media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerNV12ToRGB.java

@@ -0,0 +1,42 @@
+package org.wysaid.gpuCodec;
+
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 2017/5/3
+ * @Description: NV12 variant of the NV21 drawer - identical setup, but the
+ * shader swizzles the interleaved UV texture as '.ar' instead of '.ra',
+ * matching NV12's U-before-V byte order. Presumably the UV plane is uploaded
+ * as a luminance-alpha texture - verify against the uploader.
+ */
+
+public class TextureDrawerNV12ToRGB extends TextureDrawerNV21ToRGB {
+
+    private static final String fshNV12ToRGB = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform sampler2D textureY;\n" +
+            "uniform sampler2D textureUV;\n" +
+            "uniform mat3 colorConversion;\n" +
+            "void main()\n" +
+            "{\n" +
+            "    vec3 yuv;\n" +
+            "    yuv.x = texture2D(textureY, texCoord).r;\n" +
+            "    yuv.yz = texture2D(textureUV, texCoord).ar - vec2(0.5, 0.5);\n" +
+            "    vec3 rgb = colorConversion * yuv;\n" +
+            "    gl_FragColor = vec4(rgb, 1.0);\n" +
+            "}";
+
+    // Factory: returns a ready drawer, or null on GL failure. Uniform setup
+    // is inherited from TextureDrawerNV21ToRGB.init().
+    public static TextureDrawerNV12ToRGB create() {
+        TextureDrawerNV12ToRGB drawer = new TextureDrawerNV12ToRGB();
+        if(!drawer.init(vshDrawer, fshNV12ToRGB))
+        {
+            Log.e(Common.LOG_TAG, "TextureDrawerNV12ToRGB create failed!");
+            drawer.release();
+            drawer = null;
+        }
+        return drawer;
+    }
+
+}

+ 76 - 0
media/cge_library/src/main/java/org/wysaid/gpuCodec/TextureDrawerNV21ToRGB.java

@@ -0,0 +1,76 @@
+package org.wysaid.gpuCodec;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 2017/5/3
+ * @Description: Draws semi-planar NV21 data (Y plane + interleaved VU plane)
+ * as RGB. Texture units: Y on 0 (sampler default), UV on 1. The '.ra'
+ * swizzle matches NV21's V-before-U byte order; presumably the UV plane is
+ * uploaded as a luminance-alpha texture - verify against the uploader.
+ */
+
+public class TextureDrawerNV21ToRGB extends TextureDrawerCodec {
+
+    private static final String fshNV21ToRGB = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform sampler2D textureY;\n" +
+            "uniform sampler2D textureUV;\n" +
+            "uniform mat3 colorConversion;\n" +
+            "void main()\n" +
+            "{\n" +
+            "    vec3 yuv;\n" +
+            "    yuv.x = texture2D(textureY, texCoord).r;\n" +
+            "    yuv.yz = texture2D(textureUV, texCoord).ra - vec2(0.5, 0.5);\n" +
+            "    vec3 rgb = colorConversion * yuv;\n" +
+            "    gl_FragColor = vec4(rgb, 1.0);\n" +
+            "}";
+
+    // Factory: returns a ready drawer, or null on GL failure.
+    public static TextureDrawerNV21ToRGB create() {
+        TextureDrawerNV21ToRGB drawer = new TextureDrawerNV21ToRGB();
+        if(!drawer.init(vshDrawer, fshNV21ToRGB))
+        {
+            Log.e(Common.LOG_TAG, "TextureDrawerNV21ToRGB create failed!");
+            drawer.release();
+            drawer = null;
+        }
+        return drawer;
+    }
+
+    // Assigns the UV sampler to texture unit 1 (Y stays on the default unit
+    // 0) and uploads the YUV->RGB matrix once at init time.
+    @Override
+    protected boolean init(String vsh, String fsh) {
+
+        if(super.init(vsh, fsh)) {
+            mProgram.bind();
+            mProgram.sendUniformi("textureUV", 1);
+            mProgram.sendUniformMat3(COLOR_CONVERSION_NAME, 1, false, MATRIX_YUV2RGB);
+            return true;
+        }
+
+        return false;
+    }
+
+    //Just draw. Please ensure the textures are bound to the right target.
+    public void drawTextures() {
+        mProgram.bind();
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_FAN, 0, 4);
+    }
+
+    // Binds the Y/UV planes to units 0/1 and draws a full-screen quad.
+    public void drawTextures(int texY, int texUV) {
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texY);
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texUV);
+
+        drawTextures();
+    }
+
+}

+ 128 - 0
media/cge_library/src/main/java/org/wysaid/myUtils/FileUtil.java

@@ -0,0 +1,128 @@
+package org.wysaid.myUtils;
+
+import android.content.Context;
+import android.os.Environment;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+
+/**
+ * Created by wangyang on 15/11/27.
+ */
+public class FileUtil {
+
+    public static final String LOG_TAG = Common.LOG_TAG;
+    public static final File externalStorageDirectory = Environment.getExternalStorageDirectory();
+    public static String packageFilesDirectory = null;
+    public static String storagePath = null;
+    private static String mDefaultFolder = "libCGE";
+
+    public static void setDefaultFolder(String defaultFolder) {
+        mDefaultFolder = defaultFolder;
+    }
+
+    public static String getPath() {
+        return getPath(null);
+    }
+
+    public static String getPath(Context context) {
+
+        if(storagePath == null) {
+            storagePath = externalStorageDirectory.getAbsolutePath() + "/" + mDefaultFolder;
+            File file = new File(storagePath);
+            if(!file.exists()) {
+                if(!file.mkdirs()) {
+                    storagePath = getPathInPackage(context, true);
+                }
+            }
+        }
+
+        return storagePath;
+    }
+
+    public static String getPathInPackage(Context context, boolean grantPermissions) {
+
+        if(context == null || packageFilesDirectory != null)
+            return packageFilesDirectory;
+
+        //手机不存在sdcard, 需要使用 data/data/name.of.package/files 目录
+        String path = context.getFilesDir() + "/" + mDefaultFolder;
+        File file = new File(path);
+
+        if(!file.exists()) {
+            if(!file.mkdirs()) {
+                Log.e(LOG_TAG, "Create package dir of CGE failed!");
+                return null;
+            }
+
+            if(grantPermissions) {
+
+                //设置隐藏目录权限.
+                if (file.setExecutable(true, false)) {
+                    Log.i(LOG_TAG, "Package folder is executable");
+                }
+
+                if (file.setReadable(true, false)) {
+                    Log.i(LOG_TAG, "Package folder is readable");
+                }
+
+                if (file.setWritable(true, false)) {
+                    Log.i(LOG_TAG, "Package folder is writable");
+                }
+            }
+        }
+
+        packageFilesDirectory = path;
+        return packageFilesDirectory;
+    }
+
+    public static void saveTextContent(String text, String filename) {
+        Log.i(LOG_TAG, "Saving text : " + filename);
+
+        try {
+            FileOutputStream fileout = new FileOutputStream(filename);
+            fileout.write(text.getBytes());
+            fileout.flush();
+            fileout.close();
+        } catch (Exception e) {
+            Log.e(LOG_TAG, "Error: " + e.getMessage());
+        }
+    }
+
+    public static String getTextContent(String filename) {
+        Log.i(LOG_TAG, "Reading text : " + filename);
+
+        if(filename == null) {
+            return null;
+        }
+
+        String content = "";
+        byte[] buffer = new byte[256]; //Create cache for reading.
+
+        try {
+
+            FileInputStream filein = new FileInputStream(filename);
+            int len;
+
+            while(true) {
+                len = filein.read(buffer);
+
+                if(len <= 0)
+                    break;
+
+                content += new String(buffer, 0, len);
+            }
+
+        } catch (Exception e) {
+            Log.e(LOG_TAG, "Error: " + e.getMessage());
+            return null;
+        }
+
+        return content;
+    }
+
+}

+ 89 - 0
media/cge_library/src/main/java/org/wysaid/myUtils/ImageUtil.java

@@ -0,0 +1,89 @@
+package org.wysaid.myUtils;
+
+import android.graphics.Bitmap;
+import android.media.FaceDetector;
+import android.util.Log;
+
+import java.io.BufferedOutputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+/**
+ * Created by wangyang on 15/7/27.
+ */
+
+public class ImageUtil extends FileUtil {
+
+
+    public static String saveBitmap(Bitmap bmp) {
+        String path = getPath();
+        long currentTime = System.currentTimeMillis();
+        String filename = path + "/" + currentTime + ".jpg";
+        return saveBitmap(bmp, filename);
+    }
+
+    public static String saveBitmap(Bitmap bmp, String filename) {
+
+        Log.i(LOG_TAG, "saving Bitmap : " + filename);
+
+        try {
+            FileOutputStream fileout = new FileOutputStream(filename);
+            BufferedOutputStream bufferOutStream = new BufferedOutputStream(fileout);
+            bmp.compress(Bitmap.CompressFormat.JPEG, 100, bufferOutStream);
+            bufferOutStream.flush();
+            bufferOutStream.close();
+        } catch (IOException e) {
+            Log.e(LOG_TAG, "Err when saving bitmap...");
+            e.printStackTrace();
+            return null;
+        }
+
+        Log.i(LOG_TAG, "Bitmap " + filename + " saved!");
+        return filename;
+    }
+
+    public static class FaceRects {
+        public int numOfFaces; // 实际检测出的人脸数
+        public FaceDetector.Face[] faces; // faces.length >= numOfFaces
+    }
+
+    public static FaceRects findFaceByBitmap(Bitmap bmp) {
+        return findFaceByBitmap(bmp, 1);
+    }
+
+    public static FaceRects findFaceByBitmap(Bitmap bmp, int maxFaces) {
+
+        if(bmp == null) {
+            Log.e(LOG_TAG, "Invalid Bitmap for Face Detection!");
+            return null;
+        }
+
+        Bitmap newBitmap = bmp;
+
+        //人脸检测API 仅支持 RGB_565 格式当图像. (for now)
+        if(newBitmap.getConfig() != Bitmap.Config.RGB_565) {
+            newBitmap = newBitmap.copy(Bitmap.Config.RGB_565, false);
+        }
+
+        FaceRects rects = new FaceRects();
+        rects.faces = new FaceDetector.Face[maxFaces];
+
+        try {
+            FaceDetector detector = new FaceDetector(newBitmap.getWidth(), newBitmap.getHeight(), maxFaces);
+            rects.numOfFaces = detector.findFaces(newBitmap, rects.faces);
+        } catch (Exception e) {
+            Log.e(LOG_TAG, "findFaceByBitmap error: " + e.getMessage());
+            return null;
+        }
+
+
+        if(newBitmap != bmp) {
+            newBitmap.recycle();
+        }
+        return rects;
+    }
+
+
+
+}
+

+ 59 - 0
media/cge_library/src/main/java/org/wysaid/myUtils/MsgUtil.java

@@ -0,0 +1,59 @@
+package org.wysaid.myUtils;
+
+import android.content.Context;
+import android.view.View;
+import android.widget.Toast;
+
+import java.lang.ref.WeakReference;
+
+/**
+ * Created by wysaid on 16/2/26.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ */
+public class MsgUtil {
+
+    static WeakReference<Context> mContext;
+    static Toast mToast;
+
+    public static Toast getCurrentToast() {
+        return mToast;
+    }
+
+    public static void setCurrentToast(Context context, Toast toast) {
+        mContext = new WeakReference<Context>(context);
+        mToast = toast;
+    }
+
+    public static void clear() {
+        mContext = null;
+        mToast = null;
+    }
+
+    public static void toastMsg(Context context, String msg) {
+        toastMsg(context, msg, Toast.LENGTH_LONG);
+    }
+
+    public static void toastMsg(Context context, String msg, int duration) {
+        if(mContext == null || mContext.get() != context) {
+            if(context == null) {
+                mContext = null;
+                return;
+            }
+
+            mContext = new WeakReference<Context>(context);
+            mToast = Toast.makeText(mContext.get(), "", duration);
+            mToast.setDuration(duration);
+        }
+
+        if(mContext.get() != null && mToast != null) {
+//            mToast.cancel();
+            mToast.setText(msg);
+            mToast.show();
+        }
+    }
+
+    public static boolean isDisplaying() {
+        return mToast != null && (mToast.getView() != null && mToast.getView().getWindowVisibility() == View.VISIBLE);
+    }
+}

+ 136 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGEDeformFilterWrapper.java

@@ -0,0 +1,136 @@
+package org.wysaid.nativePort;
+
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 2018/12/09
+ * @Description:
+ */
+
+public class CGEDeformFilterWrapper {
+
+    static {
+        NativeLibraryLoader.load();
+    }
+
+    private CGEDeformFilterWrapper(int w, int h, float stride) {
+        mNativeAddress = nativeCreate(w, h, stride);
+    }
+
+    //'width', 'height': The real size of the picture in pixels.
+    //'stride': Set the mesh in real pixels, the mesh size would be (width / stride, height / stride)
+    public static CGEDeformFilterWrapper create(int w, int h, float stride) {
+        CGEDeformFilterWrapper wrapper = new CGEDeformFilterWrapper(w, h, stride);
+        if(wrapper.mNativeAddress == 0) {
+            wrapper.release(true);
+            wrapper = null;
+            Log.e(Common.LOG_TAG, "CGEDeformFilterWrapper.create failed!");
+        }
+        return wrapper;
+    }
+
+    // You should not delete the native filter if the wrapper is binding to the image handler.
+    // The handler will release the native filter when necessary.
+    public void release(boolean shouldDeleteNativeFilter) {
+        if(mNativeAddress != 0) {
+            if(shouldDeleteNativeFilter) {
+                nativeRelease(mNativeAddress);
+            }
+            mNativeAddress = 0;
+        }
+    }
+
+    public void restore() {
+        nativeRestore(mNativeAddress);
+    }
+
+    public void restoreWithIntensity(float intensity) {
+        nativeRestoreWithIntensity(mNativeAddress, intensity);
+    }
+
+    //'start', 'end': the real position of the cursor between two operations.
+    //'x/y': the real position of the cursor.
+    //'w', 'h': the canvas size. (the max 'x' and 'y' of the cursor)
+    //radius: the deform radius in real pixels.
+    //intensity: range (0, 1], 0 for origin. Better not more than 0.5
+    public void forwardDeform(float startX, float startY, float endX, float endY, float w, float h, float radius, float intensity) {
+        nativeForwardDeform(mNativeAddress, startX, startY, endX, endY, w, h, radius, intensity);
+    }
+
+    public void restoreWithPoint(float x, float y, float w, float h, float radius, float intensity) {
+        nativeRestoreWithPoint(mNativeAddress, x, y, w, h, radius, intensity);
+    }
+
+    public void bloatDeform(float x, float y, float w, float h, float radius, float intensity) {
+        nativeBloatDeform(mNativeAddress, x, y, w, h, radius, intensity);
+    }
+
+    public void wrinkleDeform(float x, float y, float w, float h, float radius, float intensity) {
+        nativeWrinkleDeform(mNativeAddress, x, y, w, h, radius, intensity);
+    }
+
+    public void setUndoSteps(int undoSteps) {
+        nativeSetUndoSteps(mNativeAddress, undoSteps);
+    }
+
+    // check if `undo` is available.
+    public boolean canUndo() {
+        return nativeCanUndo(mNativeAddress);
+    }
+
+    // check if `redo` is available.
+    public boolean canRedo() {
+        return nativeCanRedo(mNativeAddress);
+    }
+
+    // The return value marks if `undo` is done correctly.
+    public boolean undo() {
+        return nativeUndo(mNativeAddress);
+    }
+
+    // The return value marks if `redo` is done correctly.
+    public boolean redo() {
+        return nativeRedo(mNativeAddress);
+    }
+
+    // The return value marks if `pushDeformStep` is done correctly.
+    // `false` will be returned when the steps count meet the "max step" set by "setUndoSteps"
+    public boolean pushDeformStep() {
+        return nativePushDeformStep(mNativeAddress);
+    }
+
+    public void showMesh(boolean show) {
+        nativeShowMesh(mNativeAddress, show);
+    }
+
+    protected long mNativeAddress;
+
+    // You should not release the native filter if you pass the native address to the handler.
+    public long getNativeAddress() {
+        return mNativeAddress;
+    }
+
+    protected native long nativeCreate(int width, int height, float stride);
+    protected native void nativeRelease(long holder);
+
+    // reset to default.
+    protected native void nativeRestore(long holder);
+    protected native void nativeRestoreWithIntensity(long holder, float intensity);
+    protected native void nativeForwardDeform(long holder, float startX, float startY, float endX, float endY, float w, float h, float radius, float intensity);
+    protected native void nativeRestoreWithPoint(long holder, float x, float y, float w, float h, float radius, float intensity);
+    protected native void nativeBloatDeform(long holder, float x, float y, float w, float h, float radius, float intensity);
+    protected native void nativeWrinkleDeform(long holder, float x, float y, float w, float h, float radius, float intensity);
+
+    protected native void nativeSetUndoSteps(long holder, int undoSteps);
+    protected native boolean nativeCanUndo(long holder);
+    protected native boolean nativeCanRedo(long holder);
+    protected native boolean nativeUndo(long holder);
+    protected native boolean nativeRedo(long holder);
+    protected native boolean nativePushDeformStep(long holder);
+
+    protected native void nativeShowMesh(long holder, boolean show);
+}

+ 27 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGEFFmpegNativeLibrary.java

@@ -0,0 +1,27 @@
+package org.wysaid.nativePort;
+
+import android.graphics.Bitmap;
+
+/**
+ * Created by wangyang on 15/7/30.
+ */
+public class CGEFFmpegNativeLibrary {
+    static {
+        NativeLibraryLoader.load();
+    }
+
+    //CN: 视频转换+特效可能执行较长的时间, 请置于后台线程运行.
+    //EN: Convert video + Filter Effects may take some time, so you'd better put it on another thread.
+    public static boolean generateVideoWithFilter(String outputFilename, String inputFilename, String filterConfig, float filterIntensity, Bitmap blendImage, CGENativeLibrary.TextureBlendMode blendMode, float blendIntensity, boolean mute) {
+
+        return nativeGenerateVideoWithFilter(outputFilename, inputFilename, filterConfig, filterIntensity, blendImage, blendMode == null ? 0 : blendMode.ordinal(), blendIntensity, mute);
+
+    }
+
+    //////////////////////////////////////////
+
+    private static native boolean nativeGenerateVideoWithFilter(String outputFilename, String inputFilename, String filterConfig, float filterIntensity, Bitmap blendImage, int blendMode, float blendIntensity, boolean mute);
+
+    public static native void avRegisterAll();
+
+}

+ 169 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGEFaceTracker.java

@@ -0,0 +1,169 @@
+package org.wysaid.nativePort;
+
+import android.graphics.Bitmap;
+import android.graphics.PointF;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Created by wysaid on 16/2/23.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ */
/**
 * JNI wrapper around the native FaceTracker library.
 * A one-time global setup (nativeSetupTracker) runs on first createFaceTracker().
 */
public class CGEFaceTracker {

    static {
        //You can ignore the tracker libraries below(for decreasing your package size, just delete the .so file below), if you don't want any face features.
//        System.loadLibrary("opencv_java3");  // OpenCV is statically built into FaceTracker now.
        System.loadLibrary("FaceTracker");
    }

    // Temporary structure; more complex result data will be exposed later.
    public static class FaceResultSimple {

        public PointF leftEyePos, rightEyepos; // left eye & right eye positions
        public PointF nosePos; // center of the nose
        public PointF mouthPos; // center of the mouth
        public PointF jawPos; // lowest (bottom-most) point of the jaw
    }

    // Marks whether the global native tracker setup has already run.
    protected static boolean sIsTrackerSetup = false;

    public static boolean isTrackerSetup() {
        return sIsTrackerSetup;
    }

    // Address of the native tracker instance; 0 after release().
    protected long mNativeAddress;

    private CGEFaceTracker() {
        mNativeAddress = nativeCreateFaceTracker();
    }

    // Factory method; performs the one-time global setup on first use.
    // NOTE(review): not synchronized - confirm it is only called from one thread.
    public static CGEFaceTracker createFaceTracker() {

        if(!sIsTrackerSetup) {
            nativeSetupTracker(null, null, null);
            sIsTrackerSetup = true;
        }

        return new CGEFaceTracker();
    }

    // Frees the native tracker; safe to call more than once.
    public void release() {
        if(mNativeAddress != 0) {
            nativeRelease(mNativeAddress);
            mNativeAddress = 0;
        }
    }

    // Safety net: releases native memory if the caller forgot to.
    @Override
    protected void finalize() throws Throwable {
        release();
        super.finalize();
    }

    //Send your own tracking file, face tracking ref: https://github.com/kylemcdonald/FaceTracker
//    public static void setupTracker(Context context) {
//        setupTracker(context, "face2.tracker", "face.tri", "face.con");
//    }
//
//    public static void setupTracker(Context context, String modelFile, String triFile, String conFile) {
//
//        if(sIsTrackerSetup) {
//            Log.e(Common.LOG_TAG, "The tracker is already setup!");
//            return;
//        }
//
//        AssetManager am = context.getAssets();
//        String pathInPackage = FileUtil.getPathInPackage(context, false);
//        String model = pathInPackage + "/" + modelFile;
//        String tri = pathInPackage + "/" + triFile;
//        String con = pathInPackage + "/" + conFile;
//
//        try {
//
//            InputStream modelStream = am.open(modelFile);
//            FileUtil.saveStreamContent(modelStream, model);
//            modelStream.close();
//
//            InputStream triStream = am.open(triFile);
//            FileUtil.saveStreamContent(triStream, tri);
//            triStream.close();
//
//            InputStream conStream = am.open(conFile);
//            FileUtil.saveStreamContent(conStream, con);
//            conStream.close();
//
//        } catch (Exception e) {
//            Log.e(Common.LOG_TAG, "Can not setup face tracker!");
//            return;
//        }
//
//        nativeSetupTracker(model, tri, con);
//
//        nativeSetupTracker(null, null, null);
//        Log.i(Common.LOG_TAG, "Face Tracker is set!");
//        sIsTrackerSetup = true;
//    }

    // Named "Simple" because more complete result data (a 66-point mesh, etc.)
    // will be integrated later.
    // Returns null when no face is found; when 'drawFeature' is true the native
    // side draws the detected features (presumably onto 'bmp' - confirm in native code).
    public FaceResultSimple detectFaceWithSimpleResult(Bitmap bmp, boolean drawFeature) {
        float[] result = nativeDetectFaceWithSimpleResult(mNativeAddress, bmp, drawFeature);

        if(result == null) {
            return null;
        }

        FaceResultSimple faceResultSimple = new FaceResultSimple();

        // Native result layout: [leftEye.x, leftEye.y, rightEye.x, rightEye.y,
        // nose.x, nose.y, mouth.x, mouth.y, jaw.x, jaw.y]
        faceResultSimple.leftEyePos = new PointF(result[0], result[1]);
        faceResultSimple.rightEyepos = new PointF(result[2], result[3]);
        faceResultSimple.nosePos = new PointF(result[4], result[5]);
        faceResultSimple.mouthPos = new PointF(result[6], result[7]);
        faceResultSimple.jawPos = new PointF(result[8], result[9]);

        return faceResultSimple;
    }

    /////////// for video frames

    public static class FaceResult {
        // 66 key points: 66 * 2 floats * 4 bytes = 66 * 8 bytes, native byte order.
        public FloatBuffer faceKeyPoints = ByteBuffer.allocateDirect(66 * 8).order(ByteOrder.nativeOrder()).asFloatBuffer();
    }

    // Reused output buffer for the per-frame detect calls below.
    protected FaceResult mFaceResult = new FaceResult();

    public FaceResult getFaceResult() {
        return mFaceResult;
    }

    //recommended
    // Single-channel (grayscale) frame buffer; fills mFaceResult on success.
    public boolean detectFaceWithGrayBuffer(ByteBuffer buffer, int width, int height, int bytesPerRow) {
        return nativeDetectFaceWithBuffer(mNativeAddress, buffer, width, height, 1, bytesPerRow, mFaceResult.faceKeyPoints);
    }

    // 4-channel BGRA frame buffer.
    public boolean detectFaceWithBGRABuffer(ByteBuffer buffer, int width, int height, int bytesPerRow) {
        return nativeDetectFaceWithBuffer(mNativeAddress, buffer, width, height, 4, bytesPerRow, mFaceResult.faceKeyPoints);
    }

    // 3-channel BGR frame buffer.
    public boolean detectFaceWithBGRBuffer(ByteBuffer buffer, int width, int height, int bytesPerRow) {
        return nativeDetectFaceWithBuffer(mNativeAddress, buffer, width, height, 3, bytesPerRow, mFaceResult.faceKeyPoints);
    }

    ////////////////////////////////////////

    //static
    private static native void nativeSetupTracker(String modelFile, String triFile, String conFile);

    //non-static
    protected native long nativeCreateFaceTracker();
    protected native void nativeRelease(long addr);
    protected native float[] nativeDetectFaceWithSimpleResult(long addr, Bitmap bmp, boolean drawFeature);

    protected native boolean nativeDetectFaceWithBuffer(long addr, ByteBuffer buffer, int w, int h, int channel, int bytesPerRow, FloatBuffer outputBuffer);


}

+ 143 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGEFrameRecorder.java

@@ -0,0 +1,143 @@
+package org.wysaid.nativePort;
+
+import java.nio.ShortBuffer;
+
+/**
+ * Created by wangyang on 15/7/29.
+ */
+
+//A wrapper for native class FrameRecorder
public class CGEFrameRecorder extends CGEFrameRenderer {

    static {
        NativeLibraryLoader.load();
    }

    public CGEFrameRecorder() {
        super(0); //avoid multiple creation. (the dummy base ctor does not allocate a renderer)
        mNativeAddress = nativeCreateRecorder();
    }

    ///////////////// Video recording ////////////////////

    // Starts recording with a default bit rate of 1650000 bps.
    public boolean startRecording(int fps, String filename) {
        return startRecording(fps, 1650000, filename);
    }

    // Starts recording to 'filename'; returns false when the native recorder
    // is missing or fails to start.
    public boolean startRecording(int fps, int bitRate, String filename) {
        if(mNativeAddress != 0)
            return nativeStartRecording(mNativeAddress, fps, filename, bitRate);
        return false;
    }

    public boolean isRecordingStarted() {
        if(mNativeAddress != 0)
            return nativeIsRecordingStarted(mNativeAddress);
        return false;
    }

    // Stops recording; pass false for 'shouldSave' to discard the output.
    public boolean endRecording(boolean shouldSave) {
        if(mNativeAddress != 0)
            return nativeEndRecording(mNativeAddress, shouldSave);
        return false;
    }

    public void pauseRecording() {
        if(mNativeAddress != 0)
            nativePauseRecording(mNativeAddress);
    }

    //Not completed by now
//    public boolean isRecordingPaused() {
//        if(mNativeAddress != 0)
//            return nativeIsRecordingPaused(mNativeAddress);
//        return false;
//    }
//
//    public boolean resumeRecording() {
//        if(mNativeAddress != 0)
//            return nativeResumeRecording(mNativeAddress);
//        return false;
//    }

    // Current recording timestamp. NOTE(review): units not visible here
    // (presumably seconds) - confirm against the native implementation.
    public double getTimestamp() {
        if(mNativeAddress != 0)
            return nativeGetTimestamp(mNativeAddress);
        return 0.0;
    }

    public double getVideoStreamtime() {
        if(mNativeAddress != 0)
            return nativeGetVideoStreamtime(mNativeAddress);
        return 0.0;
    }

    public double getAudioStreamtime() {
        if(mNativeAddress != 0)
            return nativeGetAudioStreamtime(mNativeAddress);
        return 0.0;
    }

    // Directory for intermediate recording files.
    public void setTempDir(String dir) {
        if(mNativeAddress != 0)
            nativeSetTempDir(mNativeAddress, dir);
    }

    // Must be called on the GPU (GL) rendering thread.
    public void recordImageFrame() {
        if(mNativeAddress != 0)
            nativeRecordImageFrame(mNativeAddress);
    }

    // The caller drives the loop: call repeatedly, once per audio buffer.
    public void recordAudioFrame(ShortBuffer audioBuffer, int bufferLen) {
        if(mNativeAddress != 0)
            nativeRecordAudioFrame(mNativeAddress, audioBuffer, bufferLen);
    }

    // Sets the global filter from a config rule string.
    public void setGlobalFilter(String config) {
        if(mNativeAddress != 0)
            nativeSetGlobalFilter(mNativeAddress, config);
    }

    public void setBeautifyFilter() {
        if(mNativeAddress != 0)
            nativeSetBeautifyFilter(mNativeAddress);
    }

    public void setGlobalFilterIntensity(float intensity) {
        if(mNativeAddress != 0)
            nativeSetGlobalFilterIntensity(mNativeAddress, intensity);
    }

    // NOTE(review): returns void although the name suggests a boolean query;
    // the native call's result (if any) is discarded. Kept as-is for API
    // compatibility - confirm the intended signature upstream.
    public void isGlobalFilterEnabled() {
        if(mNativeAddress != 0)
            nativeIsGlobalFilterEnabled(mNativeAddress);
    }

    /////////////////      private         ///////////////////////

    private native long nativeCreateRecorder();

    ///////////////// Video recording ////////////////////
    private native boolean nativeStartRecording(long holder, int fps, String filename, int bitRate);
    private native boolean nativeIsRecordingStarted(long holder);
    private native boolean nativeEndRecording(long holder, boolean shouldSave);
    private native void nativePauseRecording(long holder);
//    private native boolean nativeIsRecordingPaused(long holder);
//    private native boolean nativeResumeRecording(long holder);
    private native double nativeGetTimestamp(long holder);

    private native double nativeGetVideoStreamtime(long holder);
    private native double nativeGetAudioStreamtime(long holder);

    private native void nativeSetTempDir(long holder, String dir);

    private native void nativeRecordImageFrame(long holder);
    private native void nativeRecordAudioFrame(long holder, ShortBuffer audioBuffer, int bufferLen);

    private native void nativeSetGlobalFilter(long holder, String config);
    private native void nativeSetBeautifyFilter(long holder);
    private native void nativeSetGlobalFilterIntensity(long holder, float intensity);
    private native void nativeIsGlobalFilterEnabled(long holder);
}

+ 196 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGEFrameRenderer.java

@@ -0,0 +1,196 @@
+package org.wysaid.nativePort;
+
+/**
+ * Created by wangyang on 15/11/26.
+ */
+
+//A wrapper for native class FrameRecorder
public class CGEFrameRenderer {

    static {
        NativeLibraryLoader.load();
    }

    // Address of the native FrameRenderer; 0 after release().
    protected long mNativeAddress;

    public CGEFrameRenderer() {
        mNativeAddress = nativeCreateRenderer();
    }

    //Avoid 'nativeCreateRenderer' being called.
    // (Used by subclasses such as CGEFrameRecorder that create their own native object.)
    protected CGEFrameRenderer(int dummy) {

    }

    //srcWidth&srcHeight stands for the external_texture's width&height
    //dstWidth&dstHeight stands for the recording resolution (default 640*480)
    //dstWidth/dstHeight should not be changed after "init()" is called.
    //srcWidth/srcHeight could be changed by calling "srcResize" function.
    public boolean init(int srcWidth, int srcHeight, int dstWidth, int dstHeight) {
        if(mNativeAddress != 0)
            return nativeInit(mNativeAddress, srcWidth, srcHeight, dstWidth, dstHeight);
        return false;
    }

    //Will affect the framebuffer
    // Feeds a new camera frame (OES external texture + its transform matrix).
    public void update(int externalTexture, float[] transformMatrix) {
        if(mNativeAddress != 0)
            nativeUpdate(mNativeAddress, externalTexture, transformMatrix);
    }

    // Runs the filter processing step on the native side.
    public void runProc() {
        if(mNativeAddress != 0)
            nativeRunProc(mNativeAddress);
    }

    //Won't affect the framebuffer
    //the arguments means the viewport.
    public void render(int x, int y, int width, int height) {
        if(mNativeAddress != 0)
            nativeRender(mNativeAddress, x, y, width, height);
    }

    // Draws the cached (last processed) frame.
    public void drawCache() {
        if(mNativeAddress != 0)
            nativeDrawCache(mNativeAddress);
    }

    //set the rotation of the camera texture (radians)
    public void setSrcRotation(float rad) {
        if(mNativeAddress != 0)
            nativeSetSrcRotation(mNativeAddress, rad);
    }

    //set the flip/scaling for the camera texture
    public void setSrcFlipScale(float x, float y) {
        if(mNativeAddress != 0)
            nativeSetSrcFlipScale(mNativeAddress, x, y);
    }

    //set the render result's rotation (radians)
    public void setRenderRotation(float rad) {
        if(mNativeAddress != 0)
            nativeSetRenderRotation(mNativeAddress, rad);
    }

    //set the render result's flip/scaling
    public void setRenderFlipScale(float x, float y) {
        if(mNativeAddress != 0)
            nativeSetRenderFlipScale(mNativeAddress, x, y);
    }

    //initialize the filters with a config string
    // NOTE(review): "Width" in the method name looks like a typo for "With";
    // kept as-is for API compatibility.
    public void setFilterWidthConfig(final String config) {
        if(mNativeAddress != 0)
            nativeSetFilterWithConfig(mNativeAddress, config);
    }

    //set the mask rotation (radian)
    public void setMaskRotation(float rot) {
        if(mNativeAddress != 0)
            nativeSetMaskRotation(mNativeAddress, rot);
    }

    //set the mask flipscale
    public void setMaskFlipScale(float x, float y) {
        if(mNativeAddress != 0)
            nativeSetMaskFlipScale(mNativeAddress, x, y);

    }


    //set the intensity of the filter
    public void setFilterIntensity(float value) {
        if(mNativeAddress != 0)
            nativeSetFilterIntensity(mNativeAddress, value);
    }

    // Resizes the source (camera texture) dimensions; allowed after init().
    public void srcResize(int width, int height) {
        if(mNativeAddress != 0)
            nativeSrcResize(mNativeAddress, width, height);
    }

    // Frees the native renderer; further calls become no-ops.
    public  void release() {
        if(mNativeAddress != 0) {
            nativeRelease(mNativeAddress);
            mNativeAddress = 0;
        }
    }


    public void setMaskTexture(int texID, float aspectRatio) {
        if(mNativeAddress != 0)
            nativeSetMaskTexture(mNativeAddress, texID, aspectRatio);
    }

    public void setMaskTextureRatio(float aspectRatio) {
        if(mNativeAddress != 0)
            nativeSetMaskTextureRatio(mNativeAddress, aspectRatio);
    }

    // Returns the internal buffer texture id, or 0 when released.
    public int queryBufferTexture() {
        if(mNativeAddress != 0)
            return nativeQueryBufferTexture(mNativeAddress);
        return 0;
    }

    // NOTE(review): the methods below are NOT guarded against a released
    // (zero) native address, unlike the ones above - confirm callers never
    // use them after release().
    public long getImageHandler() {
        return nativeGetImageHandler(mNativeAddress);
    }

    public void bindImageFBO() {
        nativeBindImageFBO(mNativeAddress);
    }

    public void swapImageFBO() {
        nativeSwapBufferFBO(mNativeAddress);
    }

    //'nativeFilter' is the JNI-side address of a CGEImageFilterInterfaceAbstract (or subclass).
    public void processWithFilter(long nativeFilter) {
        nativeProcessWithFilter(mNativeAddress, nativeFilter);
    }

    //For injecting a custom native filter.
    public void setNativeFilter(long nativeFilter) {
        nativeSetFilterWithAddr(mNativeAddress, nativeFilter);
    }

    /////////////////      protected         ///////////////////////

    protected native long nativeCreateRenderer();
    protected native boolean nativeInit(long holder, int srcWidth, int srcHeight, int dstWidth, int dstHeight);
    protected native void nativeUpdate(long holder, int externalTexture, float[] transformMatrix);
    protected native void nativeRunProc(long holder);

    protected native void nativeRender(long holder, int x, int y, int width, int height);
    protected native void nativeDrawCache(long holder);

    protected native void nativeSetSrcRotation(long holder, float rad);
    protected native void nativeSetSrcFlipScale(long holder, float x, float y);
    protected native void nativeSetRenderRotation(long holder, float rad);
    protected native void nativeSetRenderFlipScale(long holder, float x, float y);
    protected native void nativeSetFilterWithConfig(long holder, String config);
    protected native void nativeSetFilterIntensity(long holder, float value);
    protected native void nativeSetMaskRotation(long holder, float value);
    protected native void nativeSetMaskFlipScale(long holder, float x, float y);

    protected native void nativeSrcResize(long holder, int width, int height);

    protected native void nativeSetMaskTexture(long holder, int texID, float aspectRatio);
    protected native void nativeSetMaskTextureRatio(long holder, float aspectRatio);

    protected native void nativeRelease(long holder);

    protected native int nativeQueryBufferTexture(long holder);

    protected native long nativeGetImageHandler(long holder);
    protected native void nativeBindImageFBO(long holder);
    protected native void nativeSwapBufferFBO(long holder);

    //Helper method.
    protected native void nativeProcessWithFilter(long holder, long nativeFilter);

    //Special usage - use with caution; misuse may cause runtime errors.
    protected native void nativeSetFilterWithAddr(long holder, long filter);
}

+ 155 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGEImageHandler.java

@@ -0,0 +1,155 @@
+package org.wysaid.nativePort;
+
+import android.graphics.Bitmap;
+
+/**
+ * Created by wysaid on 15/12/25.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ */
+
+//对 c++ native class 'CGEImageHandlerAndroid' 做映射
+
+public class CGEImageHandler {
+
+    static {
+        NativeLibraryLoader.load();
+    }
+
+    protected long mNativeAddress;
+
+    public CGEImageHandler() {
+        mNativeAddress = nativeCreateHandler();
+    }
+
+    public boolean initWithBitmap(Bitmap bmp) {
+        if(bmp == null)
+            return false;
+
+        if(bmp.getConfig() != Bitmap.Config.ARGB_8888) {
+            bmp = bmp.copy(Bitmap.Config.ARGB_8888, false);
+        }
+
+        return nativeInitWithBitmap(mNativeAddress, bmp);
+    }
+
+    public boolean initWithSize(int width, int height) {
+        return nativeInitWithSize(mNativeAddress, width, height);
+    }
+
+    public Bitmap getResultBitmap() {
+        return nativeGetResultBitmap(mNativeAddress);
+    }
+
+    public void setDrawerRotation(float rad) {
+        nativeSetDrawerRotation(mNativeAddress, rad);
+    }
+
+    public void setDrawerFlipScale(float x, float y) {
+        nativeSetDrawerFlipScale(mNativeAddress, x, y);
+    }
+
+    public void setFilterWithConfig(String config) {
+        nativeSetFilterWithConfig(mNativeAddress, config, true, true);
+    }
+
+    //config: The filter rule string. Pass null for config to clear all filters.
+    //shouldClearOlder: The last filter will be cleared if it's true.
+    //    There may be memory leaks if you pass false for 'shouldClearOlder' and you have not cleared it by yourself.
+    //shouldProcess: This marks if the filter should be run right now.
+    //    The result will not change if you pass false for 'shouldProcess' until you call 'processFilters'.
+    public void setFilterWithConfig(String config, boolean shouldClearOlder, boolean shouldProcess) {
+        nativeSetFilterWithConfig(mNativeAddress, config, shouldClearOlder, shouldProcess);
+    }
+
+    public void setFilterIntensity(float intensity) {
+        nativeSetFilterIntensity(mNativeAddress, intensity, true);
+    }
+
+    //intensity: filter intensity.
+    //shouldProcess: This marks if the filter should be run right now.
+    //    The result will not change if you pass false for 'shouldProcess' until you call 'processFilters'.
+    public void setFilterIntensity(float intensity, boolean shouldProcess) {
+        nativeSetFilterIntensity(mNativeAddress, intensity, shouldProcess);
+    }
+
+    //intensity: filter intensity.
+    //index: Only the intensity of the filter on the given index will be changed.
+    //shouldProcess: This marks if the filter should be run right now.
+    //    The result will not change if you pass false for 'shouldProcess' until you call 'processFilters'.
+    //return value: marks if this function worked. (It will fail when the index is out of range.)
+    //    e.g. If you're running "@adjust contrast 0.5 @adjust brightness 1",
+    //       Pass 0 for index to set the intensity of "@adjust contrast 0.5", the return value is true.
+    //       Pass 1 for index to set the intensity of "@adjust brightness 1", the return value is true.
+    //       Otherwise the return value is false.
+    public boolean setFilterIntensityAtIndex(float intensity, int index, boolean shouldProcess) {
+        return nativeSetFilterIntensityAtIndex(mNativeAddress, intensity, index, shouldProcess);
+    }
+
+    public void drawResult() {
+        nativeDrawResult(mNativeAddress);
+    }
+
+    //绑定handler输出的FBO
+    public void bindTargetFBO() {
+        nativeBindTargetFBO(mNativeAddress);
+    }
+
+    //绑定handler输出的FBO 并且设置viewport为FBO大小
+    public void setAsTarget() {
+        nativeSetAsTarget(mNativeAddress);
+    }
+
+    //交换缓存
+    public void swapBufferFBO() {
+        nativeSwapBufferFBO(mNativeAddress);
+    }
+
+    //恢复图像
+    public void revertImage() {
+        nativeRevertImage(mNativeAddress);
+    }
+
+    public void processFilters() {
+        nativeProcessingFilters(mNativeAddress);
+    }
+
+    public void processWithFilter(long filterAddress) {
+        nativeProcessWithFilter(mNativeAddress, filterAddress);
+    }
+
+    public void release() {
+        if(mNativeAddress != 0) {
+            nativeRelease(mNativeAddress);
+            mNativeAddress = 0;
+        }
+    }
+
+    public void setFilterWithAddres(long filter) {
+        nativeSetFilterWithAddress(mNativeAddress, filter);
+    }
+
+    /////////////////      protected         ///////////////////////
+
+    protected native long nativeCreateHandler();
+    protected native boolean nativeInitWithBitmap(long holder, Bitmap bmp);
+    protected native boolean nativeInitWithSize(long holder, int width, int height);
+    protected native Bitmap nativeGetResultBitmap(long holder);
+
+    protected native void nativeSetDrawerRotation(long holder, float rad);
+    protected native void nativeSetDrawerFlipScale(long holder, float x, float y);
+    protected native boolean nativeSetFilterWithConfig(long holder, String config, boolean shouldCleanOlder, boolean shouldProcess);
+    protected native void nativeSetFilterWithAddress(long holder, long filter);
+    protected native void nativeSetFilterIntensity(long holder, float value, boolean shouldProcess);
+    protected native boolean nativeSetFilterIntensityAtIndex(long holder, float value, int index, boolean shouldProcess);
+
+    protected native void nativeDrawResult(long holder);
+    protected native void nativeBindTargetFBO(long holder);
+    protected native void nativeSetAsTarget(long holder);
+    protected native void nativeSwapBufferFBO(long holder);
+    protected native void nativeRevertImage(long holder);
+    protected native void nativeProcessingFilters(long holder);
+    protected native void nativeProcessWithFilter(long holder, long filterAddress);
+
+    protected native void nativeRelease(long holder);
+}

+ 177 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/CGENativeLibrary.java

@@ -0,0 +1,177 @@
+/**
+ * Created by wysaid on 15/7/8.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ */
+
+package org.wysaid.nativePort;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+
+public class CGENativeLibrary {
+
+    static {
+        NativeLibraryLoader.load();
+    }
+
+    public enum TextureBlendMode
+    {
+        CGE_BLEND_MIX,            // 0 正常 - "result.rgb = src.rgb * (1 - alpha) + dst.rgb * alpha, alpha = intensity * dst.a, result.a = src.a" - Because android is using premultiplied bitmap&texture, 'CGE_BLEND_MIX' may get a result of twice the strength of the blendImage's alpha channel(The result would be darker than you want). For common usage of mix blending, please use 'CGE_BLEND_ADDREV'.
+        CGE_BLEND_DISSOLVE,       // 1 溶解
+
+        CGE_BLEND_DARKEN,         // 2 变暗
+        CGE_BLEND_MULTIPLY,       // 3 正片叠底
+        CGE_BLEND_COLORBURN,      // 4 颜色加深
+        CGE_BLEND_LINEARBURN,     // 5 线性加深
+        CGE_BLEND_DARKER_COLOR,   // 6 深色
+
+        CGE_BLEND_LIGHTEN,        // 7 变亮
+        CGE_BLEND_SCREEN,         // 8 滤色
+        CGE_BLEND_COLORDODGE,     // 9 颜色减淡
+        CGE_BLEND_LINEARDODGE,    // 10 线性减淡
+        CGE_BLEND_LIGHTERCOLOR,  // 11 浅色
+
+        CGE_BLEND_OVERLAY,        // 12 叠加
+        CGE_BLEND_SOFTLIGHT,      // 13 柔光
+        CGE_BLEND_HARDLIGHT,      // 14 强光
+        CGE_BLEND_VIVIDLIGHT,     // 15 亮光
+        CGE_BLEND_LINEARLIGHT,    // 16 线性光
+        CGE_BLEND_PINLIGHT,       // 17 点光
+        CGE_BLEND_HARDMIX,        // 18 实色混合
+
+        CGE_BLEND_DIFFERENCE,     // 19 差值
+        CGE_BLEND_EXCLUDE,        // 20 排除
+        CGE_BLEND_SUBTRACT,       // 21 减去
+        CGE_BLEND_DIVIDE,         // 22 划分
+
+        CGE_BLEND_HUE,            // 23 色相
+        CGE_BLEND_SATURATION,     // 24 饱和度
+        CGE_BLEND_COLOR,          // 25 颜色
+        CGE_BLEND_LUMINOSITY,     // 26 明度
+
+        /////////////    More blend mode below (You can't see them in Adobe Photoshop)    //////////////
+
+        CGE_BLEND_ADD,			  // 27
+        CGE_BLEND_ADDREV,         // 28 - A fix for premultiplied BLEND_MIX
+        CGE_BLEND_COLORBW,		  // 29
+
+        /////////////    More blend mode above     //////////////
+
+        CGE_BLEND_TYPE_MAX_NUM //Its value defines the max num of blend.
+    };
+
+    public interface LoadImageCallback {
+        Bitmap loadImage(String name, Object arg);
+        void loadImageOK(Bitmap bmp, Object arg);
+    }
+
+    static LoadImageCallback loadImageCallback;
+    static Object callbackArg;
+
+    public static void setLoadImageCallback(LoadImageCallback callback, Object arg) {
+        loadImageCallback = callback;
+        callbackArg = arg;
+    }
+
+    public static class TextureResult {
+        int texID;
+        int width, height;
+    }
+
+    //will be called from jni.
+    public static TextureResult loadTextureByName(String sourceName) {
+        if(loadImageCallback == null) {
+            Log.i(Common.LOG_TAG, "The loading callback is not set!");
+            return null;
+        }
+
+        Bitmap bmp = loadImageCallback.loadImage(sourceName, callbackArg);
+
+        if(bmp == null) {
+            return null;
+        }
+
+        TextureResult result = loadTextureByBitmap(bmp);
+
+        loadImageCallback.loadImageOK(bmp, callbackArg);
+        return result;
+    }
+
+    //May be called from jni.
+    public static TextureResult loadTextureByBitmap(Bitmap bmp) {
+
+        if(bmp == null) {
+            return null;
+        }
+
+        TextureResult result = new TextureResult();
+
+        result.texID = Common.genNormalTextureID(bmp);
+        result.width = bmp.getWidth();
+        result.height = bmp.getHeight();
+        return result;
+    }
+
+    public static TextureResult loadTextureByFile(String fileName) {
+        Bitmap bmp = BitmapFactory.decodeFile(fileName);
+        TextureResult result = loadTextureByBitmap(bmp);
+        bmp.recycle();
+        return result;
+    }
+
+    public static Bitmap filterImage_MultipleEffects(Bitmap bmp, String config, float intensity) {
+        if(config == null || config.length() == 0) {
+            return bmp;
+        }
+        return cgeFilterImage_MultipleEffects(bmp, config, intensity);
+    }
+
+    public static void filterImage_MultipleEffectsWriteBack(Bitmap bmp, String config, float intensity) {
+        if(config != null && config.length() != 0) {
+            cgeFilterImage_MultipleEffectsWriteBack(bmp, config, intensity);
+        }
+    }
+
+    public enum BlendFilterType {
+        BLEND_NORMAL,
+        BLEND_KEEP_RATIO,
+        BLEND_TILE,
+    }
+
+    //带纹理的 blendFilter 较为特殊, 增加单独处理方法, 第二个参数 texID 表示将要使用到的纹理id
+    public static long createBlendFilter(TextureBlendMode blendMode, int texID, int texWidth, int texHeight, BlendFilterType blendFilterType, float intensity) {
+        return cgeCreateBlendFilter(blendMode.ordinal(), texID, texWidth, texHeight, blendFilterType.ordinal(), intensity);
+    }
+
+    //////////////////////   native method  //////////////////
+
+    // 多重特效滤镜, 提供配置文件内容直接进行, 返回相同大小的bitmap。
+    // intensity 表示滤镜强度 [0, 1]
+    public static native Bitmap cgeFilterImage_MultipleEffects(Bitmap bmp, String config, float intensity);
+
+    // 同上, 结果直接写回传入bitmap, 无返回值
+    public static native void cgeFilterImage_MultipleEffectsWriteBack(Bitmap bmp, String config, float intensity);
+
+    ////////////////////////////////////
+
+    public static native long cgeCreateFilterWithConfig(String config, float intensity);
+    public static native void cgeDeleteFilterWithAddress(long address);
+    public static native long cgeCreateBlendFilter(int blendMode, int texID, int texWidth, int texHeight, int blendFilterType, float intensity);
+
+    ////////////////////////////////////
+    // demo for custom filter, see more at `customFilter` of `jni/source`.
+    // `hasContext` marks if the function is called width gl context already bind. (Extra gl context may be created if `hasContext` is false)
+    // `intensity`:  0 for origin, 1 for most. (It can be more than 1 when `useWrapper` is true)
+    // `useWrapper` marks if you want to use a wrapper. (The wrapper will receive the intensity, and do interpolation between the origin and the result when it's true)
+    public static native Bitmap cgeFilterImageWithCustomFilter(Bitmap bmp, int index, float intensity, boolean hasContext, boolean useWrapper);
+    // `index` marks which you want from your custom filter list.
+    // `intensity`:  0 for origin, 1 for most. (It can be more than 1 when `useWrapper` is true)
+    // `useWrapper` marks if you want to use a wrapper. (The wrapper will receive the intensity, and do interpolation between the origin and the result when it's true)
+    public static native long cgeCreateCustomNativeFilter(int index, float intensity, boolean useWrapper);
+    public static native int cgeGetCustomFilterNum();
+
+}

+ 15 - 0
media/cge_library/src/main/java/org/wysaid/nativePort/NativeLibraryLoader.java

@@ -0,0 +1,15 @@
+package org.wysaid.nativePort;
+
+/**
+ * Created by wangyang on 15/7/30.
+ */
/**
 * Loads the native libraries that back the org.wysaid JNI classes and
 * registers FFmpeg components once they are loaded.
 */
public class NativeLibraryLoader {

    // NOTE(review): the load order appears intentional (CGE/CGEExt presumably
    // depend on symbols from ffmpeg) -- confirm before reordering these calls.
    public static void load() {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("CGE");
        System.loadLibrary("CGEExt");
        CGEFFmpegNativeLibrary.avRegisterAll();
    }

}

+ 154 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRenderer.java

@@ -0,0 +1,154 @@
+package org.wysaid.texUtils;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+import org.wysaid.common.ProgramObject;
+
+import java.nio.FloatBuffer;
+
+/**
+ * Created by wangyang on 15/7/23.
+ */
+public abstract class TextureRenderer {
+
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    //初始化program 等
+    public abstract boolean init(boolean isExternalOES);
+
+    //为了保证GLContext 的对应, 不能等待finalize
+    public void release() {
+        if(mVertexBuffer != 0) {
+            GLES20.glDeleteBuffers(1, new int[]{mVertexBuffer}, 0);
+            mVertexBuffer = 0;
+        }
+
+        if(mProgram != null) {
+            mProgram.release();
+            mProgram = null;
+        }
+    }
+
+    public abstract void renderTexture(int texID, Viewport viewport);
+
+    public abstract void setTextureSize(int width, int height);
+
+    public abstract String getVertexShaderString();
+
+    public abstract String getFragmentShaderString();
+
+    public static class Viewport {
+        public int x, y;
+        public int width, height;
+        public Viewport() {}
+        public Viewport(int _x, int _y, int _width, int _height) {
+            x = _x;
+            y = _y;
+            width = _width;
+            height = _height;
+        }
+    }
+
+    ////////////////////////////////////////////////////////////////
+
+    protected static final String REQUIRE_STRING_EXTERNAL_OES = "#extension GL_OES_EGL_image_external : require\n";
+    protected static final String SAMPLER2D_VAR_EXTERNAL_OES = "samplerExternalOES";
+    protected static final String SAMPLER2D_VAR = "sampler2D";
+
+    protected static final String vshDrawDefault = "" +
+            "attribute vec2 vPosition;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform mat4 transform;\n" +
+            "uniform mat2 rotation;\n" +
+            "uniform vec2 flipScale;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+            "   vec2 coord = flipScale * (vPosition / 2.0 * rotation) + 0.5;\n" +
+            "   texCoord = (transform * vec4(coord, 0.0, 1.0)).xy;\n" +
+            "}";
+
+
+    protected static final String POSITION_NAME = "vPosition";
+    protected static final String ROTATION_NAME = "rotation";
+    protected static final String FLIPSCALE_NAME = "flipScale";
+    protected static final String TRANSFORM_NAME = "transform";
+
+    public static final float[] vertices = {-1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f};
+    public static final int DRAW_FUNCTION = GLES20.GL_TRIANGLE_FAN;
+
+    protected int TEXTURE_2D_BINDABLE;
+
+    protected int mVertexBuffer;
+    protected ProgramObject mProgram;
+
+    protected int mTextureWidth, mTextureHeight;
+
+    protected int mRotationLoc, mFlipScaleLoc, mTransformLoc;
+
+    //设置界面旋转弧度 -- 录像时一般是 PI / 2 (也就是 90°) 的整数倍
+    public void setRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+
+        float rot[] = new float[] {
+                cosRad, sinRad,
+                -sinRad, cosRad
+        };
+
+        assert mProgram != null : "setRotation must not be called before init!";
+
+        mProgram.bind();
+        GLES20.glUniformMatrix2fv(mRotationLoc, 1, false, rot, 0);
+    }
+
+    public void setFlipscale(float x, float y) {
+        mProgram.bind();
+        GLES20.glUniform2f(mFlipScaleLoc, x, y);
+    }
+
+    public void setTransform(float[] matrix) {
+        mProgram.bind();
+        GLES20.glUniformMatrix4fv(mTransformLoc, 1, false, matrix, 0);
+    }
+
+    protected boolean setProgramDefault(String vsh, String fsh, boolean isExternalOES) {
+        TEXTURE_2D_BINDABLE = isExternalOES ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D;
+        mProgram = new ProgramObject();
+        mProgram.bindAttribLocation(POSITION_NAME, 0);
+        String fshResult = (isExternalOES ? REQUIRE_STRING_EXTERNAL_OES : "") + String.format(fsh, isExternalOES ? SAMPLER2D_VAR_EXTERNAL_OES : SAMPLER2D_VAR);
+        if(mProgram.init(vsh, fshResult)) {
+            mRotationLoc = mProgram.getUniformLoc(ROTATION_NAME);
+            mFlipScaleLoc = mProgram.getUniformLoc(FLIPSCALE_NAME);
+            mTransformLoc = mProgram.getUniformLoc(TRANSFORM_NAME);
+            setRotation(0.0f);
+            setFlipscale(1.0f, 1.0f);
+            setTransform(new float[]{
+                    1.0f, 0.0f, 0.0f, 0.0f,
+                    0.0f, 1.0f, 0.0f, 0.0f,
+                    0.0f, 0.0f, 1.0f, 0.0f,
+                    0.0f, 0.0f, 0.0f, 1.0f
+            });
+            return true;
+        }
+        return false;
+    }
+
+    protected void defaultInitialize() {
+        int[] vertexBuffer = new int[1];
+        GLES20.glGenBuffers(1, vertexBuffer, 0);
+        mVertexBuffer = vertexBuffer[0];
+
+        if(mVertexBuffer == 0) {
+            Log.e(LOG_TAG, "Invalid VertexBuffer!");
+        }
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBuffer);
+        FloatBuffer buffer = FloatBuffer.allocate(vertices.length);
+        buffer.put(vertices).position(0);
+        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, 32, buffer, GLES20.GL_STATIC_DRAW);
+    }
+}

+ 206 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererBlur.java

@@ -0,0 +1,206 @@
+package org.wysaid.texUtils;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import org.wysaid.common.FrameBufferObject;
+import org.wysaid.common.ProgramObject;
+
+/**
+ * Created by wangyang on 15/7/23.
+ */
+public class TextureRendererBlur extends TextureRendererDrawOrigin {
+
+    private static final String vshBlur = vshDrawDefault;
+
+    private static final String vshBlurCache = "" +
+            "attribute vec2 vPosition;\n" +
+            "varying vec2 texCoord;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+            "   texCoord = vPosition / 2.0 + 0.5;\n" +
+            "}";
+
+    private static final String fshBlur = "" +
+            "precision highp float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform %s inputImageTexture;\n" +
+            "uniform vec2 samplerSteps;\n" +
+
+            "const int samplerRadius = 5;\n" +
+            "const float samplerRadiusFloat = 5.0;\n" +
+
+            "float random(vec2 seed)\n" +
+            "{\n" +
+            "  return fract(sin(dot(seed ,vec2(12.9898,78.233))) * 43758.5453);\n" +
+            "}\n" +
+
+            "void main()\n" +
+            "{\n" +
+            "  vec3 resultColor = vec3(0.0);\n" +
+            "  float blurPixels = 0.0;\n" +
+            "  float offset = random(texCoord) - 0.5;\n" +
+            "  \n" +
+            "  for(int i = -samplerRadius; i <= samplerRadius; ++i)\n" +
+            "  {\n" +
+            "    float percent = (float(i) + offset) / samplerRadiusFloat;\n" +
+            "    float weight = 1.0 - abs(percent);\n" +
+            "    vec2 coord = texCoord + samplerSteps * percent;\n" +
+            "    resultColor += texture2D(inputImageTexture, coord).rgb * weight;\n" +
+            "    blurPixels += weight;\n" +
+            "  }\n" +
+
+            "  gl_FragColor = vec4(resultColor / blurPixels, 1.0);\n" +
+//            "  gl_FragColor.r = texture2D(inputImageTexture, texCoord).r;\n" +
+            "}";
+
+    protected int mTexCache = 0;
+
+    protected FrameBufferObject mFBO;
+
+    protected int mCacheTexWidth, mCacheTexHeight;
+
+    private static final String SAMPLER_STEPS = "samplerSteps";
+
+    private int mStepsLoc = 0;
+    private int mStepsLocCache = 0;
+    private float mSamplerScale = 1.0f;
+
+    private ProgramObject mProgramDrawCache;
+
+    public static TextureRendererBlur create(boolean isExternalOES) {
+        TextureRendererBlur renderer = new TextureRendererBlur();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            return null;
+        }
+        return renderer;
+    }
+
+    public void setSamplerRadius(float radius) {
+        mSamplerScale = radius / 4.0f;
+    }
+
+
+    //TODO 优化非external_OES逻辑, cache和原始相同
+    @Override
+    public boolean init(boolean isExternalOES) {
+        TEXTURE_2D_BINDABLE = isExternalOES ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D;
+        final String fshBlurExtOES = (isExternalOES ? REQUIRE_STRING_EXTERNAL_OES : "") + String.format(fshBlur, isExternalOES ? SAMPLER2D_VAR_EXTERNAL_OES : SAMPLER2D_VAR);
+        final String fshBlurTex2D = String.format(fshBlur, SAMPLER2D_VAR);
+        mFBO = new FrameBufferObject();
+
+        mProgramDrawCache = new ProgramObject();
+        mProgramDrawCache.bindAttribLocation(POSITION_NAME, 0);
+
+        if(!mProgramDrawCache.init(vshBlurCache, fshBlurExtOES)) {
+            Log.e(LOG_TAG, "blur filter program init failed - 1...");
+            return false;
+        }
+
+        mProgramDrawCache.bind();
+        mStepsLocCache = mProgramDrawCache.getUniformLoc(SAMPLER_STEPS);
+
+        mProgram = new ProgramObject();
+        mProgram.bindAttribLocation(POSITION_NAME, 0);
+
+        if(!mProgram.init(vshBlur, fshBlurTex2D)) {
+            Log.e(LOG_TAG, "blur filter program init failed - 2...");
+            return false;
+        }
+
+        mProgram.bind();
+        mStepsLoc = mProgram.getUniformLoc(SAMPLER_STEPS);
+        setRotation(0.0f);
+
+        return true;
+    }
+
+    @Override
+    public void release() {
+        if(mProgramDrawCache != mProgram)
+            mProgramDrawCache.release();
+        super.release();
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        mFBO.release();
+        mFBO = null;
+        GLES20.glDeleteTextures(1, new int[]{mTexCache}, 0);
+        mTexCache = 0;
+        mProgramDrawCache = null;
+    }
+
+    @Override
+    public void renderTexture(int texID, Viewport viewport) {
+
+        if(mTexCache == 0 || mCacheTexWidth != mTextureWidth || mCacheTexHeight != mTextureHeight) {
+            resetCacheTexture();
+        }
+
+        mFBO.bind();
+
+        GLES20.glViewport(0, 0, mCacheTexWidth, mCacheTexHeight);
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(TEXTURE_2D_BINDABLE, texID);
+
+        mProgramDrawCache.bind();
+        GLES20.glUniform2f(mStepsLocCache, (1.0f / mTextureWidth) * mSamplerScale, 0.0f);
+
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+
+        if(viewport != null)
+            GLES20.glViewport(viewport.x, viewport.y, viewport.width, viewport.height);
+
+        mProgram.bind();
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexCache);
+        GLES20.glUniform2f(mStepsLoc, 0.0f, (1.0f / mCacheTexWidth) * mSamplerScale);
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+    }
+
+    @Override
+    public void setTextureSize(int w, int h) {
+        super.setTextureSize(w, h);
+    }
+
+    @Override
+    public String getVertexShaderString() {
+        return vshBlur;
+    }
+
+    @Override
+    public String getFragmentShaderString() {
+        return fshBlur;
+    }
+
+
+    protected void resetCacheTexture() {
+        Log.i(LOG_TAG, "resetCacheTexture...");
+        mCacheTexWidth = mTextureWidth;
+        mCacheTexHeight = mTextureHeight;
+        if(mTexCache == 0)
+        {
+            int[] texCache = new int[1];
+            GLES20.glGenTextures(1, texCache, 0);
+            mTexCache = texCache[0];
+        }
+
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexCache);
+
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mCacheTexWidth, mCacheTexHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+        mFBO.bindTexture(mTexCache);
+    }
+
+}

+ 84 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererDrawOrigin.java

@@ -0,0 +1,84 @@
+package org.wysaid.texUtils;
+
+import android.opengl.GLES20;
+
+/**
+ * Created by wangyang on 15/7/23.
+ */
+public class TextureRendererDrawOrigin extends TextureRenderer {
+
+    private static final String fshDrawOrigin = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform %s inputImageTexture;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_FragColor = texture2D(inputImageTexture, texCoord);\n" +
+            "}";
+
+    //初始化默认的顶点序列等。
+    protected TextureRendererDrawOrigin() {
+        defaultInitialize();
+    }
+
+    protected TextureRendererDrawOrigin(boolean noDefaultInitialize) {
+        if(!noDefaultInitialize)
+            defaultInitialize();
+    }
+
+    public static TextureRendererDrawOrigin create(boolean isExternalOES) {
+        TextureRendererDrawOrigin renderer = new TextureRendererDrawOrigin();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            return null;
+        }
+        return renderer;
+    }
+
+    @Override
+    public boolean init(boolean isExternalOES) {
+        return setProgramDefault(getVertexShaderString(), getFragmentShaderString(), isExternalOES);
+    }
+
+//    @Override
+//    public void release() {
+//        GLES20.glDeleteBuffers(1, new int[]{mVertexBuffer}, 0);
+//        mVertexBuffer = 0;
+//        mProgram.release();
+//        mProgram = null;
+//    }
+
+    @Override
+    public void renderTexture(int texID, Viewport viewport) {
+
+        if(viewport != null) {
+            GLES20.glViewport(viewport.x, viewport.y, viewport.width, viewport.height);
+        }
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(TEXTURE_2D_BINDABLE, texID);
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+
+        mProgram.bind();
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+    }
+
+    @Override
+    public void setTextureSize(int w, int h) {
+        mTextureWidth = w;
+        mTextureHeight = h;
+    }
+
+    @Override
+    public String getVertexShaderString() {
+        return vshDrawDefault;
+    }
+
+    @Override
+    public String getFragmentShaderString() {
+        return fshDrawOrigin;
+    }
+}

+ 82 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererEdge.java

@@ -0,0 +1,82 @@
+package org.wysaid.texUtils;
+
+/**
+ * Created by wangyang on 15/7/23.
+ */
+public class TextureRendererEdge extends TextureRendererEmboss {
+
+        private static final String vshEdge = "" +
+                "attribute vec2 vPosition;\n" +
+                "varying vec2 texCoord;\n" +
+                "varying vec2 coords[8];\n" +
+
+                "uniform mat4 transform;\n" +
+                "uniform mat2 rotation;\n" +
+                "uniform vec2 flipScale;\n" +
+                "uniform vec2 samplerSteps;\n" +
+
+                "const float stride = 2.0;\n" +
+
+                "void main()\n" +
+                "{\n" +
+                "  gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+                "  vec2 coord = flipScale * (vPosition / 2.0 * rotation) + 0.5;\n" +
+                "  texCoord = (transform * vec4(coord, 0.0, 1.0)).xy;\n" +
+
+                "  coords[0] = texCoord - samplerSteps * stride;\n" +
+                "  coords[1] = texCoord + vec2(0.0, -samplerSteps.y) * stride;\n" +
+                "  coords[2] = texCoord + vec2(samplerSteps.x, -samplerSteps.y) * stride;\n" +
+
+                "  coords[3] = texCoord - vec2(samplerSteps.x, 0.0) * stride;\n" +
+                "  coords[4] = texCoord + vec2(samplerSteps.x, 0.0) * stride;\n" +
+
+                "  coords[5] = texCoord + vec2(-samplerSteps.x, samplerSteps.y) * stride;\n" +
+                "  coords[6] = texCoord + vec2(0.0, samplerSteps.y) * stride;\n" +
+                "  coords[7] = texCoord + vec2(samplerSteps.x, samplerSteps.y) * stride;\n" +
+
+                "}";
+
+        private static final String fshEdge = "" +
+                "precision mediump float;\n" +
+                "varying vec2 texCoord;\n" +
+                "uniform %s inputImageTexture;\n" +
+                "varying vec2 coords[8];\n" +
+
+                "void main()\n" +
+                "{\n" +
+                "  vec3 colors[8];\n" +
+
+                "  for(int i = 0; i < 8; ++i)\n" +
+                "  {\n" +
+                "    colors[i] = texture2D(inputImageTexture, coords[i]).rgb;\n" +
+                "  }\n" +
+
+                "  vec4 src = texture2D(inputImageTexture, texCoord);\n" +
+
+                "  vec3 h = -colors[0] - 2.0 * colors[1] - colors[2] + colors[5] + 2.0 * colors[6] + colors[7];\n" +
+                "  vec3 v = -colors[0] + colors[2] - 2.0 * colors[3] + 2.0 * colors[4] - colors[5] + colors[7];\n" +
+
+                "  gl_FragColor = vec4(sqrt(h * h + v * v), 1.0);\n" +
+                "}";
+
+
+        public static TextureRendererEdge create(boolean isExternalOES) {
+                TextureRendererEdge renderer = new TextureRendererEdge();
+                if(!renderer.init(isExternalOES)) {
+                        renderer.release();
+                        return null;
+                }
+                return renderer;
+        }
+
+        @Override
+        public String getFragmentShaderString() {
+                return fshEdge;
+        }
+
+        @Override
+        public String getVertexShaderString() {
+            return vshEdge;
+    }
+
+}

+ 54 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererEmboss.java

@@ -0,0 +1,54 @@
+package org.wysaid.texUtils;
+
+/**
+ * Created by wangyang on 15/7/23.
+ */
+public class TextureRendererEmboss extends TextureRendererDrawOrigin {
+    private static final String fshEmboss = "" +
+            "precision mediump float;\n" +
+            "uniform %s inputImageTexture;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform vec2 samplerSteps;\n" +
+            "const float stride = 2.0;\n" +
+            "const vec2 norm = vec2(0.72, 0.72);\n" +
+            "void main() {\n" +
+            "  vec4 src = texture2D(inputImageTexture, texCoord);\n" +
+            "  vec3 tmp = texture2D(inputImageTexture, texCoord + samplerSteps * stride * norm).rgb - src.rgb + 0.5;\n" +
+            "  float f = (tmp.r + tmp.g + tmp.b) / 3.0;\n" +
+            "  gl_FragColor = vec4(f, f, f, src.a);\n" +
+            "}";
+
+    protected static final String SAMPLER_STEPS = "samplerSteps";
+
+    public static TextureRendererEmboss create(boolean isExternalOES) {
+        TextureRendererEmboss renderer = new TextureRendererEmboss();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            return null;
+        }
+        return renderer;
+    }
+
+    @Override
+    public boolean init(boolean isExternalOES) {
+        if(setProgramDefault(getVertexShaderString(), getFragmentShaderString(), isExternalOES)) {
+            mProgram.bind();
+            mProgram.sendUniformf(SAMPLER_STEPS, 1.0f / 640.0f, 1.0f / 640.0f);
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public void setTextureSize(int w, int h) {
+        super.setTextureSize(w, h);
+        mProgram.bind();
+        mProgram.sendUniformf(SAMPLER_STEPS, 1.0f / w, 1.0f / h);
+    }
+
+
+    @Override
+    public String getFragmentShaderString() {
+        return fshEmboss;
+    }
+}

+ 284 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererLerpBlur.java

@@ -0,0 +1,284 @@
+package org.wysaid.texUtils;
+
+import android.opengl.GLES20;
+import android.util.Log;
+
+import org.wysaid.common.FrameBufferObject;
+import org.wysaid.common.ProgramObject;
+
+/**
+ * Created by wangyang on 15/7/24.
+ */
+public class TextureRendererLerpBlur extends TextureRendererDrawOrigin {
+
+    private static final String vshUpScale = "" +
+            "attribute vec2 vPosition;\n" +
+            "varying vec2 texCoord;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+            "   texCoord = vPosition / 2.0 + 0.5;\n" +
+            "}";
+
+    private static final String fshUpScale = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform sampler2D inputImageTexture;\n" +
+
+            "void main()\n" +
+            "{\n" +
+            "   gl_FragColor = texture2D(inputImageTexture, texCoord);\n" +
+            "}";
+
+    private static final String vshBlurUpScale = "" +
+            "attribute vec2 vPosition;\n" +
+            "varying vec2 texCoords[5];\n" +
+            "uniform vec2 samplerSteps;\n" +
+            "\n" +
+            "void main()\n" +
+            "{\n" +
+            "  gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+            "  vec2 texCoord = vPosition / 2.0 + 0.5;\n" +
+            "  texCoords[0] = texCoord - 2.0 * samplerSteps;\n" +
+            "  texCoords[1] = texCoord - 1.0 * samplerSteps;\n" +
+            "  texCoords[2] = texCoord;\n" +
+            "  texCoords[3] = texCoord + 1.0 * samplerSteps;\n" +
+            "  texCoords[4] = texCoord + 2.0 * samplerSteps;\n" +
+            "}";
+
+    private static final String fshBlurUpScale = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoords[5];\n" +
+            "uniform sampler2D inputImageTexture;\n" +
+            "\n" +
+            "void main()\n" +
+            "{\n" +
+            "  vec3 color = texture2D(inputImageTexture, texCoords[0]).rgb * 0.1;\n" +
+            "  color += texture2D(inputImageTexture, texCoords[1]).rgb * 0.2;\n" +
+            "  color += texture2D(inputImageTexture, texCoords[2]).rgb * 0.4;\n" +
+            "  color += texture2D(inputImageTexture, texCoords[3]).rgb * 0.2;\n" +
+            "  color += texture2D(inputImageTexture, texCoords[4]).rgb * 0.1;\n" +
+            "\n" +
+            "  gl_FragColor = vec4(color, 1.0);\n" +
+            "}";
+
+    private static final String vshBlurCache = "" +
+            "attribute vec2 vPosition;\n" +
+            "varying vec2 texCoord;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+            "   texCoord = vPosition / 2.0 + 0.5;\n" +
+            "}";
+
+    private static final String fshBlur = "" +
+            "precision highp float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform sampler2D inputImageTexture;\n" +
+            "uniform vec2 samplerSteps;\n" +
+
+            "const int samplerRadius = 5;\n" +
+            "const float samplerRadiusFloat = 5.0;\n" +
+
+            "float random(vec2 seed)\n" +
+            "{\n" +
+            "  return fract(sin(dot(seed ,vec2(12.9898,78.233))) * 43758.5453);\n" +
+            "}\n" +
+
+            "void main()\n" +
+            "{\n" +
+            "  vec3 resultColor = vec3(0.0);\n" +
+            "  float blurPixels = 0.0;\n" +
+            "  float offset = random(texCoord) - 0.5;\n" +
+            "  \n" +
+            "  for(int i = -samplerRadius; i <= samplerRadius; ++i)\n" +
+            "  {\n" +
+            "    float percent = (float(i) + offset) / samplerRadiusFloat;\n" +
+            "    float weight = 1.0 - abs(percent);\n" +
+            "    vec2 coord = texCoord + samplerSteps * percent;\n" +
+            "    resultColor += texture2D(inputImageTexture, coord).rgb * weight;\n" +
+            "    blurPixels += weight;\n" +
+            "  }\n" +
+
+            "  gl_FragColor = vec4(resultColor / blurPixels, 1.0);\n" +
+            "}";
+
+    private static final String SAMPLER_STEPS = "samplerSteps";
+
+    private ProgramObject mScaleProgram;
+    private int[] mTextureDownScale;
+
+    private FrameBufferObject mFramebuffer;
+    private Viewport mTexViewport;
+    private int mSamplerStepLoc = 0;
+
+    private int mIntensity = 0;
+
+    private float mSampleScaling = 1.0f;
+
+    private final int mLevel = 16;
+    private final float mBase = 2.0f;
+
+    public static TextureRendererLerpBlur create(boolean isExternalOES) {
+        TextureRendererLerpBlur renderer = new TextureRendererLerpBlur();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            return null;
+        }
+        return renderer;
+    }
+
+    //intensity >= 0
+    public void setIntensity(int intensity) {
+
+        if(intensity == mIntensity)
+            return;
+
+        mIntensity = intensity;
+        if(mIntensity > mLevel)
+            mIntensity = mLevel;
+    }
+
+    @Override
+    public boolean init(boolean isExternalOES) {
+        return super.init(isExternalOES) && initLocal();
+    }
+
+    @Override
+    public void renderTexture(int texID, Viewport viewport) {
+
+        if(mIntensity == 0) {
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+            super.renderTexture(texID, viewport);
+            return;
+        }
+
+//        if(mShouldUpdateTexture) {
+//            updateTexture();
+//        }
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+
+        mFramebuffer.bindTexture(mTextureDownScale[0]);
+        //down scale
+
+        mTexViewport.width = calcMips(512, 1);
+        mTexViewport.height = calcMips(512, 1);
+        super.renderTexture(texID, mTexViewport);
+
+        mScaleProgram.bind();
+        for(int i = 1; i < mIntensity; ++i) {
+            mFramebuffer.bindTexture(mTextureDownScale[i]);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDownScale[i - 1]);
+            GLES20.glViewport(0, 0, calcMips(512, i + 1), calcMips(512, i + 1));
+            GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+        }
+
+        for(int i = mIntensity - 1; i > 0; --i) {
+            mFramebuffer.bindTexture(mTextureDownScale[i - 1]);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDownScale[i]);
+            GLES20.glViewport(0, 0, calcMips(512, i), calcMips(512, i));
+            GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+        }
+
+        GLES20.glViewport(viewport.x, viewport.y, viewport.width, viewport.height);
+
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDownScale[0]);
+//        GLES20.glUniform2f(mSamplerStepLoc, 0.0f, (0.5f / mTexViewport.width) * mSampleScaling);
+
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+    }
+
+    @Override
+    public void release() {
+        mScaleProgram.release();
+        mFramebuffer.release();
+        GLES20.glDeleteTextures(mTextureDownScale.length, mTextureDownScale, 0);
+        mScaleProgram = null;
+        mFramebuffer = null;
+    }
+
+    private boolean initLocal() {
+
+        genMipmaps(mLevel, 512, 512);
+        mFramebuffer = new FrameBufferObject();
+
+        mScaleProgram = new ProgramObject();
+        mScaleProgram.bindAttribLocation(POSITION_NAME, 0);
+
+//        if(!mScaleProgram.init(vshBlurUpScale, fshBlurUpScale)) {
+        if(!mScaleProgram.init(vshUpScale, fshUpScale)) {
+            Log.e(LOG_TAG, "Lerp blur initLocal failed...");
+            return false;
+        }
+
+//        mScaleProgram.bind();
+//        mSamplerStepLoc = mScaleProgram.getUniformLoc(SAMPLER_STEPS);
+
+        return true;
+    }
+
+    private void updateTexture() {
+//        if(mIntensity == 0)
+//            return;
+//
+//        int useIntensity = mIntensity;
+//
+//        if(useIntensity > 6) {
+//            mSampleScaling = useIntensity / 6.0f;
+//            useIntensity = 6;
+//        }
+//
+//        int scalingWidth = mTextureHeight / useIntensity;
+//        int scalingHeight = mTextureWidth / useIntensity;
+//
+//        if(scalingWidth == 0)
+//            scalingWidth = 1;
+//        if(scalingHeight == 0)
+//            scalingHeight = 1;
+//
+//        mTexViewport = new Viewport(0, 0, scalingWidth, scalingHeight);
+//
+//        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDownScale[0]);
+//        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, scalingWidth, scalingHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+//
+//        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDownScale[1]);
+//        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, scalingWidth, scalingHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+//
+//        mShouldUpdateTexture = false;
+//
+//        Log.i(LOG_TAG, "Lerp blur - updateTexture");
+//
+//        Common.checkGLError("Lerp blur - updateTexture");
+    }
+
+
+
+    @Override
+    public void setTextureSize(int w, int h) {
+        super.setTextureSize(w, h);
+    }
+
+    private void genMipmaps(int level, int width, int height) {
+        mTextureDownScale = new int[level];
+        GLES20.glGenTextures(level, mTextureDownScale, 0);
+
+        for(int i = 0; i < level; ++i) {
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDownScale[i]);
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, calcMips(width, i + 1), calcMips(height, i + 1), 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+        }
+
+        mTexViewport = new Viewport(0, 0, 512, 512);
+    }
+
+    private int calcMips(int len, int level) {
+//        return (int)(len / Math.pow(mBase, (level + 1)));
+        return len / (level + 1);
+    }
+
+}

+ 143 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererMask.java

@@ -0,0 +1,143 @@
+package org.wysaid.texUtils;
+
+import android.opengl.GLES20;
+
+/**
+ * Created by wangyang on 15/8/20.
+ */
+public class TextureRendererMask extends TextureRendererDrawOrigin {
+
+    private static final String vshMask = "" +
+            "attribute vec2 vPosition;\n" +
+            "varying vec2 texCoord;\n" +
+            "varying vec2 maskCoord;\n" +
+
+            "uniform mat2 rotation;\n" +
+            "uniform vec2 flipScale;\n" +
+
+            "uniform mat2 maskRotation;\n" +
+            "uniform vec2 maskFlipScale;\n" +
+
+            "uniform mat4 transform;\n" +
+
+            "void main()\n" +
+            "{\n" +
+            "   gl_Position = vec4(vPosition, 0.0, 1.0);\n" +
+
+            "   vec2 coord = flipScale * (vPosition / 2.0 * rotation) + 0.5;\n" +
+            "   texCoord = (transform * vec4(coord, 0.0, 1.0)).xy;\n" +
+
+            "   maskCoord = maskFlipScale * (vPosition / 2.0 * maskRotation) + 0.5;\n" +
+            "}";
+
+    private static final String fshMask = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "varying vec2 maskCoord;\n" +
+            "uniform %s inputImageTexture;\n" +
+            "uniform sampler2D maskTexture;\n" +
+            "void main()\n" +
+            "{\n" +
+            "   gl_FragColor = texture2D(inputImageTexture, texCoord);\n" +
+            "   vec4 maskColor = texture2D(maskTexture, maskCoord);\n" +
+            //不预乘
+//            "   maskColor.rgb *= maskColor.a;\n" +
+            "   gl_FragColor *= maskColor;\n" +
+            "}";
+
+    protected static final String MASK_ROTATION_NAME = "maskRotation";
+    protected static final String MASK_FLIPSCALE_NAME = "maskFlipScale";
+    protected static final String MASK_TEXTURE_NAME = "maskTexture";
+
+    protected int mMaskRotLoc, mMaskFlipscaleLoc;
+    protected int mMaskTexture;
+
+    public static TextureRendererMask create(boolean isExternalOES) {
+        TextureRendererMask renderer = new TextureRendererMask();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            return null;
+        }
+        return renderer;
+    }
+
+    @Override
+    public boolean init(boolean isExternalOES) {
+        if(setProgramDefault(getVertexShaderString(), getFragmentShaderString(), isExternalOES)) {
+            mProgram.bind();
+            mMaskRotLoc = mProgram.getUniformLoc(MASK_ROTATION_NAME);
+            mMaskFlipscaleLoc = mProgram.getUniformLoc(MASK_FLIPSCALE_NAME);
+            mProgram.sendUniformi(MASK_TEXTURE_NAME, 1);
+            setMaskRotation(0.0f);
+            setMaskFlipscale(1.0f, 1.0f);
+            return true;
+        }
+        return false;
+    }
+
+    public void setMaskRotation(float rad) {
+        final float cosRad = (float)Math.cos(rad);
+        final float sinRad = (float)Math.sin(rad);
+
+        float rot[] = new float[] {
+                cosRad, sinRad,
+                -sinRad, cosRad
+        };
+
+        assert mProgram != null : "setRotation must not be called before init!";
+
+        mProgram.bind();
+        GLES20.glUniformMatrix2fv(mMaskRotLoc, 1, false, rot, 0);
+    }
+
+    public void setMaskFlipscale(float x, float y) {
+        mProgram.bind();
+        GLES20.glUniform2f(mMaskFlipscaleLoc, x, y);
+    }
+
+    public void setMaskTexture(int texID) {
+        if(texID == mMaskTexture)
+            return;
+
+        GLES20.glDeleteTextures(1, new int[]{mMaskTexture}, 0);
+        mMaskTexture = texID;
+    }
+
+    @Override
+    public void renderTexture(int texID, Viewport viewport) {
+
+        if(viewport != null) {
+            GLES20.glViewport(viewport.x, viewport.y, viewport.width, viewport.height);
+        }
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(TEXTURE_2D_BINDABLE, texID);
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mMaskTexture);
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+
+        mProgram.bind();
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+    }
+
+    @Override
+    public String getVertexShaderString() {
+        return vshMask;
+    }
+
+    @Override
+    public String getFragmentShaderString() {
+        return fshMask;
+    }
+
+    @Override
+    public void release() {
+        super.release();
+        GLES20.glDeleteTextures(1, new int[]{mMaskTexture}, 0);
+    }
+
+}

+ 46 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererThreshold.java

@@ -0,0 +1,46 @@
+package org.wysaid.texUtils;
+
+/**
+ * Created by wysaid on 16/3/9.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ */
+public class TextureRendererThreshold extends TextureRendererDrawOrigin {
+
+    private static final String fshThreshold = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            " uniform %s inputImageTexture;\n" +
+            " uniform float thresholdValue;\n" +
+            " void main()\n" +
+            "{\n" +
+            "    vec4 color = texture2D(inputImageTexture, texCoord);\n" +
+            "    \n" +
+            "    float weight = (color.r + color.g + color.b) / 3.0;\n" +
+            "    color.a = smoothstep(0.0, thresholdValue, weight);\n" +
+            "    \n" +
+            "    gl_FragColor = color;\n" +
+            "}";
+
+    protected static final String THRESHOLD_VALUE = "thresholdValue";
+
+    public static TextureRendererThreshold create(boolean isExternalOES) {
+        TextureRendererThreshold renderer = new TextureRendererThreshold();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            renderer = null;
+        }
+        return renderer;
+    }
+
+    public void setThresholdValue(float thresholdValue) {
+        mProgram.bind();
+        mProgram.sendUniformf(THRESHOLD_VALUE, thresholdValue);
+    }
+
+    @Override
+    public String getFragmentShaderString() {
+        return fshThreshold;
+    }
+
+}

+ 87 - 0
media/cge_library/src/main/java/org/wysaid/texUtils/TextureRendererWave.java

@@ -0,0 +1,87 @@
+package org.wysaid.texUtils;
+
+import android.opengl.GLES20;
+
+
+/**
+ * Created by wangyang on 15/7/18.
+ */
+public class TextureRendererWave extends TextureRendererDrawOrigin {
+
+    private static final String fshWave = "" +
+            "precision mediump float;\n" +
+            "varying vec2 texCoord;\n" +
+            "uniform %s inputImageTexture;\n" +
+            "uniform float motion;\n" +
+            "const float angle = 20.0;" +
+            "void main()\n" +
+            "{\n" +
+            "   vec2 coord;\n" +
+            "   coord.x = texCoord.x + 0.01 * sin(motion + texCoord.x * angle);\n" +
+            "   coord.y = texCoord.y + 0.01 * sin(motion + texCoord.y * angle);\n" +
+            "   gl_FragColor = texture2D(inputImageTexture, coord);\n" +
+            "}";
+
+    private int mMotionLoc = 0;
+
+    private boolean mAutoMotion = false;
+    private float mMotion = 0.0f;
+    private float mMotionSpeed = 0.0f;
+
+    public TextureRendererWave() {
+    }
+
+    public static TextureRendererWave create(boolean isExternalOES) {
+        TextureRendererWave renderer = new TextureRendererWave();
+        if(!renderer.init(isExternalOES)) {
+            renderer.release();
+            return null;
+        }
+        return renderer;
+    }
+
+    @Override
+    public boolean init(boolean isExternalOES) {
+        if(setProgramDefault(vshDrawDefault, fshWave, isExternalOES)) {
+            mProgram.bind();
+            mMotionLoc = mProgram.getUniformLoc("motion");
+            return true;
+        }
+        return false;
+    }
+
+    public void setWaveMotion(float motion) {
+        mProgram.bind();
+        GLES20.glUniform1f(mMotionLoc, motion);
+    }
+
+    public void setAutoMotion(float speed) {
+        mMotionSpeed = speed;
+        mAutoMotion = (speed != 0.0f);
+    }
+
+    @Override
+    public void renderTexture(int texID, Viewport viewport) {
+
+        if(viewport != null) {
+            GLES20.glViewport(viewport.x, viewport.y, viewport.width, viewport.height);
+        }
+
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(TEXTURE_2D_BINDABLE, texID);
+
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVertexBuffer);
+        GLES20.glEnableVertexAttribArray(0);
+        GLES20.glVertexAttribPointer(0, 2, GLES20.GL_FLOAT, false, 0, 0);
+
+        mProgram.bind();
+        if(mAutoMotion) {
+            mMotion += mMotionSpeed;
+            GLES20.glUniform1f(mMotionLoc, mMotion);
+            if(mMotion > Math.PI * 20.0f) {
+                mMotion -= Math.PI * 20.0f;
+            }
+        }
+        GLES20.glDrawArrays(DRAW_FUNCTION, 0, 4);
+    }
+}

+ 330 - 0
media/cge_library/src/main/java/org/wysaid/view/CameraGLSurfaceView.java

@@ -0,0 +1,330 @@
+package org.wysaid.view;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.PixelFormat;
+import android.hardware.Camera;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.SurfaceHolder;
+
+import org.wysaid.camera.CameraInstance;
+import org.wysaid.common.Common;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wangyang on 15/7/17.
+ */
+
+public class CameraGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer {
+
+    public CameraGLSurfaceView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+
+        setEGLContextClientVersion(2);
+        setEGLConfigChooser(8, 8, 8, 8, 0, 0);
+        getHolder().setFormat(PixelFormat.RGBA_8888);
+        setRenderer(this);
+        setRenderMode(RENDERMODE_WHEN_DIRTY);
+//        setZOrderOnTop(true);
+//        setZOrderMediaOverlay(true);
+    }
+
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    public int mMaxTextureSize = 0;
+
+    protected int mViewWidth;
+    protected int mViewHeight;
+
+    protected int mRecordWidth = 480;
+    protected int mRecordHeight = 640;
+
+    //isBigger 为true 表示当宽高不满足时,取最近的较大值.
+    // 若为 false 则取较小的
+    public void setPictureSize(int width, int height, boolean isBigger) {
+        //默认会旋转90度.
+        cameraInstance().setPictureSize(height, width, isBigger);
+    }
+
+    // mode value should be:
+    //    Camera.Parameters.FLASH_MODE_AUTO;
+    //    Camera.Parameters.FLASH_MODE_OFF;
+    //    Camera.Parameters.FLASH_MODE_ON;
+    //    Camera.Parameters.FLASH_MODE_RED_EYE
+    //    Camera.Parameters.FLASH_MODE_TORCH 等
+    public synchronized boolean setFlashLightMode(String mode) {
+
+        if (!getContext().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
+            Log.e(LOG_TAG, "No flash light is supported by current device!");
+            return false;
+        }
+
+        if (!mIsCameraBackForward) {
+            return false;
+        }
+
+        Camera.Parameters parameters = cameraInstance().getParams();
+
+        if (parameters == null)
+            return false;
+
+        try {
+
+            if (!parameters.getSupportedFlashModes().contains(mode)) {
+                Log.e(LOG_TAG, "Invalid Flash Light Mode!!!");
+                return false;
+            }
+
+            parameters.setFlashMode(mode);
+            cameraInstance().setParams(parameters);
+        } catch (Exception e) {
+            Log.e(LOG_TAG, "Switch flash light failed, check if you're using front camera.");
+            return false;
+        }
+
+        return true;
+    }
+
+    protected int mMaxPreviewWidth = 1280;
+    protected int mMaxPreviewHeight = 1280;
+
+    public static class Viewport {
+         public int x, y, width, height;
+    }
+
+    protected Viewport mDrawViewport = new Viewport();
+
+    public Viewport getDrawViewport() {
+        return mDrawViewport;
+    }
+
+    //The max preview size. Change it to 1920+ if you want to preview with 1080P
+    void setMaxPreviewSize(int w, int h) {
+        mMaxPreviewWidth = w;
+        mMaxPreviewHeight = h;
+    }
+
+    protected boolean mFitFullView = false;
+
+    public void setFitFullView(boolean fit) {
+        mFitFullView = fit;
+        calcViewport();
+    }
+
+    //是否使用后置摄像头
+    protected boolean mIsCameraBackForward = true;
+
+    public boolean isCameraBackForward() {
+        return mIsCameraBackForward;
+    }
+
+    public CameraInstance cameraInstance() {
+        return CameraInstance.getInstance();
+    }
+
+    //should be called before 'onSurfaceCreated'.
+    public void presetCameraForward(boolean isBackForward) {
+        mIsCameraBackForward = isBackForward;
+    }
+
+    //注意, 录制的尺寸将影响preview的尺寸
+    //这里的width和height表示竖屏尺寸
+    //在onSurfaceCreated之前设置有效
+    public void presetRecordingSize(int width, int height) {
+        if (width > mMaxPreviewWidth || height > mMaxPreviewHeight) {
+            float scaling = Math.min(mMaxPreviewWidth / (float) width, mMaxPreviewHeight / (float) height);
+            width = (int) (width * scaling);
+            height = (int) (height * scaling);
+        }
+
+        mRecordWidth = width;
+        mRecordHeight = height;
+        cameraInstance().setPreferPreviewSize(width, height);
+    }
+
+    public void resumePreview() {
+
+    }
+
+    public void stopPreview() {
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                cameraInstance().stopPreview();
+            }
+        });
+    }
+
+    protected void onSwitchCamera() {
+
+    }
+
+    public final void switchCamera() {
+        mIsCameraBackForward = !mIsCameraBackForward;
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                cameraInstance().stopCamera();
+                onSwitchCamera();
+                int facing = mIsCameraBackForward ? Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT;
+
+                cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() {
+                    @Override
+                    public void cameraReady() {
+                        resumePreview();
+                    }
+                }, facing);
+
+                requestRender();
+            }
+        });
+    }
+
+    //Attention, 'focusAtPoint' will change focus mode to 'FOCUS_MODE_AUTO'
+    //If you want to keep the previous focus mode, please reset the focus mode after 'AutoFocusCallback'.
+    //x,y should be: [0, 1], stands for 'touchEventPosition / viewSize'.
+    public void focusAtPoint(float x, float y, Camera.AutoFocusCallback focusCallback) {
+        cameraInstance().focusAtPoint(y, 1.0f - x, focusCallback);
+    }
+
+    @Override
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        super.surfaceDestroyed(holder);
+        cameraInstance().stopCamera();
+    }
+
+    public interface OnCreateCallback {
+        void createOver();
+    }
+
+    protected OnCreateCallback mOnCreateCallback;
+
+    //定制一些初始化操作
+    public void setOnCreateCallback(final OnCreateCallback callback) {
+        mOnCreateCallback = callback;
+    }
+
+    @Override
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+        Log.i(LOG_TAG, "onSurfaceCreated...");
+
+        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
+        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+
+        int texSize[] = new int[1];
+
+        GLES20.glGetIntegerv(GLES20.GL_MAX_TEXTURE_SIZE, texSize, 0);
+        mMaxTextureSize = texSize[0];
+
+        if (mOnCreateCallback != null) {
+            mOnCreateCallback.createOver();
+        }
+    }
+
+    @Override
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        Log.i(LOG_TAG, String.format("onSurfaceChanged: %d x %d", width, height));
+
+        GLES20.glClearColor(0, 0, 0, 0);
+
+        mViewWidth = width;
+        mViewHeight = height;
+
+        calcViewport();
+    }
+
+    @Override
+    public void onDrawFrame(GL10 gl) {
+        //Nothing . See `CameraGLSurfaceViewWithTexture` or `CameraGLSurfaceViewWithBuffer`
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        Log.i(LOG_TAG, "glsurfaceview onResume...");
+    }
+
+    @Override
+    public void onPause() {
+        Log.i(LOG_TAG, "glsurfaceview onPause in...");
+
+        cameraInstance().stopCamera();
+        super.onPause();
+        Log.i(LOG_TAG, "glsurfaceview onPause out...");
+    }
+
+    public interface ReleaseOKCallback {
+
+        void releaseOK();
+    }
+
+    protected void onRelease() {
+
+    }
+
+    public final void release(final ReleaseOKCallback callback) {
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                onRelease();
+
+                Log.i(LOG_TAG, "GLSurfaceview release...");
+                if (callback != null)
+                    callback.releaseOK();
+            }
+        });
+    }
+
+    public interface TakePictureCallback {
+        //You can recycle the bitmap.
+        void takePictureOK(Bitmap bmp);
+    }
+
+    public void takeShot(final TakePictureCallback callback) {
+    }
+
+    protected void calcViewport() {
+
+        float scaling = mRecordWidth / (float) mRecordHeight;
+        float viewRatio = mViewWidth / (float) mViewHeight;
+        float s = scaling / viewRatio;
+
+        int w, h;
+
+        if (mFitFullView) {
+            //撑满全部view(内容大于view)
+            if (s > 1.0) {
+                w = (int) (mViewHeight * scaling);
+                h = mViewHeight;
+            } else {
+                w = mViewWidth;
+                h = (int) (mViewWidth / scaling);
+            }
+        } else {
+            //显示全部内容(内容小于view)
+            if (s > 1.0) {
+                w = mViewWidth;
+                h = (int) (mViewWidth / scaling);
+            } else {
+                h = mViewHeight;
+                w = (int) (mViewHeight * scaling);
+            }
+        }
+
+        mDrawViewport.width = w;
+        mDrawViewport.height = h;
+        mDrawViewport.x = (mViewWidth - mDrawViewport.width) / 2;
+        mDrawViewport.y = (mViewHeight - mDrawViewport.height) / 2;
+        Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mDrawViewport.x, mDrawViewport.y, mDrawViewport.width, mDrawViewport.height));
+    }
+}

+ 244 - 0
media/cge_library/src/main/java/org/wysaid/view/CameraGLSurfaceViewWithBuffer.java

@@ -0,0 +1,244 @@
+package org.wysaid.view;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.opengl.GLES20;
+import android.util.AttributeSet;
+import android.util.Log;
+
+import org.wysaid.camera.CameraInstance;
+import org.wysaid.common.Common;
+import org.wysaid.gpuCodec.TextureDrawerNV12ToRGB;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 10/04/2018
+ * @Description:
+ */
+
+// Preview with buffer.
+public class CameraGLSurfaceViewWithBuffer extends CameraGLSurfaceView implements Camera.PreviewCallback {
+
+    // Two preview buffers handed to the camera so it can fill one while the
+    // other is being consumed; each is re-queued in onPreviewFrame().
+    protected byte[] mPreviewBuffer0;
+    protected byte[] mPreviewBuffer1;
+    // Draws the Y + interleaved UV textures, converting to RGB on the GPU.
+    protected TextureDrawerNV12ToRGB mYUVDrawer;
+    protected int mTextureY, mTextureUV;
+    protected int mTextureWidth, mTextureHeight;
+    // Direct buffers holding the most recently received Y plane and UV plane.
+    protected ByteBuffer mBufferY, mBufferUV;
+    protected int mYSize, mUVSize;
+    protected int mBufferSize;
+    // Dummy SurfaceTexture required by the Camera preview API; actual frames
+    // are delivered through onPreviewFrame, not through this texture.
+    protected SurfaceTexture mSurfaceTexture;
+    // volatile: written on the camera callback thread (onPreviewFrame) and
+    // read WITHOUT holding mBufferUpdateLock on the GL thread in
+    // updateTextures(); without volatile the GL thread may never observe it.
+    protected volatile boolean mBufferUpdated = false;
+    // Zero-length array used purely as a monitor guarding mBufferY/mBufferUV.
+    protected final int[] mBufferUpdateLock = new int[0];
+
+    public CameraGLSurfaceViewWithBuffer(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        // Buffer-based preview has no frame-available signal, so render
+        // continuously instead of on demand.
+        setRenderMode(RENDERMODE_CONTINUOUSLY);
+    }
+
+    /** Frees the YUV drawer, the dummy SurfaceTexture and both textures. GL thread only. */
+    @Override
+    protected void onRelease() {
+        super.onRelease();
+        if(mYUVDrawer != null) {
+            mYUVDrawer.release();
+            mYUVDrawer = null;
+        }
+
+        if(mSurfaceTexture != null) {
+            mSurfaceTexture.release();
+            mSurfaceTexture = null;
+        }
+
+        if(mTextureY != 0 || mTextureUV != 0) {
+            GLES20.glDeleteTextures(2, new int[]{mTextureY, mTextureUV}, 0);
+            mTextureY = mTextureUV = 0;
+            mTextureWidth = 0;
+            mTextureHeight = 0;
+        }
+    }
+
+    /**
+     * Lazily creates the Y / UV textures and (re)allocates their storage
+     * whenever the camera preview size changes. Must run on the GL thread.
+     */
+    protected void resizeTextures() {
+
+        if (mTextureY == 0 || mTextureUV == 0) {
+            int[] textures = new int[2];
+            GLES20.glGenTextures(2, textures, 0);
+            mTextureY = textures[0];
+            mTextureUV = textures[1];
+
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY);
+            Common.texParamHelper(GLES20.GL_TEXTURE_2D, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE);
+
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureUV);
+            Common.texParamHelper(GLES20.GL_TEXTURE_2D, GLES20.GL_LINEAR, GLES20.GL_CLAMP_TO_EDGE);
+        }
+
+        int width = cameraInstance().previewWidth();
+        int height = cameraInstance().previewHeight();
+
+        if (mTextureWidth != width || mTextureHeight != height) {
+            mTextureWidth = width;
+            mTextureHeight = height;
+
+            // Y plane: one LUMINANCE byte per pixel. UV plane: half resolution;
+            // LUMINANCE_ALPHA packs the interleaved U/V byte pair per texel.
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY);
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mTextureWidth, mTextureHeight, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureUV);
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, mTextureWidth / 2, mTextureHeight / 2, 0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, null);
+        }
+    }
+
+    /**
+     * Uploads the latest camera buffers into the Y/UV textures when a new frame
+     * arrived, otherwise just rebinds the textures to units 0 and 1. GL thread.
+     */
+    protected void updateTextures() {
+        if(mBufferUpdated) {
+            synchronized (mBufferUpdateLock) {
+                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY);
+                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, mTextureWidth, mTextureHeight, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBufferY.position(0));
+                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureUV);
+                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, mTextureWidth / 2, mTextureHeight / 2, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, mBufferUV.position(0));
+                mBufferUpdated = false;
+            }
+        } else {
+            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY);
+            GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureUV);
+        }
+    }
+
+    @Override
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+        super.onSurfaceCreated(gl, config);
+
+        mYUVDrawer = TextureDrawerNV12ToRGB.create();
+        mYUVDrawer.setFlipScale(1.0f, 1.0f);
+        // Camera frames arrive rotated; rotate 90 degrees back while drawing.
+        mYUVDrawer.setRotation((float) (Math.PI / 2.0));
+
+        // Frames are consumed via onPreviewFrame, so the camera only gets a
+        // dummy SurfaceTexture (attached to texture id 0).
+        mSurfaceTexture = new SurfaceTexture(0);
+    }
+
+    /**
+     * Opens the camera if necessary, forces the NV21 preview format, sizes the
+     * callback buffers and starts the preview with this as the buffer callback.
+     */
+    @Override
+    public void resumePreview() {
+        if(mYUVDrawer == null) {
+            return;
+        }
+
+        if (!cameraInstance().isCameraOpened()) {
+            int facing = mIsCameraBackForward ? Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT;
+
+            cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() {
+                @Override
+                public void cameraReady() {
+                    Log.i(LOG_TAG, "tryOpenCamera OK...");
+                }
+            }, facing);
+        }
+
+        if (!cameraInstance().isPreviewing()) {
+            Camera camera = cameraInstance().getCameraDevice();
+            Camera.Parameters parameters = camera.getParameters();
+            Camera.Size sz = parameters.getPreviewSize();
+            int format = parameters.getPreviewFormat();
+
+            // The YUV drawer expects NV21 data; force the format if needed.
+            if(format != ImageFormat.NV21)
+            {
+                try {
+                    parameters.setPreviewFormat(ImageFormat.NV21);
+                    camera.setParameters(parameters);
+                    format = ImageFormat.NV21;
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    return ;
+                }
+            }
+
+            mYSize = sz.width * sz.height;
+            // Total bytes of one frame (NV21 is 12 bits per pixel).
+            int newBufferSize = mYSize * ImageFormat.getBitsPerPixel(format) / 8;
+
+            if(mBufferSize != newBufferSize) {
+                mBufferSize = newBufferSize;
+                mUVSize = mBufferSize - mYSize;
+                mBufferY = ByteBuffer.allocateDirect(mYSize).order(ByteOrder.nativeOrder());
+                mBufferUV = ByteBuffer.allocateDirect(mUVSize).order(ByteOrder.nativeOrder());
+
+                mPreviewBuffer0 = new byte[mBufferSize];
+                mPreviewBuffer1 = new byte[mBufferSize];
+            }
+
+            camera.addCallbackBuffer(mPreviewBuffer0);
+            camera.addCallbackBuffer(mPreviewBuffer1);
+
+            cameraInstance().startPreview(mSurfaceTexture, this);
+        }
+
+        // Back camera gets a horizontal flip; front camera is drawn as-is.
+        if(mIsCameraBackForward) {
+            mYUVDrawer.setFlipScale(-1.0f, 1.0f);
+            mYUVDrawer.setRotation((float) (Math.PI / 2.0));
+        } else {
+            mYUVDrawer.setFlipScale(1.0f, 1.0f);
+            mYUVDrawer.setRotation((float) (Math.PI / 2.0));
+        }
+
+        resizeTextures();
+    }
+
+    @Override
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        super.onSurfaceChanged(gl, width, height);
+
+        if (!cameraInstance().isPreviewing()) {
+            resumePreview();
+        }
+    }
+
+    /** Clears the default framebuffer and draws the current YUV frame into the viewport. */
+    public void drawCurrentFrame() {
+        if(mYUVDrawer == null) {
+            return;
+        }
+
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glClearColor(0,0,0,1);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glViewport(mDrawViewport.x, mDrawViewport.y, mDrawViewport.width, mDrawViewport.height);
+        updateTextures();
+        mYUVDrawer.drawTextures();
+    }
+
+    @Override
+    public void onDrawFrame(GL10 gl) {
+        drawCurrentFrame();
+    }
+
+    /**
+     * Camera callback thread: copies the NV21 planes into the direct buffers,
+     * flags the update for the GL thread and re-queues the callback buffer.
+     */
+    @Override
+    public void onPreviewFrame(byte[] data, Camera camera) {
+
+        synchronized (mBufferUpdateLock) {
+            mBufferY.position(0);
+            mBufferUV.position(0);
+            mBufferY.put(data, 0, mYSize);
+            mBufferUV.put(data, mYSize, mUVSize);
+            mBufferUpdated = true;
+        }
+
+        camera.addCallbackBuffer(data);
+    }
+}

+ 422 - 0
media/cge_library/src/main/java/org/wysaid/view/CameraGLSurfaceViewWithTexture.java

@@ -0,0 +1,422 @@
+package org.wysaid.view;
+
+/**
+ * Created by wangyang on 15/7/27.
+ */
+
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.media.ExifInterface;
+import android.opengl.GLES20;
+import android.util.AttributeSet;
+import android.util.Log;
+
+import org.wysaid.camera.CameraInstance;
+import org.wysaid.common.Common;
+import org.wysaid.common.FrameBufferObject;
+import org.wysaid.nativePort.CGEFrameRecorder;
+import org.wysaid.nativePort.CGEFrameRenderer;
+import org.wysaid.nativePort.CGENativeLibrary;
+
+import java.io.BufferedOutputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.IntBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wangyang on 15/7/17.
+ */
+/**
+ * Camera preview rendered through a SurfaceTexture/OES texture and forwarded
+ * to a native CGEFrameRecorder for filtering, on-screen rendering, snapshots
+ * and (in subclasses) recording. GL work is posted with queueEvent(), so most
+ * public setters are asynchronous.
+ */
+public class CameraGLSurfaceViewWithTexture extends CameraGLSurfaceView implements SurfaceTexture.OnFrameAvailableListener {
+
+    // Receives camera frames; updateTexImage() is called in onDrawFrame.
+    protected SurfaceTexture mSurfaceTexture;
+    // OES texture id backing mSurfaceTexture.
+    protected int mTextureID;
+    protected boolean mIsTransformMatrixSet = false;
+    // Native frame recorder; null after release.
+    protected CGEFrameRecorder mFrameRecorder;
+
+    /** @return the native frame recorder, or null once released. */
+    public CGEFrameRecorder getRecorder() {
+        return mFrameRecorder;
+    }
+
+
+    /** Applies a filter config string on the GL thread (asynchronous). */
+    public synchronized void setFilterWithConfig(final String config) {
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mFrameRecorder != null) {
+                    // "Width" is a typo in the native API's method name; kept as-is.
+                    mFrameRecorder.setFilterWidthConfig(config);
+                } else {
+                    Log.e(LOG_TAG, "setFilterWithConfig after release!!");
+                }
+            }
+        });
+    }
+
+    /** Sets the current filter's intensity on the GL thread (asynchronous). */
+    public void setFilterIntensity(final float intensity) {
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (mFrameRecorder != null) {
+                    mFrameRecorder.setFilterIntensity(intensity);
+                } else {
+                    Log.e(LOG_TAG, "setFilterIntensity after release!!");
+                }
+            }
+        });
+    }
+
+    // Customize some initialization work after the recorder has been created.
+    // If the recorder already exists the callback runs immediately on the GL
+    // thread; otherwise it is stored and run after onSurfaceCreated.
+    public void setOnCreateCallback(final OnCreateCallback callback) {
+        if (mFrameRecorder == null || callback == null) {
+            mOnCreateCallback = callback;
+        } else {
+            // Already created, just run.
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+                    callback.createOver();
+                }
+            });
+        }
+    }
+
+    public CameraGLSurfaceViewWithTexture(Context context, AttributeSet attrs) {
+        super(context, attrs);
+    }
+
+    /** Creates the native recorder and the camera SurfaceTexture. GL thread. */
+    @Override
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+        mFrameRecorder = new CGEFrameRecorder();
+        mIsTransformMatrixSet = false;
+        if (!mFrameRecorder.init(mRecordWidth, mRecordHeight, mRecordWidth, mRecordHeight)) {
+            Log.e(LOG_TAG, "Frame Recorder init failed!");
+        }
+
+        // Camera frames arrive rotated/flipped; compensate in the recorder.
+        mFrameRecorder.setSrcRotation((float) (Math.PI / 2.0));
+        mFrameRecorder.setSrcFlipScale(1.0f, -1.0f);
+        mFrameRecorder.setRenderFlipScale(1.0f, -1.0f);
+
+        mTextureID = Common.genSurfaceTextureID();
+        mSurfaceTexture = new SurfaceTexture(mTextureID);
+        mSurfaceTexture.setOnFrameAvailableListener(this);
+
+        super.onSurfaceCreated(gl, config);
+    }
+
+    // NOTE(review): missing @Override (overrides CameraGLSurfaceView.onRelease).
+    /** Frees the SurfaceTexture, its OES texture and the native recorder. GL thread. */
+    protected void onRelease() {
+        super.onRelease();
+        if(mSurfaceTexture != null) {
+            mSurfaceTexture.release();
+            mSurfaceTexture = null;
+        }
+
+        if(mTextureID != 0) {
+            Common.deleteTextureID(mTextureID);
+            mTextureID = 0;
+        }
+
+        if(mFrameRecorder != null) {
+            mFrameRecorder.release();
+            mFrameRecorder = null;
+        }
+    }
+
+
+    @Override
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        super.onSurfaceChanged(gl, width, height);
+
+        if (!cameraInstance().isPreviewing()) {
+            resumePreview();
+        }
+    }
+
+    /** Opens the camera if needed and (re)starts the preview into mSurfaceTexture. */
+    public void resumePreview() {
+
+        if (mFrameRecorder == null) {
+            Log.e(LOG_TAG, "resumePreview after release!!");
+            return;
+        }
+
+        if (!cameraInstance().isCameraOpened()) {
+
+            int facing = mIsCameraBackForward ? Camera.CameraInfo.CAMERA_FACING_BACK : Camera.CameraInfo.CAMERA_FACING_FRONT;
+
+            cameraInstance().tryOpenCamera(new CameraInstance.CameraOpenCallback() {
+                @Override
+                public void cameraReady() {
+                    Log.i(LOG_TAG, "tryOpenCamera OK...");
+                }
+            }, facing);
+        }
+
+        if (!cameraInstance().isPreviewing()) {
+            cameraInstance().startPreview(mSurfaceTexture);
+            // Width/height swapped because the source is rotated 90 degrees.
+            mFrameRecorder.srcResize(cameraInstance().previewHeight(), cameraInstance().previewWidth());
+        }
+
+        requestRender();
+    }
+
+    // Reused buffer for the SurfaceTexture's 4x4 transform matrix.
+    protected float[] mTransformMatrix = new float[16];
+
+    /** Pulls the latest camera frame, runs the filter chain and renders it. */
+    @Override
+    public void onDrawFrame(GL10 gl) {
+
+        if (mSurfaceTexture == null || !cameraInstance().isPreviewing()) {
+            return;
+        }
+
+        mSurfaceTexture.updateTexImage();
+
+        mSurfaceTexture.getTransformMatrix(mTransformMatrix);
+        mFrameRecorder.update(mTextureID, mTransformMatrix);
+
+        mFrameRecorder.runProc();
+
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glClearColor(0,0,0,0);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+        mFrameRecorder.render(mDrawViewport.x, mDrawViewport.y, mDrawViewport.width, mDrawViewport.height);
+    }
+
+//    protected long mTimeCount2 = 0;
+//    protected long mFramesCount2 = 0;
+//    protected long mLastTimestamp2 = 0;
+
+    /** Camera signaled a new frame: schedule a render pass. */
+    @Override
+    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+
+        requestRender();
+
+//        if (mLastTimestamp2 == 0)
+//            mLastTimestamp2 = System.currentTimeMillis();
+//
+//        long currentTimestamp = System.currentTimeMillis();
+//
+//        ++mFramesCount2;
+//        mTimeCount2 += currentTimestamp - mLastTimestamp2;
+//        mLastTimestamp2 = currentTimestamp;
+//        if (mTimeCount2 >= 1000) {
+//            Log.i(LOG_TAG, String.format("camera sample rate: %d", mFramesCount2));
+//            mTimeCount2 %= 1000;
+//            mFramesCount2 = 0;
+//        }
+    }
+
+    /** Re-applies source rotation/flip after the front/back camera switch. */
+    @Override
+    protected void onSwitchCamera() {
+        super.onSwitchCamera();
+        if(mFrameRecorder != null) {
+            mFrameRecorder.setSrcRotation((float) (Math.PI / 2.0));
+            mFrameRecorder.setRenderFlipScale(1.0f, -1.0f);
+        }
+    }
+
+    /**
+     * Renders the recorder's cached frame into an offscreen FBO at record size
+     * and reads it back as a bitmap. Runs asynchronously on the GL thread.
+     */
+    @Override
+    public void takeShot(final TakePictureCallback callback) {
+        assert callback != null : "callback must not be null!";
+
+        if (mFrameRecorder == null) {
+            Log.e(LOG_TAG, "Recorder not initialized!");
+            callback.takePictureOK(null);
+            return;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                FrameBufferObject frameBufferObject = new FrameBufferObject();
+                int bufferTexID;
+                IntBuffer buffer;
+                Bitmap bmp;
+
+                bufferTexID = Common.genBlankTextureID(mRecordWidth, mRecordHeight);
+                frameBufferObject.bindTexture(bufferTexID);
+                GLES20.glViewport(0, 0, mRecordWidth, mRecordHeight);
+                mFrameRecorder.drawCache();
+                buffer = IntBuffer.allocate(mRecordWidth * mRecordHeight);
+                GLES20.glReadPixels(0, 0, mRecordWidth, mRecordHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
+                bmp = Bitmap.createBitmap(mRecordWidth, mRecordHeight, Bitmap.Config.ARGB_8888);
+                bmp.copyPixelsFromBuffer(buffer);
+                Log.i(LOG_TAG, String.format("w: %d, h: %d", mRecordWidth, mRecordHeight));
+
+                frameBufferObject.release();
+                GLES20.glDeleteTextures(1, new int[]{bufferTexID}, 0);
+
+                callback.takePictureOK(bmp);
+            }
+        });
+
+    }
+
+    // isBigger == true: when the exact size is unavailable, pick the nearest
+    // larger supported size; false: pick the nearest smaller one.
+    public void setPictureSize(int width, int height, boolean isBigger) {
+        // Width/height swapped: the picture is rotated 90 degrees by default.
+        cameraInstance().setPictureSize(height, width, isBigger);
+    }
+
+    /**
+     * Takes a full-resolution picture via the Camera API, corrects rotation
+     * and mirroring, optionally applies a filter config, and delivers the
+     * result through photoCallback (null on failure).
+     */
+    public synchronized void takePicture(final TakePictureCallback photoCallback, Camera.ShutterCallback shutterCallback, final String config, final float intensity, final boolean isFrontMirror) {
+
+        Camera.Parameters params = cameraInstance().getParams();
+
+        if (photoCallback == null || params == null) {
+            Log.e(LOG_TAG, "takePicture after release!");
+            if (photoCallback != null) {
+                photoCallback.takePictureOK(null);
+            }
+            return;
+        }
+
+        try {
+            params.setRotation(90);
+            cameraInstance().setParams(params);
+        } catch (Exception e) {
+            Log.e(LOG_TAG, "Error when takePicture: " + e.toString());
+            if (photoCallback != null) {
+                photoCallback.takePictureOK(null);
+            }
+            return;
+        }
+
+        cameraInstance().getCameraDevice().takePicture(shutterCallback, null, new Camera.PictureCallback() {
+            @Override
+            public void onPictureTaken(final byte[] data, Camera camera) {
+
+                Camera.Parameters params = camera.getParameters();
+                Camera.Size sz = params.getPictureSize();
+
+                boolean shouldRotate;
+
+                Bitmap bmp;
+                int width, height;
+
+                // When the picture is not square, rotation can be inferred by
+                // comparing the decoded orientation with the requested size.
+                if (sz.width != sz.height) {
+                    // The data format has already been set to JPEG by default.
+                    bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
+                    width = bmp.getWidth();
+                    height = bmp.getHeight();
+                    shouldRotate = (sz.width > sz.height && width > height) || (sz.width < sz.height && width < height);
+                } else {
+                    Log.i(LOG_TAG, "Cache image to get exif.");
+
+                    // Square picture: write to a cache file and read the EXIF
+                    // orientation tag instead.
+                    try {
+                        String tmpFilename = getContext().getExternalCacheDir() + "/picture_cache000.jpg";
+                        FileOutputStream fileout = new FileOutputStream(tmpFilename);
+                        BufferedOutputStream bufferOutStream = new BufferedOutputStream(fileout);
+                        bufferOutStream.write(data);
+                        bufferOutStream.flush();
+                        bufferOutStream.close();
+
+                        ExifInterface exifInterface = new ExifInterface(tmpFilename);
+                        int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
+
+                        switch (orientation) {
+                            // The saved EXIF record only has two cases here:
+                            // rotated 90 degrees, or not rotated.
+                            case ExifInterface.ORIENTATION_ROTATE_90:
+                                shouldRotate = true;
+                                break;
+                            default:
+                                shouldRotate = false;
+                                break;
+                        }
+
+                        bmp = BitmapFactory.decodeFile(tmpFilename);
+                        width = bmp.getWidth();
+                        height = bmp.getHeight();
+
+                    } catch (IOException e) {
+                        Log.e(LOG_TAG, "Err when saving bitmap...");
+                        e.printStackTrace();
+                        // NOTE(review): this path returns without invoking
+                        // photoCallback, so the caller is never notified of the
+                        // failure — confirm and consider takePictureOK(null).
+                        return;
+                    }
+                }
+
+
+                // Downscale if the image exceeds the GL max texture size.
+                if (width > mMaxTextureSize || height > mMaxTextureSize) {
+                    float scaling = Math.max(width / (float) mMaxTextureSize, height / (float) mMaxTextureSize);
+                    Log.i(LOG_TAG, String.format("目标尺寸(%d x %d)超过当前设备OpenGL 能够处理的最大范围(%d x %d), 现在将图片压缩至合理大小!", width, height, mMaxTextureSize, mMaxTextureSize));
+
+                    bmp = Bitmap.createScaledBitmap(bmp, (int) (width / scaling), (int) (height / scaling), false);
+
+                    width = bmp.getWidth();
+                    height = bmp.getHeight();
+                }
+
+                Bitmap bmp2;
+
+                // Normalize orientation/mirroring into bmp2 depending on the
+                // facing camera and the isFrontMirror flag.
+                if (shouldRotate) {
+                    bmp2 = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888);
+
+                    Canvas canvas = new Canvas(bmp2);
+
+                    if (cameraInstance().getFacing() == Camera.CameraInfo.CAMERA_FACING_BACK) {
+                        Matrix mat = new Matrix();
+                        int halfLen = Math.min(width, height) / 2;
+                        mat.setRotate(90, halfLen, halfLen);
+                        canvas.drawBitmap(bmp, mat, null);
+                    } else {
+                        Matrix mat = new Matrix();
+
+                        if (isFrontMirror) {
+                            mat.postTranslate(-width / 2, -height / 2);
+                            mat.postScale(-1.0f, 1.0f);
+                            mat.postTranslate(width / 2, height / 2);
+                            int halfLen = Math.min(width, height) / 2;
+                            mat.postRotate(90, halfLen, halfLen);
+                        } else {
+                            int halfLen = Math.max(width, height) / 2;
+                            mat.postRotate(-90, halfLen, halfLen);
+                        }
+
+                        canvas.drawBitmap(bmp, mat, null);
+                    }
+
+                    bmp.recycle();
+                } else {
+                    if (cameraInstance().getFacing() == Camera.CameraInfo.CAMERA_FACING_BACK) {
+                        bmp2 = bmp;
+                    } else {
+
+                        bmp2 = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+                        Canvas canvas = new Canvas(bmp2);
+                        Matrix mat = new Matrix();
+                        if (isFrontMirror) {
+                            mat.postTranslate(-width / 2, -height / 2);
+                            mat.postScale(1.0f, -1.0f);
+                            mat.postTranslate(width / 2, height / 2);
+                        } else {
+                            mat.postTranslate(-width / 2, -height / 2);
+                            mat.postScale(-1.0f, -1.0f);
+                            mat.postTranslate(width / 2, height / 2);
+                        }
+
+                        canvas.drawBitmap(bmp, mat, null);
+                    }
+
+                }
+
+                // Optionally run the native filter chain on the final bitmap.
+                if (config != null) {
+                    CGENativeLibrary.filterImage_MultipleEffectsWriteBack(bmp2, config, intensity);
+                }
+
+                photoCallback.takePictureOK(bmp2);
+
+                cameraInstance().getCameraDevice().startPreview();
+            }
+        });
+    }
+}

+ 255 - 0
media/cge_library/src/main/java/org/wysaid/view/CameraRecordGLSurfaceView.java

@@ -0,0 +1,255 @@
+package org.wysaid.view;
+
+/**
+ * Created by wangyang on 15/7/27.
+ */
+
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaRecorder;
+import android.util.AttributeSet;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.ShortBuffer;
+
+/**
+ * Created by wangyang on 15/7/17.
+ */
+/**
+ * Adds audio + video recording on top of CameraGLSurfaceViewWithTexture.
+ * Video frames are produced by the native CGEFrameRecorder; audio is captured
+ * on a dedicated thread (AudioRecordRunnable) and fed to the same recorder,
+ * interleaved by comparing video and audio stream timestamps.
+ */
+public class CameraRecordGLSurfaceView extends CameraGLSurfaceViewWithTexture {
+
+    public CameraRecordGLSurfaceView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+    }
+
+    // Recording flag; state transitions happen under mRecordStateLock.
+    private boolean mShouldRecord = false;
+
+    /** @return true while recording is in progress. */
+    public synchronized boolean isRecording() {
+        return mShouldRecord;
+    }
+
+    // Guards mShouldRecord transitions between the GL thread, the audio
+    // thread and callers of endRecording().
+    private final Object mRecordStateLock = new Object();
+
+    private AudioRecordRunnable mAudioRecordRunnable;
+    private Thread mAudioThread;
+
+    /** Reports whether recording actually started (audio + native recorder). */
+    public interface StartRecordingCallback {
+        void startRecordingOver(boolean success);
+    }
+
+    /** Starts recording to the given file without a completion callback. */
+    public void startRecording(final String filename) {
+        startRecording(filename, null);
+    }
+
+    /**
+     * Starts recording to the given file. The native recorder is started on
+     * the GL thread; on success an audio capture thread is spawned. The
+     * callback (if any) is invoked with the outcome.
+     */
+    public void startRecording(final String filename, final StartRecordingCallback recordingCallback) {
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mFrameRecorder == null) {
+                    Log.e(LOG_TAG, "Error: startRecording after release!!");
+                    if (recordingCallback != null) {
+                        recordingCallback.startRecordingOver(false);
+                    }
+                    return;
+                }
+
+                // 30 fps target for the native encoder.
+                if (!mFrameRecorder.startRecording(30, filename)) {
+                    Log.e(LOG_TAG, "start recording failed!");
+                    if (recordingCallback != null)
+                        recordingCallback.startRecordingOver(false);
+                    return;
+                }
+                Log.i(LOG_TAG, "glSurfaceView recording, file: " + filename);
+                synchronized (mRecordStateLock) {
+                    mShouldRecord = true;
+                    mAudioRecordRunnable = new AudioRecordRunnable(recordingCallback);
+                    // audioRecord is null if AudioRecord construction failed;
+                    // in that case the runnable already reported failure.
+                    if (mAudioRecordRunnable.audioRecord != null) {
+                        mAudioThread = new Thread(mAudioRecordRunnable);
+                        mAudioThread.start();
+                    }
+                }
+            }
+        });
+    }
+
+    /** Notified on the GL thread once the recording has been finalized. */
+    public interface EndRecordingCallback {
+        void endRecordingOK();
+    }
+
+    public void endRecording() {
+        endRecording(null, true);
+    }
+
+    public void endRecording(final EndRecordingCallback callback) {
+        endRecording(callback, true);
+    }
+
+    // The video may be invalid if "shouldSave" is false;
+    public void endRecording(final EndRecordingCallback callback, final boolean shouldSave) {
+        Log.i(LOG_TAG, "notify quit...");
+        synchronized (mRecordStateLock) {
+            mShouldRecord = false;
+        }
+
+        if (mFrameRecorder == null) {
+            Log.e(LOG_TAG, "Error: endRecording after release!!");
+            return;
+        }
+
+        // Block until the audio thread has drained and exited before
+        // finalizing the file on the GL thread.
+        joinAudioRecording();
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (mFrameRecorder != null)
+                    mFrameRecorder.endRecording(shouldSave);
+                if (callback != null) {
+                    callback.endRecordingOK();
+                }
+            }
+        });
+    }
+
+    /** Stops any in-flight recording before releasing parent resources. */
+    @Override
+    protected void onRelease() {
+        synchronized (mRecordStateLock) {
+            mShouldRecord = false;
+        }
+
+        joinAudioRecording();
+        super.onRelease();
+    }
+
+    /** Refuses to stop the preview while a recording is running. */
+    @Override
+    public void stopPreview() {
+
+        synchronized (mRecordStateLock) {
+            if (mShouldRecord) {
+                Log.e(LOG_TAG, "The camera is recording! cannot stop!");
+                return;
+            }
+        }
+
+        super.stopPreview();
+    }
+
+    /** Waits for the audio capture thread to terminate (no-op if none). */
+    public void joinAudioRecording() {
+
+        if (mAudioThread != null) {
+            try {
+                mAudioThread.join();
+                mAudioThread = null;
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    /**
+     * Captures 16-bit mono PCM at 44.1 kHz and feeds it to mFrameRecorder
+     * while mShouldRecord holds. Reports start success/failure exactly once
+     * through the StartRecordingCallback.
+     */
+    class AudioRecordRunnable implements Runnable {
+
+        int bufferSize;
+        //        short[] audioData;
+        int bufferReadResult;
+        public AudioRecord audioRecord;
+        public volatile boolean isInitialized;
+        private static final int sampleRate = 44100;
+        // Direct byte buffer (2 bytes per 16-bit sample) and its short view.
+        ByteBuffer audioBufferRef;
+        ShortBuffer audioBuffer;
+        StartRecordingCallback recordingCallback;
+
+        private AudioRecordRunnable(StartRecordingCallback callback) {
+            recordingCallback = callback;
+            try {
+                bufferSize = AudioRecord.getMinBufferSize(sampleRate,
+                        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
+                Log.i(LOG_TAG, "audio min buffer size: " + bufferSize);
+                audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
+                        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
+//                audioData = new short[bufferSize];
+                audioBufferRef = ByteBuffer.allocateDirect(bufferSize * 2).order(ByteOrder.nativeOrder());
+                audioBuffer = audioBufferRef.asShortBuffer();
+            } catch (Exception e) {
+                if (audioRecord != null) {
+                    audioRecord.release();
+                    audioRecord = null;
+                }
+            }
+
+            // Construction failed: report once and drop the callback so the
+            // run() path cannot report a second time.
+            if (audioRecord == null && recordingCallback != null) {
+                recordingCallback.startRecordingOver(false);
+                recordingCallback = null;
+            }
+        }
+
+        public void run() {
+            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
+            this.isInitialized = false;
+
+            if (this.audioRecord == null) {
+                recordingCallback.startRecordingOver(false);
+                recordingCallback = null;
+                return;
+            }
+
+            // Wait until the audio recorder leaves the uninitialized state
+            // (0 == AudioRecord.STATE_UNINITIALIZED).
+            while (this.audioRecord.getState() == 0) {
+                try {
+                    Thread.sleep(100L);
+                } catch (InterruptedException localInterruptedException) {
+                    localInterruptedException.printStackTrace();
+                }
+            }
+            this.isInitialized = true;
+
+            try {
+                this.audioRecord.startRecording();
+            } catch (Exception e) {
+                if (recordingCallback != null) {
+                    recordingCallback.startRecordingOver(false);
+                    recordingCallback = null;
+                }
+                return;
+            }
+
+            if (this.audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
+                if (recordingCallback != null) {
+                    recordingCallback.startRecordingOver(false);
+                    recordingCallback = null;
+                }
+                return;
+            }
+
+            if (recordingCallback != null) {
+                recordingCallback.startRecordingOver(true);
+                recordingCallback = null;
+            }
+
+
+            // Capture loop: read raw bytes, then hand the data to the native
+            // recorder only while video is ahead of audio (A/V sync).
+            while (true) {
+                synchronized (mRecordStateLock) {
+                    if (!mShouldRecord) //&& mFrameRecorder.getVideoStreamtime() <= mFrameRecorder.getAudioStreamtime()
+                        break;
+                }
+
+                audioBufferRef.position(0);
+                bufferReadResult = this.audioRecord.read(audioBufferRef, bufferSize * 2);
+                // NOTE(review): mShouldRecord is re-read here without holding
+                // mRecordStateLock — presumably an acceptable benign race;
+                // confirm intent.
+                if (mShouldRecord && bufferReadResult > 0 && mFrameRecorder != null &&
+                        mFrameRecorder.getTimestamp() > mFrameRecorder.getAudioStreamtime()) {
+//                    Log.e(LOG_TAG, "buffer Result: " + bufferReadResult);
+                    audioBuffer.position(0);
+//                    audioBuffer.put(audioData).position(0);
+                    // bufferReadResult is in bytes; /2 converts to sample count.
+                    mFrameRecorder.recordAudioFrame(audioBuffer, bufferReadResult / 2);
+                }
+            }
+            this.audioRecord.stop();
+            this.audioRecord.release();
+            Log.i(LOG_TAG, "Audio thread end!");
+        }
+
+    }
+}

+ 411 - 0
media/cge_library/src/main/java/org/wysaid/view/ImageGLSurfaceView.java

@@ -0,0 +1,411 @@
+package org.wysaid.view;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.PixelFormat;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLSurfaceView.Renderer;
+import android.util.AttributeSet;
+import android.util.Log;
+
+import org.wysaid.common.Common;
+import org.wysaid.nativePort.CGEImageHandler;
+import org.wysaid.texUtils.TextureRenderer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wysaid on 15/12/23.
+ * Mail: admin@wysaid.org
+ * blog: wysaid.org
+ *
+ * A GLSurfaceView that displays a single Bitmap and applies CGE filter chains
+ * to it through a native CGEImageHandler. All handler access is marshalled
+ * onto the GL thread via queueEvent(); rendering is on-demand
+ * (RENDERMODE_WHEN_DIRTY), so requestRender() is called after every change.
+ */
+public class ImageGLSurfaceView extends GLSurfaceView implements Renderer {
+
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    // How the image is fitted into the view when aspect ratios differ.
+    public enum DisplayMode {
+        DISPLAY_SCALE_TO_FILL,  // stretch over the whole view, ignoring aspect ratio
+        DISPLAY_ASPECT_FILL,    // keep aspect ratio, cover the view (may crop)
+        DISPLAY_ASPECT_FIT,     // keep aspect ratio, fit inside the view (may letterbox)
+    }
+
+    // Native image handler. Created on the GL thread in onSurfaceCreated() and
+    // cleared in release(); null means "not yet initialized" or "released".
+    protected CGEImageHandler mImageHandler;
+    protected float mFilterIntensity = 1.0f;
+
+    public CGEImageHandler getImageHandler() {
+        return mImageHandler;
+    }
+
+    // Viewport actually used for drawing, recomputed by calcViewport().
+    protected TextureRenderer.Viewport mRenderViewport = new TextureRenderer.Viewport();
+
+    public TextureRenderer.Viewport getRenderViewport() {
+        return mRenderViewport;
+    }
+
+    // Source bitmap size (set in setImageBitmap) and current surface size
+    // (set in onSurfaceChanged).
+    protected int mImageWidth;
+    protected int mImageHeight;
+    protected int mViewWidth;
+    protected int mViewHeight;
+
+    public int getImageWidth() {
+        return mImageWidth;
+    }
+
+    public int getImageheight() {
+        return mImageHeight;
+    }
+
+    protected DisplayMode mDisplayMode = DisplayMode.DISPLAY_SCALE_TO_FILL;
+
+    public DisplayMode getDisplayMode() {
+        return mDisplayMode;
+    }
+
+    // Changes the fit mode, recomputes the viewport and schedules a redraw.
+    public void setDisplayMode(DisplayMode displayMode) {
+        mDisplayMode = displayMode;
+        calcViewport();
+        requestRender();
+    }
+
+    // Replaces the current filter chain with the one described by 'config'
+    // (a CGE filter rule string). Executed on the GL thread.
+    public void setFilterWithConfig(final String config) {
+
+        if (mImageHandler == null)
+            return;
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (mImageHandler == null) {
+                    Log.e(LOG_TAG, "set config after release!!");
+                    return;
+                }
+
+                mImageHandler.setFilterWithConfig(config);
+                requestRender();
+            }
+        });
+    }
+
+    // One-slot throttle shared by the intensity setters and lazyFlush():
+    // mSettingIntensityCount starts at 1, is decremented before posting a GL
+    // task and incremented when that task completes. A request arriving while
+    // a task is still pending is dropped ("Too fast, skipping...").
+    protected final Object mSettingIntensityLock = new Object();
+    protected int mSettingIntensityCount = 1;
+
+    public void setFilterIntensityForIndex(final float intensity, final int index) {
+        setFilterIntensityForIndex(intensity, index, true);
+    }
+
+    //See: CGEImageHandler.setFilterIntensityAtIndex
+    // Sets the intensity of the filter at 'index' only; when 'shouldProcess'
+    // is false the filter chain is updated but not re-run/redrawn.
+    public void setFilterIntensityForIndex(final float intensity, final int index, final  boolean shouldProcess) {
+        if (mImageHandler == null)
+            return;
+
+        mFilterIntensity = intensity;
+
+        synchronized (mSettingIntensityLock) {
+
+            if (mSettingIntensityCount <= 0) {
+                Log.i(LOG_TAG, "Too fast, skipping...");
+                return;
+            }
+            --mSettingIntensityCount;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mImageHandler == null) {
+                    Log.e(LOG_TAG, "set intensity after release!!");
+                } else {
+                    mImageHandler.setFilterIntensityAtIndex(mFilterIntensity, index, shouldProcess);
+                    if(shouldProcess) {
+                        requestRender();
+                    }
+                }
+
+                // Release the throttle slot even if the handler was gone.
+                synchronized (mSettingIntensityLock) {
+                    ++mSettingIntensityCount;
+                }
+            }
+        });
+    }
+
+    // Sets the intensity of the whole filter chain, throttled as above.
+    public void setFilterIntensity(final float intensity) {
+        if (mImageHandler == null)
+            return;
+
+        mFilterIntensity = intensity;
+
+        synchronized (mSettingIntensityLock) {
+
+            if (mSettingIntensityCount <= 0) {
+                Log.i(LOG_TAG, "Too fast, skipping...");
+                return;
+            }
+            --mSettingIntensityCount;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mImageHandler == null) {
+                    Log.e(LOG_TAG, "set intensity after release!!");
+                } else {
+                    mImageHandler.setFilterIntensity(mFilterIntensity, true);
+                    requestRender();
+                }
+
+                synchronized (mSettingIntensityLock) {
+                    ++mSettingIntensityCount;
+                }
+            }
+        });
+    }
+
+    // Runs 'runnable' on the GL thread; if 'runFilter' is set, afterwards
+    // reverts to the original image and re-runs the whole filter chain, then
+    // redraws. Never skipped (unlike lazyFlush).
+    public void flush(final boolean runFilter, final Runnable runnable) {
+        if (mImageHandler == null || runnable == null)
+            return;
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (mImageHandler == null) {
+                    Log.e(LOG_TAG, "flush after release!!");
+                } else {
+                    runnable.run();
+                    if(runFilter) {
+                        mImageHandler.revertImage();
+                        mImageHandler.processFilters();
+                    }
+                    requestRender();
+                }
+            }
+        });
+    }
+
+    // The runnable may be skipped when busy.
+    // Throttled variant of flush(). Note the order differs from flush(): here
+    // the filters run BEFORE 'runnable' is invoked.
+    public void lazyFlush(final boolean runFilter, final Runnable runnable) {
+        if (mImageHandler == null || runnable == null)
+            return;
+
+        synchronized (mSettingIntensityLock) {
+
+            if (mSettingIntensityCount <= 0) {
+                Log.i(LOG_TAG, "Too fast, skipping...");
+                return;
+            }
+            --mSettingIntensityCount;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mImageHandler == null) {
+                    Log.e(LOG_TAG, "flush after release!!");
+                } else {
+                    if(runFilter) {
+                        mImageHandler.revertImage();
+                        mImageHandler.processFilters();
+                    }
+                    runnable.run();
+                    requestRender();
+                }
+
+                synchronized (mSettingIntensityLock) {
+                    ++mSettingIntensityCount;
+                }
+            }
+        });
+    }
+
+    // Sets the bitmap to be filtered/displayed. Dimensions are recorded on the
+    // caller's thread; the native handler is (re)initialized on the GL thread.
+    // Requires onSurfaceCreated to have run already (mImageHandler != null).
+    public void setImageBitmap(final Bitmap bmp) {
+
+        if (bmp == null)
+            return;
+
+        if (mImageHandler == null) {
+            Log.e(LOG_TAG, "Handler not initialized!");
+            return;
+        }
+
+        mImageWidth = bmp.getWidth();
+        mImageHeight = bmp.getHeight();
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mImageHandler == null) {
+                    Log.e(LOG_TAG, "set image after release!!");
+                    return;
+                }
+
+                if (mImageHandler.initWithBitmap(bmp)) {
+
+                    calcViewport();
+                    requestRender();
+
+                } else {
+                    Log.e(LOG_TAG, "setImageBitmap: init handler failed!");
+                }
+            }
+        });
+    }
+
+    // Callback delivering the filtered result; invoked on the GL thread.
+    public interface QueryResultBitmapCallback {
+        void get(Bitmap bmp);
+    }
+
+    // Asynchronously fetches the current filtered image as a Bitmap.
+    public void getResultBitmap(final QueryResultBitmapCallback callback) {
+
+        if (callback == null)
+            return;
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                // NOTE(review): unlike every other queued task in this class,
+                // mImageHandler is not null-checked here; calling this after
+                // release() would NPE on the GL thread — confirm and add the
+                // same guard as the siblings.
+                Bitmap bmp = mImageHandler.getResultBitmap();
+                callback.get(bmp);
+            }
+        });
+    }
+
+
+    // Configures a GLES2, RGBA8888, on-demand-rendering surface.
+    public ImageGLSurfaceView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+
+        setEGLContextClientVersion(2);
+        setEGLConfigChooser(8, 8, 8, 8, 8, 0);
+        getHolder().setFormat(PixelFormat.RGBA_8888);
+        setRenderer(this);
+        setRenderMode(RENDERMODE_WHEN_DIRTY);
+//        setZOrderMediaOverlay(true);
+
+        Log.i(LOG_TAG, "ImageGLSurfaceView Construct...");
+    }
+
+    // Notifies the client once the GL surface (and mImageHandler) exist, so it
+    // can safely call setImageBitmap()/setFilterWithConfig().
+    public interface OnSurfaceCreatedCallback {
+        void surfaceCreated();
+    }
+
+    protected OnSurfaceCreatedCallback mSurfaceCreatedCallback;
+
+    public void setSurfaceCreatedCallback(OnSurfaceCreatedCallback callback) {
+        mSurfaceCreatedCallback = callback;
+    }
+
+    @Override
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+        Log.i(LOG_TAG, "ImageGLSurfaceView onSurfaceCreated...");
+
+        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
+
+        mImageHandler = new CGEImageHandler();
+
+        // Negative Y scale — presumably to compensate for GL's bottom-left
+        // origin so the bitmap appears upright; confirm against CGEImageHandler.
+        mImageHandler.setDrawerFlipScale(1.0f, -1.0f);
+
+        if (mSurfaceCreatedCallback != null) {
+            mSurfaceCreatedCallback.surfaceCreated();
+        }
+    }
+
+    @Override
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+        mViewWidth = width;
+        mViewHeight = height;
+        calcViewport();
+    }
+
+    @Override
+    public void onDrawFrame(GL10 gl) {
+
+        // Draw directly to the default framebuffer (the screen).
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+        if (mImageHandler == null)
+            return;
+
+        GLES20.glViewport(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height);
+        mImageHandler.drawResult();
+    }
+
+    // Releases the native handler on the GL thread. Further calls on this view
+    // become no-ops (guarded by mImageHandler == null).
+    public void release() {
+
+        if (mImageHandler != null) {
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+                    Log.i(LOG_TAG, "ImageGLSurfaceView release...");
+
+                    if (mImageHandler != null) {
+                        mImageHandler.release();
+                        mImageHandler = null;
+                    }
+                }
+            });
+        }
+    }
+
+    // Recomputes mRenderViewport from image size, view size and display mode.
+    // NOTE(review): divides by mImageHeight and mViewHeight with no zero check;
+    // appears safe only because callers run after setImageBitmap()/
+    // onSurfaceChanged() — confirm the call ordering.
+    protected void calcViewport() {
+
+        if (mDisplayMode == DisplayMode.DISPLAY_SCALE_TO_FILL) {
+            mRenderViewport.x = 0;
+            mRenderViewport.y = 0;
+            mRenderViewport.width = mViewWidth;
+            mRenderViewport.height = mViewHeight;
+            return;
+        }
+
+        float scaling;
+
+        scaling = mImageWidth / (float) mImageHeight;
+
+        float viewRatio = mViewWidth / (float) mViewHeight;
+        // s > 1: image is wider (relative to the view); s < 1: image is taller.
+        float s = scaling / viewRatio;
+
+        int w, h;
+
+        switch (mDisplayMode) {
+            case DISPLAY_ASPECT_FILL: {
+                //AspectFill
+                if (s > 1.0) {
+                    w = (int) (mViewHeight * scaling);
+                    h = mViewHeight;
+                } else {
+                    w = mViewWidth;
+                    h = (int) (mViewWidth / scaling);
+                }
+            }
+            break;
+            case DISPLAY_ASPECT_FIT: {
+                //AspectFit
+                if (s < 1.0) {
+                    w = (int) (mViewHeight * scaling);
+                    h = mViewHeight;
+                } else {
+                    w = mViewWidth;
+                    h = (int) (mViewWidth / scaling);
+                }
+            }
+            break;
+
+            default:
+                Log.i(LOG_TAG, "Error occured, please check the code...");
+                return;
+        }
+
+
+        // Center the computed rect inside the view.
+        mRenderViewport.width = w;
+        mRenderViewport.height = h;
+        mRenderViewport.x = (mViewWidth - w) / 2;
+        mRenderViewport.y = (mViewHeight - h) / 2;
+
+        Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height));
+    }
+}

+ 616 - 0
media/cge_library/src/main/java/org/wysaid/view/SimplePlayerGLSurfaceView.java

@@ -0,0 +1,616 @@
+package org.wysaid.view;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.PixelFormat;
+import android.graphics.SurfaceTexture;
+import android.media.MediaPlayer;
+import android.net.Uri;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLUtils;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.Surface;
+
+import org.wysaid.common.Common;
+import org.wysaid.texUtils.TextureRenderer;
+import org.wysaid.texUtils.TextureRendererDrawOrigin;
+import org.wysaid.texUtils.TextureRendererMask;
+
+import java.nio.IntBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wangyang on 15/8/20.
+ *
+ * A GLSurfaceView-based video player: MediaPlayer decodes into a
+ * SurfaceTexture bound to an external GL texture, and a TextureRenderer draws
+ * that texture to the screen, optionally masked by a bitmap. New frames
+ * trigger on-demand rendering through onFrameAvailable().
+ */
+public class SimplePlayerGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
+
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    // Receives decoded video frames; created lazily on the GL thread.
+    private SurfaceTexture mSurfaceTexture;
+    private int mVideoTextureID;
+    private TextureRenderer mDrawer;
+
+    // setTextureRenderer must be called on the OpenGL thread!
+    // Swaps in a custom renderer, releasing the previous one.
+    public void setTextureRenderer(TextureRenderer drawer) {
+        if (mDrawer == null) {
+            // NOTE(review): this guards the CURRENT drawer, not the incoming
+            // argument; 'drawer == null' was likely intended — confirm.
+            Log.e(LOG_TAG, "Invalid Drawer!");
+            return;
+        }
+
+        if (mDrawer != drawer) {
+            mDrawer.release();
+            mDrawer = drawer;
+            calcViewport();
+        }
+    }
+
+    private TextureRenderer.Viewport mRenderViewport = new TextureRenderer.Viewport();
+    private float[] mTransformMatrix = new float[16];
+    private boolean mIsUsingMask = false;
+
+    public boolean isUsingMask() {
+        return mIsUsingMask;
+    }
+
+    private float mMaskAspectRatio = 1.0f;
+    private float mDrawerFlipScaleX = 1.0f;
+    private float mDrawerFlipScaleY = 1.0f;
+
+    // View/video sizes default to 1000 until onSurfaceChanged/onPrepared
+    // report the real dimensions.
+    private int mViewWidth = 1000;
+    private int mViewHeight = 1000;
+
+    public int getViewWidth() {
+        return mViewWidth;
+    }
+
+    public int getViewheight() {
+        return mViewHeight;
+    }
+
+    private int mVideoWidth = 1000;
+    private int mVideoHeight = 1000;
+
+    private boolean mFitFullView = false;
+
+    // true: scale content to cover the whole view; false: letterbox-fit.
+    public void setFitFullView(boolean fit) {
+        mFitFullView = fit;
+        if (mDrawer != null)
+            calcViewport();
+    }
+
+    private MediaPlayer mPlayer;
+
+    private Uri mVideoUri;
+
+    public interface PlayerInitializeCallback {
+
+        // Perform initial setup on the player, e.g. attach listeners that are
+        // not installed by default, such as a buffer-update listener.
+        void initPlayer(MediaPlayer player);
+    }
+
+    public void setPlayerInitializeCallback(PlayerInitializeCallback callback) {
+        mPlayerInitCallback = callback;
+    }
+
+    PlayerInitializeCallback mPlayerInitCallback;
+
+    public interface PlayPreparedCallback {
+        void playPrepared(MediaPlayer player);
+    }
+
+    PlayPreparedCallback mPreparedCallback;
+
+    public interface PlayCompletionCallback {
+        void playComplete(MediaPlayer player);
+
+
+        /*
+
+        'what' values:  MEDIA_ERROR_UNKNOWN,
+                        MEDIA_ERROR_SERVER_DIED
+
+        'extra' values: MEDIA_ERROR_IO
+                        MEDIA_ERROR_MALFORMED
+                        MEDIA_ERROR_UNSUPPORTED
+                        MEDIA_ERROR_TIMED_OUT
+
+        returning false would cause the 'playComplete' to be called
+        */
+        boolean playFailed(MediaPlayer mp, int what, int extra);
+    }
+
+    PlayCompletionCallback mPlayCompletionCallback;
+
+    // Starts (or restarts) playback of 'uri'. If the GL surface is not created
+    // yet (mDrawer == null), the uri is only stored here and consumed later by
+    // onSurfaceCreated(); otherwise the texture/SurfaceTexture are lazily
+    // created on the GL thread and _useUri() is invoked.
+    public synchronized void setVideoUri(final Uri uri, final PlayPreparedCallback preparedCallback, final PlayCompletionCallback completionCallback) {
+
+        mVideoUri = uri;
+        mPreparedCallback = preparedCallback;
+        mPlayCompletionCallback = completionCallback;
+
+        if (mDrawer != null) {
+
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+                    Log.i(LOG_TAG, "setVideoUri...");
+
+                    if (mSurfaceTexture == null || mVideoTextureID == 0) {
+                        mVideoTextureID = Common.genSurfaceTextureID();
+                        mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
+                        mSurfaceTexture.setOnFrameAvailableListener(SimplePlayerGLSurfaceView.this);
+                    }
+                    _useUri();
+                }
+            });
+        }
+    }
+
+    // The callback invoked depends on the bitmap passed in:
+    // if a mask is in use after the call, setMaskOK is invoked;
+    // otherwise unsetMaskOK is invoked.
+    public interface SetMaskBitmapCallback {
+        void setMaskOK(TextureRendererMask renderer);
+
+        void unsetMaskOK(TextureRenderer renderer);
+    }
+
+    public void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle) {
+        setMaskBitmap(bmp, shouldRecycle, null);
+    }
+
+    // Installs 'bmp' as an alpha mask over the video (null cancels masking).
+    // Texture upload happens on the GL thread; 'bmp' is recycled afterwards
+    // when shouldRecycle is set.
+    public synchronized void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle, final SetMaskBitmapCallback callback) {
+
+        if (mDrawer == null) {
+            Log.e(LOG_TAG, "setMaskBitmap after release!");
+            return;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (bmp == null) {
+                    Log.i(LOG_TAG, "Cancel Mask Bitmap!");
+
+                    setMaskTexture(0, 1.0f);
+
+                    if (callback != null) {
+                        callback.unsetMaskOK(mDrawer);
+                    }
+
+                    return;
+                }
+
+                Log.i(LOG_TAG, "Use Mask Bitmap!");
+
+                // Upload the mask bitmap into a new 2D texture (nearest
+                // filtering, clamped edges).
+                int texID[] = {0};
+                GLES20.glGenTextures(1, texID, 0);
+                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texID[0]);
+                GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
+                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
+                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
+                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+                setMaskTexture(texID[0], bmp.getWidth() / (float) bmp.getHeight());
+
+                if (callback != null && mDrawer instanceof TextureRendererMask) {
+                    callback.setMaskOK((TextureRendererMask) mDrawer);
+                }
+                if (shouldRecycle)
+                    bmp.recycle();
+
+            }
+        });
+    }
+
+    // Switches the drawer between masked and plain rendering depending on
+    // texID (0 = no mask) and records the mask aspect ratio. GL-thread only.
+    public synchronized void setMaskTexture(int texID, float aspectRatio) {
+        Log.i(LOG_TAG, "setMaskTexture... ");
+
+        if (texID == 0) {
+            if (mDrawer instanceof TextureRendererMask) {
+                mDrawer.release();
+                mDrawer = TextureRendererDrawOrigin.create(true);
+            }
+            mIsUsingMask = false;
+        } else {
+            if (!(mDrawer instanceof TextureRendererMask)) {
+                mDrawer.release();
+                TextureRendererMask drawer = TextureRendererMask.create(true);
+                assert drawer != null : "Drawer Create Failed!";
+                drawer.setMaskTexture(texID);
+                mDrawer = drawer;
+            }
+            mIsUsingMask = true;
+        }
+
+        mMaskAspectRatio = aspectRatio;
+        calcViewport();
+    }
+
+    public synchronized MediaPlayer getPlayer() {
+        if (mPlayer == null) {
+            Log.e(LOG_TAG, "Player is not initialized!");
+        }
+        return mPlayer;
+    }
+
+    public interface OnCreateCallback {
+        void createOK();
+    }
+
+    private OnCreateCallback mOnCreateCallback;
+
+    // Customize some initialization work.
+    // If the drawer already exists the callback runs immediately on the GL
+    // thread; otherwise it is deferred until onSurfaceCreated().
+    public void setOnCreateCallback(final OnCreateCallback callback) {
+
+        assert callback != null : "无意义操作!";
+
+        if (mDrawer == null) {
+            mOnCreateCallback = callback;
+        } else {
+            // Already created — execute directly.
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+                    callback.createOK();
+                }
+            });
+        }
+    }
+
+    // Configures a GLES2, RGBA8888, on-demand-rendering surface on top of the
+    // window (setZOrderOnTop).
+    public SimplePlayerGLSurfaceView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+
+        Log.i(LOG_TAG, "MyGLSurfaceView Construct...");
+
+        setEGLContextClientVersion(2);
+        setEGLConfigChooser(8, 8, 8, 8, 8, 0);
+        getHolder().setFormat(PixelFormat.RGBA_8888);
+        setRenderer(this);
+        setRenderMode(RENDERMODE_WHEN_DIRTY);
+        setZOrderOnTop(true);
+
+        Log.i(LOG_TAG, "MyGLSurfaceView Construct OK...");
+    }
+
+    @Override
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+
+        Log.i(LOG_TAG, "video player onSurfaceCreated...");
+
+        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
+
+        mDrawer = TextureRendererDrawOrigin.create(true);
+        if (mDrawer == null) {
+            Log.e(LOG_TAG, "Create Drawer Failed!");
+            return;
+        }
+        if (mOnCreateCallback != null) {
+            mOnCreateCallback.createOK();
+        }
+
+        // If setVideoUri() was called before the surface existed, finish the
+        // deferred setup now.
+        if (mVideoUri != null && (mSurfaceTexture == null || mVideoTextureID == 0)) {
+            mVideoTextureID = Common.genSurfaceTextureID();
+            mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
+            mSurfaceTexture.setOnFrameAvailableListener(SimplePlayerGLSurfaceView.this);
+            _useUri();
+        }
+    }
+
+    @Override
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+
+        mViewWidth = width;
+        mViewHeight = height;
+
+        calcViewport();
+    }
+
+    //must be in the OpenGL thread!
+    // NOTE(review): the body posts via queueEvent(), so it appears callable
+    // from any thread — confirm whether the comment above is stale.
+    // Stops and releases the player, drawer, SurfaceTexture and video texture.
+    public void release() {
+
+        Log.i(LOG_TAG, "Video player view release...");
+
+        if (mPlayer != null) {
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+
+                    Log.i(LOG_TAG, "Video player view release run...");
+
+                    if (mPlayer != null) {
+
+                        mPlayer.setSurface(null);
+                        if (mPlayer.isPlaying())
+                            mPlayer.stop();
+                        mPlayer.release();
+                        mPlayer = null;
+                    }
+
+                    if (mDrawer != null) {
+                        mDrawer.release();
+                        mDrawer = null;
+                    }
+
+                    if (mSurfaceTexture != null) {
+                        mSurfaceTexture.release();
+                        mSurfaceTexture = null;
+                    }
+
+                    if (mVideoTextureID != 0) {
+                        GLES20.glDeleteTextures(1, new int[]{mVideoTextureID}, 0);
+                        mVideoTextureID = 0;
+                    }
+
+                    mIsUsingMask = false;
+                    mPreparedCallback = null;
+                    mPlayCompletionCallback = null;
+
+                    Log.i(LOG_TAG, "Video player view release OK");
+                }
+            });
+        }
+    }
+
+    @Override
+    public void onPause() {
+        Log.i(LOG_TAG, "surfaceview onPause ...");
+
+        super.onPause();
+    }
+
+    @Override
+    public void onDrawFrame(GL10 gl) {
+
+        if (mSurfaceTexture == null) {
+            return;
+        }
+
+        // Latch the most recent decoded frame into the external texture.
+        mSurfaceTexture.updateTexImage();
+
+        // NOTE(review): mPlayer is not null-checked before isPlaying(); a late
+        // frame arriving after release() or before _useUri() could NPE — confirm.
+        if (!mPlayer.isPlaying()) {
+            return;
+        }
+
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+        GLES20.glViewport(0, 0, mViewWidth, mViewHeight);
+
+        // Apply the SurfaceTexture's transform (crop/flip) before drawing.
+        mSurfaceTexture.getTransformMatrix(mTransformMatrix);
+        mDrawer.setTransform(mTransformMatrix);
+
+        mDrawer.renderTexture(mVideoTextureID, mRenderViewport);
+
+    }
+
+    // Simple FPS counters for the log line below.
+    private long mTimeCount2 = 0;
+    private long mFramesCount2 = 0;
+    private long mLastTimestamp2 = 0;
+
+    // Called by SurfaceTexture when a new frame is decoded: request a redraw
+    // and log the playback frame rate once per second.
+    @Override
+    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+        requestRender();
+
+        if (mLastTimestamp2 == 0)
+            mLastTimestamp2 = System.currentTimeMillis();
+
+        long currentTimestamp = System.currentTimeMillis();
+
+        ++mFramesCount2;
+        mTimeCount2 += currentTimestamp - mLastTimestamp2;
+        mLastTimestamp2 = currentTimestamp;
+        if (mTimeCount2 >= 1e3) {
+            Log.i(LOG_TAG, String.format("播放帧率: %d", mFramesCount2));
+            mTimeCount2 -= 1e3;
+            mFramesCount2 = 0;
+        }
+    }
+
+    // Recomputes mRenderViewport from the content aspect ratio (mask or video)
+    // and the fit mode. NOTE(review): assumes non-zero heights — confirm.
+    private void calcViewport() {
+        float scaling;
+
+        if (mIsUsingMask) {
+            flushMaskAspectRatio();
+            scaling = mMaskAspectRatio;
+        } else {
+            mDrawer.setFlipscale(mDrawerFlipScaleX, mDrawerFlipScaleY);
+            scaling = mVideoWidth / (float) mVideoHeight;
+        }
+
+        float viewRatio = mViewWidth / (float) mViewHeight;
+        float s = scaling / viewRatio;
+
+        int w, h;
+
+        if (mFitFullView) {
+            // Fill the entire view (content larger than the view).
+            if (s > 1.0) {
+                w = (int) (mViewHeight * scaling);
+                h = mViewHeight;
+            } else {
+                w = mViewWidth;
+                h = (int) (mViewWidth / scaling);
+            }
+        } else {
+            // Show all content (content smaller than the view).
+            if (s > 1.0) {
+                w = mViewWidth;
+                h = (int) (mViewWidth / scaling);
+            } else {
+                h = mViewHeight;
+                w = (int) (mViewHeight * scaling);
+            }
+        }
+
+        // Center the computed rect inside the view.
+        mRenderViewport.width = w;
+        mRenderViewport.height = h;
+        mRenderViewport.x = (mViewWidth - mRenderViewport.width) / 2;
+        mRenderViewport.y = (mViewHeight - mRenderViewport.height) / 2;
+        Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height));
+    }
+
+    // (Re)binds mVideoUri to the MediaPlayer: resets or creates the player,
+    // attaches the SurfaceTexture-backed Surface, installs listeners and
+    // prepares asynchronously. Failures are routed to playFailed()/playComplete()
+    // on the UI thread.
+    private void _useUri() {
+
+        if (mPlayer != null) {
+
+            mPlayer.stop();
+            mPlayer.reset();
+
+        } else {
+            mPlayer = new MediaPlayer();
+        }
+
+        try {
+            mPlayer.setDataSource(getContext(), mVideoUri);
+
+//            mPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+
+            mPlayer.setSurface(new Surface(mSurfaceTexture));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Log.e(LOG_TAG, "useUri failed");
+
+            if (mPlayCompletionCallback != null) {
+                this.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mPlayCompletionCallback != null) {
+                            if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
+                                mPlayCompletionCallback.playComplete(mPlayer);
+                        }
+                    }
+                });
+            }
+            return;
+        }
+
+        if (mPlayerInitCallback != null) {
+            mPlayerInitCallback.initPlayer(mPlayer);
+        }
+
+        mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
+            @Override
+            public void onCompletion(MediaPlayer mp) {
+                if (mPlayCompletionCallback != null) {
+                    mPlayCompletionCallback.playComplete(mPlayer);
+                }
+                Log.i(LOG_TAG, "Video Play Over");
+            }
+        });
+
+        mPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
+            @Override
+            public void onPrepared(MediaPlayer mp) {
+                mVideoWidth = mp.getVideoWidth();
+                mVideoHeight = mp.getVideoHeight();
+
+                // Viewport depends on video size, so recompute on the GL thread.
+                queueEvent(new Runnable() {
+                    @Override
+                    public void run() {
+                        calcViewport();
+                    }
+                });
+
+                // If a prepared-callback is set, the client decides when to
+                // start; otherwise start immediately.
+                if (mPreparedCallback != null) {
+                    mPreparedCallback.playPrepared(mPlayer);
+                } else {
+                    mp.start();
+                }
+//                requestRender(); // may render the first frame too early (before the surface texture is ready)
+                Log.i(LOG_TAG, String.format("Video resolution 1: %d x %d", mVideoWidth, mVideoHeight));
+            }
+        });
+
+        mPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
+            @Override
+            public boolean onError(MediaPlayer mp, int what, int extra) {
+
+                if (mPlayCompletionCallback != null)
+                    return mPlayCompletionCallback.playFailed(mp, what, extra);
+                return false;
+            }
+        });
+
+        try {
+            mPlayer.prepareAsync();
+        } catch (Exception e) {
+            Log.i(LOG_TAG, String.format("Error handled: %s, play failure handler would be called!", e.toString()));
+            if (mPlayCompletionCallback != null) {
+                this.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mPlayCompletionCallback != null) {
+                            if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
+                                mPlayCompletionCallback.playComplete(mPlayer);
+                        }
+                    }
+                });
+            }
+        }
+
+    }
+
+    // Adjusts the drawer's flip/scale so the video fits the mask's aspect
+    // ratio (shrink on the axis where the video overflows the mask).
+    private void flushMaskAspectRatio() {
+
+        float dstRatio = mVideoWidth / (float) mVideoHeight;
+
+        float s = dstRatio / mMaskAspectRatio;
+
+        if (s > 1.0f) {
+            mDrawer.setFlipscale(mDrawerFlipScaleX / s, mDrawerFlipScaleY);
+        } else {
+            mDrawer.setFlipscale(mDrawerFlipScaleX, s * mDrawerFlipScaleY);
+        }
+    }
+
+    public interface TakeShotCallback {
+        //You can recycle the bmp.
+        void takeShotOK(Bitmap bmp);
+    }
+
+    // Captures the current on-screen viewport into a Bitmap on the GL thread.
+    // glReadPixels returns rows bottom-up, so the image is flipped vertically
+    // into a second bitmap via a Canvas matrix before delivery.
+    public synchronized void takeShot(final TakeShotCallback callback) {
+        assert callback != null : "callback must not be null!";
+
+        if (mDrawer == null) {
+            Log.e(LOG_TAG, "Drawer not initialized!");
+            callback.takeShotOK(null);
+            return;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                IntBuffer buffer = IntBuffer.allocate(mRenderViewport.width * mRenderViewport.height);
+
+                GLES20.glReadPixels(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
+                Bitmap bmp = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
+                bmp.copyPixelsFromBuffer(buffer);
+
+                Bitmap bmp2 = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
+
+                // Vertical flip: translate to center, mirror Y, translate back.
+                Canvas canvas = new Canvas(bmp2);
+                Matrix mat = new Matrix();
+                mat.setTranslate(0.0f, -mRenderViewport.height / 2.0f);
+                mat.postScale(1.0f, -1.0f);
+                mat.postTranslate(0.0f, mRenderViewport.height / 2.0f);
+
+                canvas.drawBitmap(bmp, mat, null);
+                bmp.recycle();
+
+                callback.takeShotOK(bmp2);
+            }
+        });
+
+    }
+}

+ 88 - 0
media/cge_library/src/main/java/org/wysaid/view/TrackingCameraGLSurfaceView.java

@@ -0,0 +1,88 @@
+package org.wysaid.view;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.AttributeSet;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * @Author: wangyang
+ * @Mail: admin@wysaid.org
+ * @Date: 2017/10/29
+ * @Description:
+ */
+
+// A simple case for extra tracking.
+
+public class TrackingCameraGLSurfaceView extends CameraGLSurfaceViewWithBuffer {
+
+    public TrackingCameraGLSurfaceView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+    }
+
+    public interface TrackingProc {
+        boolean setup(int width, int height);
+        void resize(int width, int height);
+        void processTracking(ByteBuffer luminanceBuffer);
+        void render(TrackingCameraGLSurfaceView glView);
+        void release();
+    }
+
+    protected TrackingProc mTrackingProc;
+
+    public TrackingProc getTrackingProc() {
+        return mTrackingProc;
+    }
+
+    //must be called in the gl thread.
+    public boolean setTrackingProc(final TrackingProc proc) {
+        if (mTrackingProc != null) {
+            mTrackingProc.release();
+            mTrackingProc = null;
+        }
+
+        if (proc == null)
+            return true;
+
+        if(!proc.setup(mRecordWidth, mRecordHeight)) {
+            Log.e(LOG_TAG, "setup proc failed!");
+            proc.release();
+            return false;
+        }
+        mTrackingProc = proc;
+        return true;
+    }
+
+    @Override
+    protected void onRelease() {
+        super.onRelease();
+        if (mTrackingProc != null) {
+            mTrackingProc.release();
+            mTrackingProc = null;
+        }
+    }
+
+    @Override
+    public void onDrawFrame(GL10 gl) {
+
+        if(mSurfaceTexture == null || !cameraInstance().isPreviewing()) {
+            return;
+        }
+
+        if(mBufferUpdated && mTrackingProc != null) {
+            synchronized (mBufferUpdateLock) {
+                mTrackingProc.processTracking(mBufferY);
+            }
+        }
+
+        if(mTrackingProc == null) {
+            super.onDrawFrame(gl);
+        } else {
+            mTrackingProc.render(this);
+        }
+    }
+}

+ 582 - 0
media/cge_library/src/main/java/org/wysaid/view/VideoPlayerGLSurfaceView.java

@@ -0,0 +1,582 @@
+package org.wysaid.view;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.PixelFormat;
+import android.graphics.SurfaceTexture;
+import android.media.MediaPlayer;
+import android.net.Uri;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.Surface;
+
+import org.wysaid.common.Common;
+import org.wysaid.nativePort.CGEFrameRenderer;
+import org.wysaid.texUtils.TextureRenderer;
+
+import java.nio.IntBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * Created by wangyang on 15/11/26.
+ */
+
+public class VideoPlayerGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
+
+    public static final String LOG_TAG = Common.LOG_TAG;
+
+    private SurfaceTexture mSurfaceTexture;
+    private int mVideoTextureID;
+    private CGEFrameRenderer mFrameRenderer;
+
+
+    private TextureRenderer.Viewport mRenderViewport = new TextureRenderer.Viewport();
+    private float[] mTransformMatrix = new float[16];
+    private boolean mIsUsingMask = false;
+
+    public boolean isUsingMask() {
+        return mIsUsingMask;
+    }
+
+    private float mMaskAspectRatio = 1.0f;
+
+    private int mViewWidth = 1000;
+    private int mViewHeight = 1000;
+
+    public int getViewWidth() {
+        return mViewWidth;
+    }
+
+    public int getViewheight() {
+        return mViewHeight;
+    }
+
+    private int mVideoWidth = 1000;
+    private int mVideoHeight = 1000;
+
+    private boolean mFitFullView = false;
+
+    public void setFitFullView(boolean fit) {
+        mFitFullView = fit;
+        if (mFrameRenderer != null)
+            calcViewport();
+    }
+
+    private MediaPlayer mPlayer;
+
+    private Uri mVideoUri;
+
+    public interface PlayerInitializeCallback {
+
+        //对player 进行初始化设置, 设置未默认启动的listener, 比如 bufferupdateListener.
+        void initPlayer(MediaPlayer player);
+    }
+
+    public void setPlayerInitializeCallback(PlayerInitializeCallback callback) {
+        mPlayerInitCallback = callback;
+    }
+
+    PlayerInitializeCallback mPlayerInitCallback;
+
+    public interface PlayPreparedCallback {
+        void playPrepared(MediaPlayer player);
+    }
+
+    PlayPreparedCallback mPreparedCallback;
+
+    public interface PlayCompletionCallback {
+        void playComplete(MediaPlayer player);
+
+
+        /*
+
+        what 取值: MEDIA_ERROR_UNKNOWN,
+                  MEDIA_ERROR_SERVER_DIED
+
+        extra 取值 MEDIA_ERROR_IO
+                  MEDIA_ERROR_MALFORMED
+                  MEDIA_ERROR_UNSUPPORTED
+                  MEDIA_ERROR_TIMED_OUT
+
+        returning false would cause the 'playComplete' to be called
+        */
+        boolean playFailed(MediaPlayer mp, int what, int extra);
+    }
+
+    PlayCompletionCallback mPlayCompletionCallback;
+
+    public synchronized void setVideoUri(final Uri uri, final PlayPreparedCallback preparedCallback, final PlayCompletionCallback completionCallback) {
+
+        mVideoUri = uri;
+        mPreparedCallback = preparedCallback;
+        mPlayCompletionCallback = completionCallback;
+
+        if (mFrameRenderer != null) {
+
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+                    Log.i(LOG_TAG, "setVideoUri...");
+
+                    if (mSurfaceTexture == null || mVideoTextureID == 0) {
+                        mVideoTextureID = Common.genSurfaceTextureID();
+                        mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
+                        mSurfaceTexture.setOnFrameAvailableListener(VideoPlayerGLSurfaceView.this);
+                    }
+                    _useUri();
+                }
+            });
+        }
+    }
+
+    public synchronized void setFilterWithConfig(final String config) {
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mFrameRenderer != null) {
+                    mFrameRenderer.setFilterWidthConfig(config);
+                } else {
+                    Log.e(LOG_TAG, "setFilterWithConfig after release!!");
+                }
+            }
+        });
+    }
+
+    public void setFilterIntensity(final float intensity) {
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+                if (mFrameRenderer != null) {
+                    mFrameRenderer.setFilterIntensity(intensity);
+                } else {
+                    Log.e(LOG_TAG, "setFilterIntensity after release!!");
+                }
+            }
+        });
+    }
+
+    public interface SetMaskBitmapCallback {
+        void setMaskOK(CGEFrameRenderer recorder);
+    }
+
+    public void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle) {
+        setMaskBitmap(bmp, shouldRecycle, null);
+    }
+
+    //注意, 当传入的bmp为null时, SetMaskBitmapCallback 不会执行.
+    public void setMaskBitmap(final Bitmap bmp, final boolean shouldRecycle, final SetMaskBitmapCallback callback) {
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                if (mFrameRenderer == null) {
+                    Log.e(LOG_TAG, "setMaskBitmap after release!!");
+                    return;
+                }
+
+                if (bmp == null) {
+                    mFrameRenderer.setMaskTexture(0, 1.0f);
+                    mIsUsingMask = false;
+                    calcViewport();
+                    return;
+                }
+
+                int texID = Common.genNormalTextureID(bmp, GLES20.GL_NEAREST, GLES20.GL_CLAMP_TO_EDGE);
+
+                mFrameRenderer.setMaskTexture(texID, bmp.getWidth() / (float) bmp.getHeight());
+                mIsUsingMask = true;
+                mMaskAspectRatio = bmp.getWidth() / (float) bmp.getHeight();
+
+                if (callback != null) {
+                    callback.setMaskOK(mFrameRenderer);
+                }
+
+                if (shouldRecycle)
+                    bmp.recycle();
+
+                calcViewport();
+            }
+        });
+    }
+
+    public synchronized MediaPlayer getPlayer() {
+        if (mPlayer == null) {
+            Log.e(LOG_TAG, "Player is not initialized!");
+        }
+        return mPlayer;
+    }
+
+    public interface OnCreateCallback {
+        void createOK();
+    }
+
+    private OnCreateCallback mOnCreateCallback;
+
+    //定制一些初始化操作
+    public void setOnCreateCallback(final OnCreateCallback callback) {
+
+        assert callback != null : "无意义操作!";
+
+        if (mFrameRenderer == null) {
+            mOnCreateCallback = callback;
+        } else {
+            // 已经创建完毕, 直接执行
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+                    callback.createOK();
+                }
+            });
+        }
+    }
+
+    public VideoPlayerGLSurfaceView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+
+        Log.i(LOG_TAG, "MyGLSurfaceView Construct...");
+
+        setEGLContextClientVersion(2);
+        setEGLConfigChooser(8, 8, 8, 8, 8, 0);
+        getHolder().setFormat(PixelFormat.RGBA_8888);
+        setRenderer(this);
+        setRenderMode(RENDERMODE_WHEN_DIRTY);
+        setZOrderOnTop(true);
+
+        Log.i(LOG_TAG, "MyGLSurfaceView Construct OK...");
+    }
+
+    @Override
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+
+        Log.i(LOG_TAG, "video player onSurfaceCreated...");
+
+        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
+        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
+
+        if (mOnCreateCallback != null) {
+            mOnCreateCallback.createOK();
+        }
+
+        if (mVideoUri != null && (mSurfaceTexture == null || mVideoTextureID == 0)) {
+            mVideoTextureID = Common.genSurfaceTextureID();
+            mSurfaceTexture = new SurfaceTexture(mVideoTextureID);
+            mSurfaceTexture.setOnFrameAvailableListener(VideoPlayerGLSurfaceView.this);
+            _useUri();
+        }
+    }
+
+    @Override
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+
+        mViewWidth = width;
+        mViewHeight = height;
+
+        calcViewport();
+    }
+
+    //must be in the OpenGL thread!
+    public void release() {
+
+        Log.i(LOG_TAG, "Video player view release...");
+
+        if (mPlayer != null) {
+            queueEvent(new Runnable() {
+                @Override
+                public void run() {
+
+                    Log.i(LOG_TAG, "Video player view release run...");
+
+                    if (mPlayer != null) {
+
+                        mPlayer.setSurface(null);
+                        if (mPlayer.isPlaying())
+                            mPlayer.stop();
+                        mPlayer.release();
+                        mPlayer = null;
+                    }
+
+                    if (mFrameRenderer != null) {
+                        mFrameRenderer.release();
+                        mFrameRenderer = null;
+                    }
+
+                    if (mSurfaceTexture != null) {
+                        mSurfaceTexture.release();
+                        mSurfaceTexture = null;
+                    }
+
+                    if (mVideoTextureID != 0) {
+                        GLES20.glDeleteTextures(1, new int[]{mVideoTextureID}, 0);
+                        mVideoTextureID = 0;
+                    }
+
+                    mIsUsingMask = false;
+                    mPreparedCallback = null;
+                    mPlayCompletionCallback = null;
+
+                    Log.i(LOG_TAG, "Video player view release OK");
+                }
+            });
+        }
+    }
+
+    @Override
+    public void onPause() {
+        Log.i(LOG_TAG, "surfaceview onPause ...");
+
+        super.onPause();
+    }
+
+    @Override
+    public void onDrawFrame(GL10 gl) {
+
+        if (mSurfaceTexture == null || mFrameRenderer == null) {
+            return;
+        }
+
+        mSurfaceTexture.updateTexImage();
+
+        if (!mPlayer.isPlaying()) {
+            return;
+        }
+
+        mSurfaceTexture.getTransformMatrix(mTransformMatrix);
+        mFrameRenderer.update(mVideoTextureID, mTransformMatrix);
+
+        mFrameRenderer.runProc();
+
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+
+        GLES20.glEnable(GLES20.GL_BLEND);
+        mFrameRenderer.render(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height);
+        GLES20.glDisable(GLES20.GL_BLEND);
+
+    }
+
+    private long mTimeCount2 = 0;
+    private long mFramesCount2 = 0;
+    private long mLastTimestamp2 = 0;
+
+    @Override
+    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+        requestRender();
+
+        if (mLastTimestamp2 == 0)
+            mLastTimestamp2 = System.currentTimeMillis();
+
+        long currentTimestamp = System.currentTimeMillis();
+
+        ++mFramesCount2;
+        mTimeCount2 += currentTimestamp - mLastTimestamp2;
+        mLastTimestamp2 = currentTimestamp;
+        if (mTimeCount2 >= 1e3) {
+            Log.i(LOG_TAG, String.format("播放帧率: %d", mFramesCount2));
+            mTimeCount2 -= 1e3;
+            mFramesCount2 = 0;
+        }
+    }
+
+    private void calcViewport() {
+        float scaling;
+
+        if (mIsUsingMask) {
+            scaling = mMaskAspectRatio;
+        } else {
+            scaling = mVideoWidth / (float) mVideoHeight;
+        }
+
+        float viewRatio = mViewWidth / (float) mViewHeight;
+        float s = scaling / viewRatio;
+
+        int w, h;
+
+        if (mFitFullView) {
+            //撑满全部view(内容大于view)
+            if (s > 1.0) {
+                w = (int) (mViewHeight * scaling);
+                h = mViewHeight;
+            } else {
+                w = mViewWidth;
+                h = (int) (mViewWidth / scaling);
+            }
+        } else {
+            //显示全部内容(内容小于view)
+            if (s > 1.0) {
+                w = mViewWidth;
+                h = (int) (mViewWidth / scaling);
+            } else {
+                h = mViewHeight;
+                w = (int) (mViewHeight * scaling);
+            }
+        }
+
+        mRenderViewport.width = w;
+        mRenderViewport.height = h;
+        mRenderViewport.x = (mViewWidth - mRenderViewport.width) / 2;
+        mRenderViewport.y = (mViewHeight - mRenderViewport.height) / 2;
+        Log.i(LOG_TAG, String.format("View port: %d, %d, %d, %d", mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height));
+    }
+
+    private void _useUri() {
+
+        if (mPlayer != null) {
+
+            mPlayer.stop();
+            mPlayer.reset();
+
+        } else {
+            mPlayer = new MediaPlayer();
+        }
+
+        try {
+            mPlayer.setDataSource(getContext(), mVideoUri);
+            mPlayer.setSurface(new Surface(mSurfaceTexture));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Log.e(LOG_TAG, "useUri failed");
+
+            if (mPlayCompletionCallback != null) {
+                this.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mPlayCompletionCallback != null) {
+                            if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
+                                mPlayCompletionCallback.playComplete(mPlayer);
+                        }
+                    }
+                });
+            }
+            return;
+        }
+
+        if (mPlayerInitCallback != null) {
+            mPlayerInitCallback.initPlayer(mPlayer);
+        }
+
+        mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
+            @Override
+            public void onCompletion(MediaPlayer mp) {
+                if (mPlayCompletionCallback != null) {
+                    mPlayCompletionCallback.playComplete(mPlayer);
+                }
+                Log.i(LOG_TAG, "Video Play Over");
+            }
+        });
+
+        mPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
+            @Override
+            public void onPrepared(MediaPlayer mp) {
+                mVideoWidth = mp.getVideoWidth();
+                mVideoHeight = mp.getVideoHeight();
+
+                queueEvent(new Runnable() {
+                    @Override
+                    public void run() {
+
+                        if (mFrameRenderer == null) {
+                            mFrameRenderer = new CGEFrameRenderer();
+                        }
+
+                        if (mFrameRenderer.init(mVideoWidth, mVideoHeight, mVideoWidth, mVideoHeight)) {
+                            //Keep right orientation for source texture blending
+                            mFrameRenderer.setSrcFlipScale(1.0f, -1.0f);
+                            mFrameRenderer.setRenderFlipScale(1.0f, -1.0f);
+                        } else {
+                            Log.e(LOG_TAG, "Frame Recorder init failed!");
+                        }
+
+                        calcViewport();
+                    }
+                });
+
+                if (mPreparedCallback != null) {
+                    mPreparedCallback.playPrepared(mPlayer);
+                } else {
+                    mp.start();
+                }
+
+                Log.i(LOG_TAG, String.format("Video resolution 1: %d x %d", mVideoWidth, mVideoHeight));
+            }
+        });
+
+        mPlayer.setOnErrorListener(new MediaPlayer.OnErrorListener() {
+            @Override
+            public boolean onError(MediaPlayer mp, int what, int extra) {
+
+                if (mPlayCompletionCallback != null)
+                    return mPlayCompletionCallback.playFailed(mp, what, extra);
+                return false;
+            }
+        });
+
+        try {
+            mPlayer.prepareAsync();
+        } catch (Exception e) {
+            Log.i(LOG_TAG, String.format("Error handled: %s, play failure handler would be called!", e.toString()));
+            if (mPlayCompletionCallback != null) {
+                this.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mPlayCompletionCallback != null) {
+                            if (!mPlayCompletionCallback.playFailed(mPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, MediaPlayer.MEDIA_ERROR_UNSUPPORTED))
+                                mPlayCompletionCallback.playComplete(mPlayer);
+                        }
+                    }
+                });
+            }
+        }
+
+    }
+
+    public interface TakeShotCallback {
+        //传入的bmp可以由接收者recycle
+        void takeShotOK(Bitmap bmp);
+    }
+
+    public synchronized void takeShot(final TakeShotCallback callback) {
+        assert callback != null : "callback must not be null!";
+
+        if (mFrameRenderer == null) {
+            Log.e(LOG_TAG, "Drawer not initialized!");
+            callback.takeShotOK(null);
+            return;
+        }
+
+        queueEvent(new Runnable() {
+            @Override
+            public void run() {
+
+                IntBuffer buffer = IntBuffer.allocate(mRenderViewport.width * mRenderViewport.height);
+
+                GLES20.glReadPixels(mRenderViewport.x, mRenderViewport.y, mRenderViewport.width, mRenderViewport.height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
+                Bitmap bmp = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
+                bmp.copyPixelsFromBuffer(buffer);
+
+                Bitmap bmp2 = Bitmap.createBitmap(mRenderViewport.width, mRenderViewport.height, Bitmap.Config.ARGB_8888);
+
+                Canvas canvas = new Canvas(bmp2);
+                Matrix mat = new Matrix();
+                mat.setTranslate(0.0f, -mRenderViewport.height / 2.0f);
+                mat.postScale(1.0f, -1.0f);
+                mat.postTranslate(0.0f, mRenderViewport.height / 2.0f);
+
+                canvas.drawBitmap(bmp, mat, null);
+                bmp.recycle();
+
+                callback.takeShotOK(bmp2);
+            }
+        });
+
+    }
+}

+ 185 - 0
media/cge_library/src/main/jni/Android.mk

@@ -0,0 +1,185 @@
+#
+# Created on: 2015-7-9
+#     Author: Wang Yang
+#       Mail: admin@wysaid.org
+#
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE    := CGE
+
+#*********************** CGE Library ****************************
+
+CGE_ROOT=$(LOCAL_PATH)
+
+CGE_SOURCE=$(CGE_ROOT)/cge
+
+CGE_INCLUDE=$(CGE_ROOT)/include
+
+#### CGE Library headers ###########
+LOCAL_C_INCLUDES := \
+					$(CGE_ROOT)/interface \
+					$(CGE_INCLUDE) \
+					$(CGE_INCLUDE)/filters \
+
+
+#### CGE Library native source  ###########
+
+LOCAL_SRC_FILES :=  \
+			$(CGE_SOURCE)/common/cgeCommonDefine.cpp \
+			$(CGE_SOURCE)/common/cgeGLFunctions.cpp \
+			$(CGE_SOURCE)/common/cgeImageFilter.cpp \
+			$(CGE_SOURCE)/common/cgeImageHandler.cpp \
+			$(CGE_SOURCE)/common/cgeShaderFunctions.cpp \
+			$(CGE_SOURCE)/common/cgeGlobal.cpp \
+			$(CGE_SOURCE)/common/cgeTextureUtils.cpp \
+			\
+			$(CGE_SOURCE)/filters/cgeAdvancedEffects.cpp \
+			$(CGE_SOURCE)/filters/cgeAdvancedEffectsCommon.cpp \
+			$(CGE_SOURCE)/filters/cgeBilateralBlurFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeMosaicBlurFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeBeautifyFilter.cpp \
+			\
+			$(CGE_SOURCE)/filters/cgeBrightnessAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeColorLevelAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeContrastAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeCurveAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeExposureAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeFilterBasic.cpp \
+			$(CGE_SOURCE)/filters/cgeHueAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeMonochromeAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeSaturationAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeSelectiveColorAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeShadowHighlightAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeSharpenBlurAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeTiltshiftAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeVignetteAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeWhiteBalanceAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeColorBalanceAdjust.cpp \
+			$(CGE_SOURCE)/filters/cgeLookupFilter.cpp \
+			\
+			$(CGE_SOURCE)/filters/cgeBlendFilter.cpp \
+			\
+			$(CGE_SOURCE)/filters/cgeDataParsingEngine.cpp \
+			$(CGE_SOURCE)/filters/cgeMultipleEffects.cpp \
+			$(CGE_SOURCE)/filters/cgeMultipleEffectsCommon.cpp \
+			\
+			$(CGE_SOURCE)/filters/cgeHazeFilter.cpp \
+			$(CGE_SOURCE)/filters/cgePolarPixellateFilter.cpp \
+			$(CGE_SOURCE)/filters/cgePolkaDotFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeHalftoneFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeEdgeFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeEmbossFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeCrosshatchFilter.cpp \
+			$(CGE_SOURCE)/filters/CGELiquifyFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeRandomBlurFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeMinValueFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeMaxValueFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeSketchFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeLerpblurFilter.cpp \
+			\
+			$(CGE_SOURCE)/filters/cgeDynamicFilters.cpp \
+			$(CGE_SOURCE)/filters/cgeDynamicWaveFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeMotionFlowFilter.cpp \
+			$(CGE_SOURCE)/filters/cgeColorMappingFilter.cpp \
+			$(CGE_SOURCE)/extends/cgeThread.cpp \
+			\
+			$(CGE_ROOT)/interface/cgeNativeLibrary.cpp \
+			$(CGE_ROOT)/interface/cgeFFmpegNativeLibrary.cpp \
+			$(CGE_ROOT)/interface/cgeSharedGLContext.cpp \
+			$(CGE_ROOT)/interface/cgeFrameRenderer.cpp \
+			$(CGE_ROOT)/interface/cgeFrameRendererWrapper.cpp \
+			$(CGE_ROOT)/interface/cgeFrameRecorder.cpp \
+			$(CGE_ROOT)/interface/cgeFrameRecorderWrapper.cpp \
+			$(CGE_ROOT)/interface/cgeVideoEncoder.cpp \
+			$(CGE_ROOT)/interface/cgeUtilFunctions.cpp \
+			$(CGE_ROOT)/interface/cgeVideoDecoder.cpp \
+			$(CGE_ROOT)/interface/cgeVideoPlayer.cpp \
+			$(CGE_ROOT)/interface/cgeImageHandlerAndroid.cpp \
+			$(CGE_ROOT)/interface/cgeImageHandlerWrapper.cpp \
+			$(CGE_ROOT)/interface/cgeDeformFilterWrapper.cpp \
+
+
+LOCAL_CPPFLAGS := -frtti -std=gnu++11
+LOCAL_LDLIBS :=  -llog -lEGL -lGLESv2 -ljnigraphics -latomic
+
+# 'CGE_USE_VIDEO_MODULE' determines if the project should compile with ffmpeg.
+
+ifdef CGE_USE_VIDEO_MODULE
+
+VIDEO_MODULE_DEFINE = -D_CGE_USE_FFMPEG_ 
+
+endif
+
+ifndef CGE_RELEASE_MODE
+BUILD_MODE = -D_CGE_LOGS_
+ifdef CGE_DEBUG_MODE
+BUILD_MODE += -DDEBUG
+endif
+endif
+
+ifdef CGE_USE_LEAK_TEST
+BUILD_MODE += -D_CGE_GENERAL_ERROR_TEST_
+endif
+
+LOCAL_CFLAGS    := ${VIDEO_MODULE_DEFINE} ${BUILD_MODE} -DANDROID_NDK -DCGE_LOG_TAG=\"libCGE\" -DCGE_TEXTURE_PREMULTIPLIED=1 -D__STDC_CONSTANT_MACROS -D_CGE_DISABLE_GLOBALCONTEXT_ -O3 -ffast-math -D_CGE_ONLY_FILTERS_
+
+ifdef CGE_USE_FACE_MODULE
+
+LOCAL_CFLAGS := $(LOCAL_CFLAGS) -D_CGE_USE_FACE_MODULE_
+
+endif
+
+ifndef CGE_USE_VIDEO_MODULE
+
+#LOCAL_CFLAGS := $(LOCAL_CFLAGS) -D_CGE_ONLY_FILTERS_
+
+include $(BUILD_SHARED_LIBRARY)
+
+else 
+
+LOCAL_SHARED_LIBRARIES := ffmpeg
+
+include $(BUILD_SHARED_LIBRARY)
+
+################################
+
+# include $(CLEAR_VARS)
+# LOCAL_MODULE := x264
+# LOCAL_CFLAGS := -march=armv7-a -mfloat-abi=softfp -mfpu=neon -O3 -ffast-math -funroll-loops
+# LOCAL_SRC_FILES := ffmpeg/libx264.142.so
+# #LOCAL_EXPORT_C_INCLUDES := $(CGE_ROOT)/ffmpeg
+# include $(PREBUILT_SHARED_LIBRARY)
+
+###############################
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := ffmpeg
+LOCAL_CFLAGS := -mfloat-abi=softfp -mfpu=vfp -O3 -ffast-math -funroll-loops -fPIC
+ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
+LOCAL_CFLAGS := $(LOCAL_CFLAGS) march=armv7-a -mfpu=neon
+endif
+LOCAL_SRC_FILES := ffmpeg/$(TARGET_ARCH_ABI)/libffmpeg.so
+LOCAL_EXPORT_C_INCLUDES := $(CGE_ROOT)/ffmpeg
+
+# LOCAL_SHARED_LIBRARIES := x264
+
+include $(PREBUILT_SHARED_LIBRARY)
+
+endif
+
+###############################
+
+ifdef CGE_USE_FACE_MODULE
+
+include $(CLEAR_VARS)
+include $(CGE_ROOT)/faceTracker/jni/Android.mk
+
+endif
+###############################
+
+# Call user defined module
+include $(CLEAR_VARS)
+include $(CGE_ROOT)/source/source.mk

+ 14 - 0
media/cge_library/src/main/jni/Application.mk

@@ -0,0 +1,14 @@
+
+APP_ABI := armeabi-v7a arm64-v8a x86 
+# armeabi & mips are deprecated
+#APP_ABI :=  armeabi-v7a
+
+APP_PLATFORM := android-14
+
+APP_STL := gnustl_static
+
+#APP_CPPFLAGS := -frtti -fexceptions
+#APP_CPPFLAGS := -fpermissive
+APP_CPPFLAGS := -frtti -std=gnu++11
+
+APP_OPTIM := release

+ 6 - 0
media/cge_library/src/main/jni/README.md

@@ -0,0 +1,6 @@
+# How To Build
+
+```
+export NDK=/path/of/your/ndk
+./buildJNI
+```

+ 70 - 0
media/cge_library/src/main/jni/buildJNI

@@ -0,0 +1,70 @@
+#!/bin/bash
+
+#replace the path below with your ndk-dir 
+#NDK=/Users/wangyang/android_dev/android-ndk
+
+if [[ "$NDK" == "" ]]; then
+
+echo "Please specify the NDK variable to your ndk-dir"
+exit
+
+fi
+
+cd $(dirname $0)
+
+for i in $*
+do
+
+if [ "${i}" == "-r" ] || [ "${i}" == "--release" ]; then
+
+export CGE_RELEASE_MODE=1
+
+echo "release mode enabled!"
+
+fi
+
+if [ "${i}" == "-d" ] || [ "${i}" == "--debug" ]; then
+
+export CGE_DEBUG_MODE=1
+
+echo "debug mode enabled!"
+
+fi
+
+if [ "${i}" == "-b" ] || [ "${i}" == "-B" ]; then
+
+BUILD_ARG=-B
+
+echo "force rebuild!"
+
+fi
+
+if [ "${i}" == "--noface" ] || [ "${i}" == "-n" ]; then
+
+NO_FACE=1
+
+echo "disable face module!"
+
+fi
+
+if [ "${i}" == "--leaktest" ] || [ "${i}" == "-lt" ] || [ "${i}" == "--lt" ]; then
+
+export CGE_USE_LEAK_TEST=1
+
+echo "leak test enabled!"
+
+fi
+
+done
+
+if [[ "$NO_FACE" == "" ]]; then
+
+export CGE_USE_FACE_MODULE=1
+
+fi
+
+export CGE_USE_VIDEO_MODULE=1
+
+echo "The NDK dir is: ${NDK}, If the shell can not run normally, you should set the NDK variable to your local ndk-dir"
+
+$NDK/ndk-build ${BUILD_ARG} -j8

+ 309 - 0
media/cge_library/src/main/jni/cge/common/cgeCommonDefine.cpp

@@ -0,0 +1,309 @@
+/*
+* cgeCommonDefine.cpp
+*
+*  Created on: 2013-12-6
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeCommonDefine.h"
+#include <cassert>
+
+#define CGE_VERSION "3.0.0"
+
+extern "C"
+{
+    
    // Drains the GL error queue and logs every pending error together with
    // the call site ("name" is the GL call just made, file/line its location).
    // The loop is capped at 32 iterations to avoid spinning forever on a
    // driver that keeps reporting errors. Returns true if any error was found.
    bool _cgeCheckGLError(const char* name, const char* file, int line)
    {
        int loopCnt = 0;
        for (GLenum error = glGetError(); loopCnt < 32 && error; error = glGetError(), ++loopCnt)
        {
            const char* pMsg;
            switch (error)
            {
                case GL_INVALID_ENUM: pMsg = "invalid enum"; break;
                case GL_INVALID_VALUE: pMsg = "invalid value"; break;
                case GL_INVALID_OPERATION: pMsg = "invalid operation"; break;
                case GL_OUT_OF_MEMORY: pMsg = "out of memory"; break;
                case GL_INVALID_FRAMEBUFFER_OPERATION: pMsg = "invalid framebuffer operation"; break;
                default: pMsg = "unknown error";
            }
            CGE_LOG_ERROR("After \"%s\" glGetError %s(0x%x) at %s:%d\n", name, pMsg, error, file, line);
        }
        
        return loopCnt != 0;
    }
+    
    // Returns the compile-time library version string (see CGE_VERSION).
    const char* cgeGetVersion()
    {
        return CGE_VERSION;
    }
    
    // Logs a named GL string query result, e.g. cgePrintGLString("Vendor", GL_VENDOR).
    void cgePrintGLString(const char* name, GLenum em)
    {
        CGE_LOG_INFO("GL_INFO %s = %s\n", name, glGetString(em));
    }
+    
    // Maps a blend-mode enum to a human-readable name, optionally including
    // the Chinese Photoshop term. Returns nullptr for an out-of-range mode.
    // The returned pointer refers to a static string table — do not free it.
    const char* cgeGetBlendModeName(CGETextureBlendMode mode, bool withChinese)
    {
        if(mode < 0 || mode >= CGE_BLEND_TYPE_MAX_NUM)
            return nullptr;
        
        // NOTE: the order of both tables must match the CGETextureBlendMode
        // enum exactly; "mode" is used as a direct index below.
        static CGEConstString s_blendModeName[] =
        {
            "mix", // 0
            "dissolve", // 1
            
            "darken", // 2
            "multiply", // 3
            "colorburn", // 4
            "linearburn", // 5
            "darkercolor", // 6
            
            "lighten", // 7
            "screen", // 8
            "colordodge", // 9
            "lineardodge", // 10
            "lightercolor", // 11
            
            "overlay", // 12
            "softlight", // 13
            "hardlight", // 14
            "vividlight", // 15
            "linearlight", // 16
            "pinlight", // 17
            "hardmix", // 18
            
            "difference", // 19
            "exclude", // 20
            "subtract", // 21
            "divide", // 22
            
            "hue", // 23
            "saturation", // 24
            "color", // 25
            "luminosity", // 26
            
            /////////////    More blend mode below (You can't see them in Adobe Photoshop)    //////////////
            
            "add",			// 27
            "addrev",	// 28
            "colorbw",		// 29
            
            /////////////    More blend mode above     //////////////
        };
        
        static CGEConstString s_blendModeNameWithChinese[] =
        {
            "mix(正常)", // 0
            "dissolve(溶解)", // 1
            
            "darken(变暗)", // 2
            "multiply(正片叠底)", // 3
            "color burn(颜色加深)", // 4
            "linear burn(线性加深)", // 5
            "darker color(深色)", // 6
            
            "lighten(变亮)", // 7
            "screen(滤色)", // 8
            "color dodge(颜色减淡)", // 9
            "linear dodge(线性减淡)", // 10
            "lighter color(浅色)", // 11
            
            "overlay(叠加)", // 12
            "soft light(柔光)", // 13
            "hard light(强光)", // 14
            "vivid light(亮光)", // 15
            "linear light(线性光)", // 16
            "pin light(点光)", // 17
            "hard mix(实色混合)", // 18
            
            "difference(差值)", // 19
            "exclude(排除)", // 20
            "subtract(减去)", // 21
            "divide(划分)", // 22
            
            "hue(色相)", // 23
            "saturation(饱和度)", // 24
            "color(颜色)", // 25
            "luminosity(明度)", // 26
            
            /////////////    More blend mode below (You can't see them in Adobe Photoshop)    //////////////
            
            "add(相加)",			// 27
            "addrev(反向加)",	// 28
            "colorbw(黑白)",		// 29
            
            /////////////    More blend mode above     //////////////
        };
        
        return withChinese ? s_blendModeNameWithChinese[mode] : s_blendModeName[mode];
    }
+    
    // Configures the fixed-function GL blend state for one of the global
    // blend modes. CGEGLOBAL_BLEND_NONE (and unknown values) disables
    // blending; every other mode enables GL_BLEND with the matching
    // glBlendFunc/glBlendFuncSeparate setup.
    void cgeSetGlobalBlendMode(const CGEGlobalBlendMode mode)
    {
#if CGE_TEXTURE_PREMULTIPLIED
        
        // Premultiplied textures already carry alpha in RGB, so the source
        // factor is GL_ONE instead of GL_SRC_ALPHA.
        const GLenum BLEND_SRC = GL_ONE;
        
#else
        
        const GLenum BLEND_SRC = GL_SRC_ALPHA;
        
#endif
        
        switch (mode)
        {
            case CGEGLOBAL_BLEND_ALPHA:
                glBlendFunc(BLEND_SRC, GL_ONE_MINUS_SRC_ALPHA);
                break;
            case CGEGLOBAL_BLEND_ALPHA_SEPERATE:
                glBlendFuncSeparate(BLEND_SRC, GL_ONE_MINUS_SRC_ALPHA, GL_ONE_MINUS_DST_ALPHA, GL_ONE);
                break;
            case CGEGLOBAL_BLEND_ADD:
                glBlendFunc(BLEND_SRC, GL_ONE);
                break;
            case CGEGLOBAL_BLEND_ADD_SEPARATE:
                glBlendFuncSeparate(BLEND_SRC, GL_ONE, GL_ONE_MINUS_DST_ALPHA, GL_ONE);
                break;
            case CGEGLOBAL_BLEND_ADD_SEPARATE_EXT:
                glBlendFuncSeparate(GL_ONE, GL_ONE, GL_ONE_MINUS_DST_ALPHA, GL_ONE);
                break;
            case CGEGLOBAL_BLEND_MULTIPLY:
                glBlendFunc(GL_ZERO, GL_SRC_COLOR);
                break;
            case CGEGLOBAL_BLEND_MULTIPLY_SEPERATE:
                glBlendFuncSeparate(BLEND_SRC, GL_SRC_COLOR, GL_ONE_MINUS_DST_ALPHA, GL_ONE);
                break;
            case CGEGLOBAL_BLEND_SCREEN:
                glBlendFunc(BLEND_SRC, GL_ONE_MINUS_SRC_COLOR);
                break;
            case CGEGLOBAL_BLEND_SCREEN_EXT:
                glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_COLOR);
                break;
            case CGEGLOBAL_BLEND_NONE:
                //fall through
            default:
                glDisable(GL_BLEND);
                return ;
        }
        
        glEnable(GL_BLEND);
    }
+    
+    void cgeGetDataAndChannelByFormat(CGEBufferFormat fmt, GLenum* dataFmt, GLenum* channelFmt, GLint* channel)
+    {
+        GLenum df, cf;
+        GLint c;
+        switch(fmt)
+        {
+            case CGE_FORMAT_RGB_INT8:
+                df = GL_UNSIGNED_BYTE;
+                cf = GL_RGB;
+                c = 3;
+                break;
+            case CGE_FORMAT_RGBA_INT8:
+                df = GL_UNSIGNED_BYTE;
+                cf = GL_RGBA;
+                c = 4;
+                break;
+            case CGE_FORMAT_RGB_INT16:
+                df = GL_UNSIGNED_SHORT;
+                cf = GL_RGB;
+                c = 3;
+                break;
+            case CGE_FORMAT_RGBA_INT16:
+                df = GL_UNSIGNED_SHORT;
+                cf = GL_RGBA;
+                c = 4;
+                break;
+            case CGE_FORMAT_RGB_FLOAT32:
+                df = GL_FLOAT;
+                cf = GL_RGB;
+                c = 3;
+                break;
+            case CGE_FORMAT_RGBA_FLOAT32:
+                df = GL_FLOAT;
+                cf = GL_RGB;
+                c = 4;
+                break;
+                
+#ifdef GL_BGR
+            case CGE_FORMAT_BGR_INT8:
+                df = GL_UNSIGNED_BYTE;
+                cf = GL_BGR;
+                c = 3;
+                break;
+            case CGE_FORMAT_BGR_INT16:
+                df = GL_UNSIGNED_SHORT;
+                cf = GL_BGR;
+                c = 3;
+                break;
+            case CGE_FORMAT_BGR_FLOAT32:
+                df = GL_FLOAT;
+                cf = GL_BGR;
+                c = 3;
+                break;
+#endif
+#ifdef GL_BGRA
+            case CGE_FORMAT_BGRA_INT8:
+                df = GL_UNSIGNED_BYTE;
+                cf = GL_BGRA;
+                c = 4;
+                break;
+            case CGE_FORMAT_BGRA_INT16:
+                df = GL_UNSIGNED_SHORT;
+                cf = GL_BGRA;
+                c = 4;
+                break;
+            case CGE_FORMAT_BGRA_FLOAT32:
+                df = GL_FLOAT;
+                cf = GL_BGRA;
+                c = 4;
+                break;
+#endif
+#ifdef GL_RED_EXT
+            case CGE_FORMAT_LUMINANCE:
+                df = GL_UNSIGNED_BYTE;
+                cf = GL_LUMINANCE;
+                c = 1;
+#endif
+#ifdef GL_LUMINANCE_ALPHA
+            case CGE_FORMAT_LUMINANCE_ALPHA:
+                df = GL_UNSIGNED_BYTE;
+                cf = GL_LUMINANCE_ALPHA;
+                c = 2;
+#endif
+            default:
+                df = GL_FALSE;
+                cf = GL_FALSE;
+                c = 0;
+        }
+        
+        if(dataFmt != nullptr) *dataFmt = df;
+        if(channelFmt != nullptr) *channelFmt = cf;
+        if(channel != nullptr) *channel = c;
+    }
+    
    // Creates a 2D texture from raw pixel data. `channel` (1-4) selects the
    // internal format (LUMINANCE / LUMINANCE_ALPHA / RGB / RGBA); `bindID`
    // chooses the texture unit used for the upload. Returns the new texture
    // id, or 0 when `channel` is out of range. `bufferData` may be null to
    // allocate an uninitialized texture of the given size.
    GLuint cgeGenTextureWithBuffer(const void* bufferData, GLint w, GLint h, GLenum channelFmt, GLenum dataFmt, GLint channel, GLint bindID, GLenum texFilter, GLenum texWrap)
    {
        assert(w != 0 && h != 0);
        GLuint tex;
        static const GLenum eArrs[] = { GL_LUMINANCE, GL_LUMINANCE_ALPHA, GL_RGB, GL_RGBA};
        if(channel <= 0 || channel > 4)
            return 0;
        const GLenum& internalFormat = eArrs[channel - 1];
        glActiveTexture(GL_TEXTURE0 + bindID);
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_2D, tex);
        // Tightly packed rows — required for widths that are not 4-byte aligned.
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
        glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, w, h, 0, channelFmt, dataFmt, bufferData);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, texFilter);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, texFilter);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, texWrap);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, texWrap);
        return tex;
    }
+    
+}

+ 306 - 0
media/cge_library/src/main/jni/cge/common/cgeGLFunctions.cpp

@@ -0,0 +1,306 @@
+/*
+* cgeGLFunctions.cpp
+*
+*  Created on: 2013-12-5
+*      Author: Wang Yang
+*/
+
+#include "cgeGLFunctions.h"
+#include <cmath>
+
+CGE_LOG_CODE
+(
+ static int sTextureCount;
+ )
+
+namespace CGE
+{
+    
#if !(defined(_CGE_DISABLE_GLOBALCONTEXT_) && _CGE_DISABLE_GLOBALCONTEXT_)

	// Host-application hooks for making the shared "global" GL context
	// current (enable) and releasing it (disable). CGE calls these around
	// GL work that may run off the render thread.
	static CGEEnableGLContextFunction s_enableGLFunc = nullptr;
	static CGEDisableGLContextFunction s_disableGLFunc = nullptr;
	static void* s_enableGLParam;
	static void* s_disableGLParam;
	static bool s_stopGlobalGLEnableFunc = false;

	// Registers the enable hook and its opaque user argument.
	void cgeSetGLContextEnableFunction(CGEEnableGLContextFunction func, void* param)
	{
		s_enableGLFunc = func;
		s_enableGLParam = param;
	}

	// Registers the disable hook and its opaque user argument.
	void cgeSetGLContextDisableFunction(CGEDisableGLContextFunction func, void* param)
	{
		s_disableGLFunc = func;
		s_disableGLParam = param;
	}

	void* cgeGetGLEnableParam()
	{
		return s_enableGLParam;
	}

	void* cgeGetGLDisableParam()
	{
		return s_disableGLParam;
	}

	// NOTE(review): the flag set below is never consulted in this file —
	// presumably checked elsewhere (e.g. by CGE_ENABLE_GLOBAL_GLCONTEXT).
	void cgeStopGlobalGLEnableFunction()
	{
		s_stopGlobalGLEnableFunc = true;
	}

	void cgeRestoreGlobalGLEnableFunction()
	{
		s_stopGlobalGLEnableFunc = false;
	}

	// Invokes the registered enable hook, if any.
	void cgeEnableGlobalGLContext()
	{
		if(s_enableGLFunc)
			s_enableGLFunc(s_enableGLParam);
	}

	// Invokes the registered disable hook, if any.
	void cgeDisableGlobalGLContext()
	{
		if(s_disableGLFunc)
			s_disableGLFunc(s_disableGLParam);
	}

#endif
+
+	//////////////////////////////////////////////////////////////////////////
+
	// Host-application hooks for loading/unloading image resources by name.
	// The load hook fills buffer/size/format; the unload hook releases
	// whatever handle the load hook returned.
	static CGEBufferLoadFun s_loadDataWithSourceNameCommon = nullptr;
	static CGEBufferUnloadFun s_unloadBufferDataCommon = nullptr;
	static void* s_cgeLoadParam = nullptr;
	static void* s_cgeUnloadParam = nullptr;
	

	// Registers the common load callback and its opaque user argument.
	void cgeSetCommonLoadFunction(CGEBufferLoadFun fun, void* arg)
	{
		s_loadDataWithSourceNameCommon = fun;
		s_cgeLoadParam = arg;

	}
	// Registers the common unload callback and its opaque user argument.
	void cgeSetCommonUnloadFunction(CGEBufferUnloadFun fun, void* arg)
	{
		s_unloadBufferDataCommon = fun;
		s_cgeUnloadParam = arg;
	}

	// Loads a resource through the registered callback. Returns the
	// callback's opaque handle (to be passed to cgeUnloadResourceCommon),
	// or nullptr when no callback is registered.
	void* cgeLoadResourceCommon(const char* sourceName, void** bufferData, CGEBufferFormat* fmt, GLint* w, GLint* h)
	{
		if(s_loadDataWithSourceNameCommon != nullptr)
			return s_loadDataWithSourceNameCommon(sourceName, bufferData, w, h, fmt, &s_cgeLoadParam);
		return nullptr;
	}

	// Releases a resource previously returned by cgeLoadResourceCommon.
	// Returns false when no unload callback is registered.
	bool cgeUnloadResourceCommon(void* bufferArg)
	{
		if(s_unloadBufferDataCommon != nullptr)
			return s_unloadBufferDataCommon(bufferArg, s_cgeUnloadParam);
		return false;
	}

	CGEBufferLoadFun cgeGetCommonLoadFunc()
	{
		return s_loadDataWithSourceNameCommon;
	}

	CGEBufferUnloadFun cgeGetCommonUnloadFunc()
	{
		return s_unloadBufferDataCommon;
	}

	void* cgeGetCommonLoadArg()
	{
		return s_cgeLoadParam;
	}

	void* cgeGetCommonUnloadArg()
	{
		return s_cgeUnloadParam;
	}
+
+	char* cgeGetScaledBufferOutofSize(const void* buffer, int& w, int& h, int channel, int minSizeX, int minSizeY)
+	{
+		if((minSizeX < w && minSizeY < h) || buffer == nullptr) return nullptr;
+		char *tmpbuffer = nullptr;
+		const char* data = (const char*)buffer;
+
+		int width = w;
+		double scale = CGE_MIN(w / (float)minSizeX, h / (float)minSizeY);
+		w = ceilf(w / scale);
+		h = ceilf(h / scale);
+
+		int len = w * h;
+		tmpbuffer = new char[len * channel];
+		if(channel == 4)
+		{
+			for(int i = 0; i != h; ++i)
+			{
+				for(int j = 0; j != w; ++j)
+				{
+					const int L = (j + i * w) * channel;
+					const int R = (static_cast<int>(j * scale) + static_cast<int>(i * scale) * width) * channel;
+					tmpbuffer[L] = data[R];
+					tmpbuffer[L+1] = data[R+1];
+					tmpbuffer[L+2] = data[R+2];
+					tmpbuffer[L+3] = data[R+3];
+				}
+			}
+		}
+		else if(channel == 3)
+		{
+			for(int i = 0; i != h; ++i)
+			{
+				for(int j = 0; j != w; ++j)
+				{
+					const int L = (j + i * w) * channel;
+					const int R = (static_cast<int>(j * scale) + static_cast<int>(i * scale) * width) * channel;
+					tmpbuffer[L] = data[R];
+					tmpbuffer[L+1] = data[R+1];
+					tmpbuffer[L+2] = data[R+2];
+				}
+			}
+		}
+		return tmpbuffer;
+	}
+
+	char* cgeGetScaledBufferInSize(const void* buffer, int& w, int& h, int channel, int maxSizeX, int maxSizeY)
+	{
+		if((maxSizeX > w && maxSizeY > h) || buffer == nullptr) return nullptr;
+		char *tmpbuffer = nullptr;
+		const char* data = (const char*)buffer;
+
+		int width = w;
+		double scale = CGE_MAX(w / (float)maxSizeX, h / (float)maxSizeY);
+		w = w / scale;
+		h = h / scale;
+		int len = w * h;
+		tmpbuffer = new char[len * channel];
+		if(channel == 4)
+		{
+			for(int i = 0; i != h; ++i)
+			{
+				for(int j = 0; j != w; ++j)
+				{
+					const int L = (j + i * w) * channel;
+					const int R = (static_cast<int>(j * scale) + static_cast<int>(i * scale) * width) * channel;
+					tmpbuffer[L] = data[R];
+					tmpbuffer[L+1] = data[R+1];
+					tmpbuffer[L+2] = data[R+2];
+					tmpbuffer[L+3] = data[R+3];
+				}
+			}
+		}
+		else if (channel == 3)
+		{
+			for(int i = 0; i != h; ++i)
+			{
+				for(int j = 0; j != w; ++j)
+				{
+					const int L = (j + i * w) * channel;
+					const int R = (static_cast<int>(j * scale) + static_cast<int>(i * scale) * width) * channel;
+					tmpbuffer[L] = data[R];
+					tmpbuffer[L+1] = data[R+1];
+					tmpbuffer[L+2] = data[R+2];
+				}
+			}
+		}
+		return tmpbuffer;
+	}
+
+    /////////////////////////////////////////////////
+    
    // Wraps an existing GL texture with intrusive reference counting.
    // The refcount starts at 1; copies presumably bump it elsewhere.
    SharedTexture::SharedTexture(GLuint textureID, int w, int h)
    {
        m_textureID = textureID;
        m_refCount = new int(1);
        width = w;
        height = h;
        CGE_LOG_CODE
        (
         if(m_textureID == 0)
         CGE_LOG_ERROR("CGESharedTexture : Invalid TextureID!");
         else
         {
             CGE_LOG_INFO("---CGESharedTexture creating, textureID %d, total : %d ###\n", textureID, ++sTextureCount);
         }
         );
    }
    
    // Drops one reference; deletes the GL texture when the count hits zero.
    // A null m_refCount means the instance was already released via
    // forceRelease()/clear().
    SharedTexture::~SharedTexture()
    {
        if(m_refCount == nullptr)
        {
            CGE_LOG_CODE
            (
             if(m_textureID != 0)
             {
                 CGE_LOG_ERROR("SharedTexture : Error occurred!");
             });
            return;
        }
        
        --*m_refCount;
        if(*m_refCount <= 0)
        {
            clear();
        }
        CGE_LOG_CODE
        (
         else
         {
             CGE_LOG_INFO("@@@ Texture %d deRef count: %d\n", m_textureID, *m_refCount);
         })
    }
    
    // Immediately releases this wrapper (optionally deleting the GL texture).
    void SharedTexture::forceRelease(bool bDelTexture)
    {
        assert(m_refCount == nullptr || *m_refCount == 1); // forceRelease requires that only one SharedTexture instance exists
        if(bDelTexture)
            glDeleteTextures(1, &m_textureID);
        m_textureID = 0;
        CGE_DELETE(m_refCount);
        width = 0;
        height = 0;
        CGE_LOG_CODE
        (
         --sTextureCount;
         );
    }
    
    // Deletes the GL texture and frees the refcount; only called once the
    // reference count has dropped to zero.
    void SharedTexture::clear()
    {
        CGE_LOG_CODE
        (
         if(m_textureID == 0)
         {
             CGE_LOG_ERROR("!!!CGESharedTexture - Invalid TextureID To Release!\n");
         }
         else
         {
             CGE_LOG_INFO("###CGESharedTexture deleting, textureID %d, now total : %d ###\n", m_textureID, --sTextureCount);
         });
        
        assert(*m_refCount == 0); // unknown error
        
        glDeleteTextures(1, &m_textureID);
        m_textureID = 0;
        
        CGE_DELETE(m_refCount);
        width = 0;
        height = 0;
    }
+}
+
+
+
+
+
+
+
+

+ 245 - 0
media/cge_library/src/main/jni/cge/common/cgeGlobal.cpp

@@ -0,0 +1,245 @@
+/*
+* cgeGlobal.cpp
+*
+*  Created on: 2014-9-9
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeGlobal.h"
+#include "cgeCommonDefine.h"
+
+#ifndef _CGE_ONLY_FILTERS_
+
+#include "cgeSprite2d.h"
+#include "cgeAction.h"
+
+#endif
+
+#include <cstring>
+
+namespace CGE
+{
+	bool g_isFastFilterImpossible = true;
+
+#ifdef _CGE_USE_ES_API_3_0_
+	bool g_shouldUsePBO = true;
+#endif
+
+	int CGEGlobalConfig::viewWidth = 1024;
+	int CGEGlobalConfig::viewHeight = 768;
+	GLuint CGEGlobalConfig::sVertexBufferCommon = 0;
+	float CGEGlobalConfig::sVertexDataCommon[8] = {-1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f};
+
	// Probes the current GL context's capabilities: decides whether the
	// "fast filter" path is usable (needs >= 300 fragment uniform slots)
	// and, on ES 3.0 builds, whether GL_PIXEL_PACK_BUFFER (PBO) works.
	// Must be called with a GL context current.
	void cgeInitFilterStatus()
	{
		GLint iParam;

#ifdef GL_MAX_FRAGMENT_UNIFORM_VECTORS //GL ES
		glGetIntegerv(GL_MAX_FRAGMENT_UNIFORM_VECTORS, &iParam);
#endif
#ifdef GL_MAX_FRAGMENT_UNIFORM_COMPONENTS //GL Desktop
		glGetIntegerv(GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, &iParam);
#endif

		g_isFastFilterImpossible = (iParam < 300);

		CGE_LOG_INFO("Use Fast Filter: %d\n", !g_isFastFilterImpossible);

		cgeCheckGLError("cgeInitFilterStatus - before 'GL_PIXEL_PACK_BUFFER'");

#if defined(_CGE_USE_ES_API_3_0_) && defined(GL_PIXEL_PACK_BUFFER)

		// Check whether GL_PIXEL_PACK_BUFFER is supported here: binding it
		// raises a GL error on contexts that lack PBO support.
		glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
		if(glGetError() == GL_FALSE)
		{
			g_shouldUsePBO = true;
		}
		else
		{
			g_shouldUsePBO = false;
			CGE_LOG_ERROR("GL_PIXEL_PACK_BUFFER is not supported!\n");
		}
#endif
	}
+
+	bool cgeInitialize(int width, int height, CGEGlobalConfig::InitArguments arg)
+	{
+		cgeInitFilterStatus();
+
+		CGEGlobalConfig::viewWidth = width;
+		CGEGlobalConfig::viewHeight = height;
+
+		if(arg == CGEGlobalConfig::CGE_INIT_LEAST)
+		{
+			CGE_LOG_INFO("You chosed CGE_INIT_LEAST for initialize, so the sprites could not be used! Once you used, unexpected problem would be happen\n");
+			return true;
+		}
+
+        if((arg & CGEGlobalConfig::CGE_INIT_COMMONVERTEXBUFFER) && CGEGlobalConfig::sVertexBufferCommon == 0)
+		{
+			glGenBuffers(1, &CGEGlobalConfig::sVertexBufferCommon);
+			if(CGEGlobalConfig::sVertexBufferCommon == 0)
+				return false;
+
+			glBindBuffer(GL_ARRAY_BUFFER, CGEGlobalConfig::sVertexBufferCommon);
+			glBufferData(GL_ARRAY_BUFFER, sizeof(CGEGlobalConfig::sVertexDataCommon), CGEGlobalConfig::sVertexDataCommon, GL_STATIC_DRAW);
+			glBindBuffer(GL_ARRAY_BUFFER, 0);
+		}
+
+#ifndef _CGE_ONLY_FILTERS_
+
+		if(width > 0 && height > 0)
+		{
+			SpriteCommonSettings::sSetCanvasSize(width, height);
+		}
+
+		if(arg & CGEGlobalConfig::CGE_INIT_SPRITEBUILTIN)
+			cgeSpritesInitBuiltin();
+
+#endif
+
+		cgeCheckGLError("cgeInitialize");
+
+		return true;
+	}
+
+	void cgeCleanup()
+	{
+		using namespace CGE;
+		glDeleteBuffers(1, &CGEGlobalConfig::sVertexBufferCommon);
+		CGEGlobalConfig::sVertexBufferCommon = 0;
+
+#ifndef _CGE_ONLY_FILTERS_
+
+		cgeSpritesCleanupBuiltin();
+
+		CGE_LOG_CODE
+        (
+         if(!CGE::SpriteCommonSettings::getDebugManager().empty())
+         {
+             CGE_LOG_ERROR("Warning! %d sprites are not deleted!\n", (int)CGE::SpriteCommonSettings::getDebugManager().size());
+         }
+         
+         if(!CGE::TimeActionInterfaceAbstract::getDebugManager().empty())
+         {
+             CGE_LOG_ERROR("Warning! %d sprites are not deleted!\n", (int)CGE::TimeActionInterfaceAbstract::getDebugManager().size());
+         }
+         );
+#endif
+	}
+
	// Updates the globally recorded view size and (in full builds) the
	// sprite canvas size to match.
	void cgeSetGlobalViewSize(int width, int height)
	{
		CGEGlobalConfig::viewWidth = width;
		CGEGlobalConfig::viewHeight = height;

#ifndef _CGE_ONLY_FILTERS_

		CGE::SpriteCommonSettings::sSetCanvasSize(width, height);

#endif
	}
+}
+
+extern "C"
+{
+    
    // Dumps the current GL context's vendor/renderer strings and the main
    // implementation limits to the log. Purely diagnostic; requires a
    // current GL context.
    void cgePrintGLInfo()
    {
        CGE_LOG_INFO("===== Here are some information of your device =====\n\n");
        
        cgePrintGLString("Vendor", GL_VENDOR);
        cgePrintGLString("Renderer", GL_RENDERER);
        cgePrintGLString("GL Version", GL_VERSION);
        cgePrintGLString("GL Extension", GL_EXTENSIONS);
        cgePrintGLString("Shading Language Version", GL_SHADING_LANGUAGE_VERSION);
        
        // Two elements: some queries below (viewport dims, ranges) fill both.
        GLint iParam[2];
        
#ifdef GL_MAX_VERTEX_UNIFORM_VECTORS //GL ES
        glGetIntegerv(GL_MAX_VERTEX_UNIFORM_VECTORS, iParam);
#endif
#ifdef GL_MAX_VERTEX_UNIFORM_COMPONENTS //GL Desktop
        glGetIntegerv(GL_MAX_VERTEX_UNIFORM_COMPONENTS, iParam);
#endif
        CGE_LOG_INFO("Max Vertex Uniform Vectors: %d\n", iParam[0]);
        
#ifdef GL_MAX_FRAGMENT_UNIFORM_VECTORS
        glGetIntegerv(GL_MAX_FRAGMENT_UNIFORM_VECTORS, iParam);
#endif
#ifdef GL_MAX_FRAGMENT_UNIFORM_COMPONENTS
        glGetIntegerv(GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, iParam);
#endif
        CGE_LOG_INFO("Max Fragment Uniform Vectors : %d\n", iParam[0]);
        
#ifdef GL_MAX_VARYING_VECTORS
        glGetIntegerv(GL_MAX_VARYING_VECTORS, iParam);
#endif
#ifdef GL_MAX_VARYING_COMPONENTS
        glGetIntegerv(GL_MAX_VARYING_COMPONENTS, iParam);
#endif
        CGE_LOG_INFO("Max Varying Vectors: %d\n", iParam[0]);
        
        glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, iParam);
        CGE_LOG_INFO("Max Texture Image Units : %d\n", iParam[0]);
        
        glGetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, iParam);
        CGE_LOG_INFO("Max Combined Texture Image Units : %d\n", iParam[0]);
        
        glGetIntegerv(GL_MAX_TEXTURE_SIZE, iParam);
        CGE_LOG_INFO("Max Texture Size : %d\n", iParam[0]);
        
        glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, iParam);
        CGE_LOG_INFO("Max Vertex Attribs: %d\n", iParam[0]);
        
        glGetIntegerv(GL_MAX_VIEWPORT_DIMS, iParam);
        CGE_LOG_INFO("Max Viewport Dims : [%d, %d]\n", iParam[0], iParam[1]);
        
        glGetIntegerv(GL_MAX_RENDERBUFFER_SIZE, iParam);
        CGE_LOG_INFO("Max Render Buffer Size: %d\n", iParam[0]);
        
        glGetIntegerv(GL_STENCIL_BITS, iParam);
        CGE_LOG_INFO("Stencil Buffer Bits : %d\n", iParam[0]);
        
        glGetIntegerv(GL_ALIASED_POINT_SIZE_RANGE, iParam);
        CGE_LOG_INFO("Point Size Range: [%d, %d]\n", iParam[0], iParam[1]);
        
        glGetIntegerv(GL_ALIASED_LINE_WIDTH_RANGE, iParam);
        CGE_LOG_INFO("Line Width Range: [%d, %d]\n", iParam[0], iParam[1]);
        
        glGetIntegerv(GL_DEPTH_BITS, iParam);
        CGE_LOG_INFO("Depth Bits: %d\n", iParam[0]);
        
        cgeCheckGLError("cgePrintGLInfo");
        CGE_LOG_INFO("\n===== Information end =====\n\n");
    }
+    
+    const char* cgeQueryGLExtensions()
+    {
+        return (const char*)glGetString(GL_EXTENSIONS);
+    }
+    
+    bool cgeCheckGLExtension(const char* ext)
+    {
+        const char* extString = cgeQueryGLExtensions();
+        return strstr(extString, ext) != nullptr;
+    }
+    
    // Creates a fresh VBO holding the shared full-screen-quad vertices.
    // Returns the buffer id, or 0 on failure. NOTE: unlike cgeInitialize,
    // this leaves the new buffer bound to GL_ARRAY_BUFFER on return.
    GLuint cgeGenCommonQuadArrayBuffer()
	{
		GLuint bufferID = 0;
		glGenBuffers(1, &bufferID);
		if(bufferID == 0)
			return 0;

		glBindBuffer(GL_ARRAY_BUFFER, bufferID);
		glBufferData(GL_ARRAY_BUFFER, sizeof(CGE::CGEGlobalConfig::sVertexDataCommon), CGE::CGEGlobalConfig::sVertexDataCommon, GL_STATIC_DRAW);

		return bufferID;
	}
+    
+}
+
+

+ 283 - 0
media/cge_library/src/main/jni/cge/common/cgeImageFilter.cpp

@@ -0,0 +1,283 @@
+/*
+* cgeImageProcessing.cpp
+*
+*  Created on: 2013-12-13
+*      Author: Wang Yang
+*/
+
+#include "cgeImageFilter.h"
+
+CGE_UNEXPECTED_ERR_MSG
+(
+ static int sFilterCount = 0;
+ )
+
+namespace CGE
+{
+
+	CGEConstString g_vshDefault = CGE_SHADER_STRING
+	(
+	attribute vec2 vPosition;
+	attribute vec2 aTexCoord;
+	varying vec2 textureCoordinate;
+	void main()
+	{
+		gl_Position = vec4(vPosition, 0.0, 1.0);
+		textureCoordinate = aTexCoord;
+	}
+	);
+
+	CGEConstString g_vshDefaultWithoutTexCoord = CGE_SHADER_STRING
+	(
+	attribute vec2 vPosition;
+	varying vec2 textureCoordinate;
+	void main()
+	{
+		gl_Position = vec4(vPosition, 0.0, 1.0);
+		//An opportunism code. Do not use it unless you know what it means.
+		textureCoordinate = (vPosition.xy + 1.0) / 2.0;
+	}
+	);
+
+	CGEConstString g_vshDrawToScreen = CGE_SHADER_STRING
+	(
+	attribute vec2 vPosition;
+	varying vec2 textureCoordinate;
+	void main()
+	{
+		gl_Position = vec4(vPosition, 0.0, 1.0);
+		textureCoordinate = (vec2(vPosition.x, -vPosition.y) + 1.0) / 2.0;
+	}
+	);
+
+	CGEConstString g_vshDrawToScreenRot90 = CGE_SHADER_STRING
+	(
+	attribute vec2 vPosition;
+	varying vec2 textureCoordinate;
+	const mat2 mRot = mat2(0.0, 1.0, -1.0, 0.0);
+	void main()
+	{
+		gl_Position = vec4(vPosition, 0.0, 1.0);
+		textureCoordinate = vec2(vPosition.x, -vPosition.y) / 2.0 * mRot + 0.5;
+	}
+	);
+
+	CGEConstString g_fshDefault = CGE_SHADER_STRING_PRECISION_L
+	(
+	varying vec2 textureCoordinate;
+	uniform sampler2D inputImageTexture;
+	void main()
+	{
+		gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
+	}
+	);
+
+	//////////////////////////////////////////////////////////////////////////
+
+	static CGEConstString s_fshFastAdjust = CGE_SHADER_STRING_PRECISION_M
+	(
+	varying vec2 textureCoordinate;
+	uniform sampler2D inputImageTexture;
+	uniform vec3 curveArray[%d]; // The array size is defined by "CURVE_PRECISION".
+
+	const float curvePrecision = %.1f; //The same to above.
+
+	void main()
+	{
+		vec4 src = texture2D(inputImageTexture, textureCoordinate);
+		gl_FragColor = vec4(curveArray[int(src.r * curvePrecision)].r,
+			curveArray[int(src.g * curvePrecision)].g,
+			curveArray[int(src.b * curvePrecision)].b,
+			src.a);
+	});
+
+	static char s_fshFastAdjustBuffer[512];
+	CGEConstString g_fshFastAdjust = s_fshFastAdjustBuffer;
+
+	CGEConstString g_paramFastAdjustArrayName = "curveArray";
+
+	//////////////////////////////////////////////////////////////////////////
+
+	static CGEConstString s_fshFastAdjustRGB = CGE_SHADER_STRING_PRECISION_M
+	(
+	varying vec2 textureCoordinate;
+	uniform sampler2D inputImageTexture;
+	uniform float curveArrayRGB[%d]; // The array size is defined by "CURVE_PRECISION".
+
+	const float curvePrecision = %.1f; //The same to above.
+
+	void main()
+	{
+        vec4 src = texture2D(inputImageTexture, textureCoordinate);
+		gl_FragColor = vec4(curveArrayRGB[int(src.r * curvePrecision)],
+			curveArrayRGB[int(src.g * curvePrecision)],
+			curveArrayRGB[int(src.b * curvePrecision)],
+            src.a);
+	});
+
+	static char s_fshFastAdjustRGBBuffer[512];
+	CGEConstString g_fshFastAdjustRGB = s_fshFastAdjustRGBBuffer;
+
+	CGEConstString g_paramFastAdjustRGBArrayName = "curveArrayRGB";
+
+	//////////////////////////////////////////////////////////////////////////
+
	// Expands a fast-adjust shader template: substitutes the curve array
	// size (%d) and the precision constant (%.1f) into `src`, writing the
	// final GLSL into `dst`. Returns sprintf's character count.
	static inline int s_genFastAdjustShader(char* dst, const char* src, int precision)
	{
		return sprintf(dst, src, precision, precision - 1.0f);
	}

	// Pre-generate both fast-adjust shaders from CGE_CURVE_PRECISION at
	// program startup (static initializer side effect).
	static bool _dummy = s_genFastAdjustShader(s_fshFastAdjustBuffer, s_fshFastAdjust, CGE_CURVE_PRECISION) && s_genFastAdjustShader(s_fshFastAdjustRGBBuffer, s_fshFastAdjustRGB, CGE_CURVE_PRECISION);
+
+	//////////////////////////////////////////////////////////////////////////
+
+
+	CGEConstString g_fshCurveMapNoIntensity = CGE_SHADER_STRING_PRECISION_M
+	(
+	varying vec2 textureCoordinate;
+	uniform sampler2D inputImageTexture;
+	uniform sampler2D curveTexture; //We do not use sampler1D because GLES dosenot support that.
+
+	void main()
+	{
+        vec4 src = texture2D(inputImageTexture, textureCoordinate);
+		gl_FragColor = vec4(texture2D(curveTexture, vec2(src.r, 0.0)).r,
+			texture2D(curveTexture, vec2(src.g, 0.0)).g,
+			texture2D(curveTexture, vec2(src.b, 0.0)).b,
+            src.a);
+	}
+	);
+
+	CGEConstString g_paramCurveMapTextureName = "curveTexture";
+
+	//////////////////////////////////////////////////////////////////////////
+
    // Base filter constructor: makes the global GL context current (filters
    // may be created off the render thread) and, in debug builds, tracks the
    // live filter count.
    CGEImageFilterInterfaceAbstract::CGEImageFilterInterfaceAbstract()
    {
        CGE_ENABLE_GLOBAL_GLCONTEXT();
        CGE_UNEXPECTED_ERR_MSG
        (
         CGE_LOG_KEEP("Filter create, total: %d\n", ++sFilterCount);
        )
    }
    
    CGEImageFilterInterfaceAbstract::~CGEImageFilterInterfaceAbstract()
    {
        CGE_UNEXPECTED_ERR_MSG
        (
         CGE_LOG_KEEP("Filter release, remain: %d\n", --sFilterCount);
         )
    }
+    
+    //////////////////////////////////////////////////////////////////////////
+
	// Shader parameter names shared by all filters.
	CGEConstString CGEImageFilterInterface::paramInputImageName = "inputImageTexture";
	CGEConstString CGEImageFilterInterface::paramPositionIndexName = "vPosition";

	CGEImageFilterInterface::CGEImageFilterInterface() : m_uniformParam(nullptr)
	{
		// Bind every filter's paramPositionIndexName to attribute location 0.
		m_program.bindAttribLocation(paramPositionIndexName, 0);
	}

	CGEImageFilterInterface::~CGEImageFilterInterface()
	{
		// delete on nullptr is a no-op, so no null check is needed.
		delete m_uniformParam;
	}
+
	//////////////////////////////////////////////////////////////////////////
	// Compiles and links the filter's shader program from source strings.
	bool CGEImageFilterInterface::initShadersFromString(const char* vsh, const char* fsh)
	{
		return m_program.initWithShaderStrings(vsh, fsh);
	}

	// Runs the filter: targets the handler's FBO, binds srcTexture to unit 0,
	// applies any extra uniforms, and draws the full-screen quad.
	// NOTE(review): vertexBufferID is accepted but never bound here —
	// presumably the caller binds it beforehand; confirm against call sites.
	void CGEImageFilterInterface::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
	{
		handler->setAsTarget();
		m_program.bind();
		
		// Attribute 0 is paramPositionIndexName (bound in the constructor).
		glEnableVertexAttribArray(0);
		glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);		
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, srcTexture);

		if(m_uniformParam != nullptr)
			m_uniformParam->assignUniforms(handler, m_program.programID());

		glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
		cgeCheckGLError("glDrawArrays");
	}

	// Takes ownership of `param`, replacing (and deleting) any previous one.
	void CGEImageFilterInterface::setAdditionalUniformParameter(UniformParameters* param)
	{
		if(m_uniformParam == param) return;
		delete m_uniformParam;
		m_uniformParam = param;
	}
+
+	//////////////////////////////////////////////////////////////////////////
+
	CGEConstString CGEFastAdjustFilter::paramArray = g_paramFastAdjustArrayName;

	// Builds the identity RGB curve and compiles the fast-adjust shader.
	bool CGEFastAdjustFilter::init()
	{
		initCurveArrays();
		return initShadersFromString(CGE::g_vshDefaultWithoutTexCoord, g_fshFastAdjust);
	}

	// Uploads the current per-channel curve (vec3 array) to the shader.
	void CGEFastAdjustFilter::assignCurveArrays()
	{
		m_program.bind();
		GLint index = glGetUniformLocation(m_program.programID(), paramArray);
		if(index < 0)
		{
			CGE_LOG_ERROR("CGEFastAdjustFilter: Failed when assignCurveArray();\n");
			return;
		}
		glUniform3fv(index, (GLsizei)m_curve.size(), &m_curve[0][0]);
	}

	// Initializes the curve to the identity mapping: entry t maps to
	// t / (CGE_CURVE_PRECISION - 1) on all three channels.
	void CGEFastAdjustFilter::initCurveArrays()
	{
		m_curve.resize(CGE_CURVE_PRECISION);

		for(std::vector<float>::size_type t = 0; t != CGE_CURVE_PRECISION; ++t)
		{
			m_curve[t][0] = float(t) / (CGE_CURVE_PRECISION - 1.0f);
			m_curve[t][1] = float(t) / (CGE_CURVE_PRECISION - 1.0f);
			m_curve[t][2] = float(t) / (CGE_CURVE_PRECISION - 1.0f);
		}
	}
+
	CGEConstString CGEFastAdjustRGBFilter::paramArrayRGB = g_paramFastAdjustRGBArrayName;

	// Builds the identity curve and compiles the single-curve variant of the
	// fast-adjust shader (one float array applied to R, G and B alike).
	bool CGEFastAdjustRGBFilter::init()
	{
		initCurveArray();
		return initShadersFromString(CGE::g_vshDefaultWithoutTexCoord, g_fshFastAdjustRGB);
	}

	// Uploads the shared curve (float array) to the shader.
	void CGEFastAdjustRGBFilter::assignCurveArray()
	{
		m_program.bind();
		GLint index = glGetUniformLocation(m_program.programID(), paramArrayRGB);
		if(index < 0)
		{
			CGE_LOG_ERROR("CGEFastAdjustRGBFilter: Failed when assignCurveArray();\n");
			return ;
		}
		glUniform1fv(index, (GLsizei)m_curveRGB.size(), m_curveRGB.data());
	}

	// Initializes the curve to the identity mapping.
	void CGEFastAdjustRGBFilter::initCurveArray()
	{
		m_curveRGB.resize(CGE_CURVE_PRECISION);
		//	m_curveRGB.shrink_to_fit();
		for(std::vector<float>::size_type t = 0; t != CGE_CURVE_PRECISION; ++t)
		{
			m_curveRGB[t] = float(t) / (CGE_CURVE_PRECISION - 1.0f);
		}
	}
+
+}

+ 790 - 0
media/cge_library/src/main/jni/cge/common/cgeImageHandler.cpp

@@ -0,0 +1,790 @@
+/*
+* cgeImageHandler.cpp
+*
+*  Created on: 2013-12-13
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeImageHandler.h"
+#include "cgeTextureUtils.h"
+
+#include <cassert>
+
+CGE_UNEXPECTED_ERR_MSG
+(
+ static int sHandlerCount = 0;
+ )
+
+namespace CGE
+{
	// Constructs an empty handler: no source texture, no FBO, no vertex
	// buffer, zero destination size. GL objects are created lazily in
	// initImageFBO / initWith*.
	CGEImageHandlerInterface::CGEImageHandlerInterface() : m_srcTexture(0), m_dstFrameBuffer(0), m_vertexArrayBuffer(0)
	{
		m_dstImageSize.set(0, 0);
		m_bufferTextures[0] = 0;
		m_bufferTextures[1] = 0;

        // Debug-only instance counter used to spot handler leaks.
        CGE_UNEXPECTED_ERR_MSG
        (
         CGE_LOG_KEEP("Handler create, total: %d\n", ++sHandlerCount);
         )
	}
+
	// Releases every GL object this handler owns: the kept source texture,
	// the swap-buffer textures + FBO (clearImageFBO), and the quad VBO.
	// Must run with the shared GL context current.
	CGEImageHandlerInterface::~CGEImageHandlerInterface()
	{
		CGE_ENABLE_GLOBAL_GLCONTEXT();
		glDeleteTextures(1, &m_srcTexture);
		clearImageFBO();
		glDeleteBuffers(1, &m_vertexArrayBuffer);
        m_vertexArrayBuffer = 0;

        CGE_UNEXPECTED_ERR_MSG
        (
         CGE_LOG_KEEP("Handler release, remain: %d\n", --sHandlerCount);
         )
	}
+
	// Detaches and returns the current result texture; ownership transfers to
	// the caller. All other GL resources of the handler (second buffer
	// texture, FBO, source texture) are released, leaving the handler empty.
	GLuint CGEImageHandlerInterface::getResultTextureAndClearHandler()
	{
		CGE_ENABLE_GLOBAL_GLCONTEXT();
		glFinish();
		// Zero the slot first so clearImageFBO() won't delete the texture we return.
		GLuint texID = m_bufferTextures[0];
		m_bufferTextures[0] = 0;
		clearImageFBO();
		glDeleteTextures(1, &m_srcTexture);
        m_srcTexture = 0;
		return texID;
	}
+
	// Total bytes needed to read back the result with `channel` bytes per pixel.
	size_t CGEImageHandlerInterface::getOutputBufferLen(size_t channel)
	{
		return m_dstImageSize.width * m_dstImageSize.height * channel;
	}

	// Bytes per row of the result with `channel` bytes per pixel (no padding).
	size_t CGEImageHandlerInterface::getOutputBufferBytesPerRow(size_t channel)
	{
		return m_dstImageSize.width * channel;
	}
+
    // Reads back `w` x `h` pixels of `texID` into `data` by temporarily
    // attaching it to the handler's FBO and calling glReadPixels.
    // Restores the FBO's color attachment to the current result texture before
    // returning. Caller must size `data` for w*h pixels of the given format.
    void CGEImageHandlerInterface::copyTextureData(void* data, int w, int h, GLuint texID, GLenum dataFmt, GLenum channelFmt)
    {
        assert(texID != 0); //Invalid Texture ID

        CGE_ENABLE_GLOBAL_GLCONTEXT();

        glBindFramebuffer(GL_FRAMEBUFFER, m_dstFrameBuffer);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texID, 0);
        glFinish();
		// Non-RGBA rows may not be 4-byte aligned; relax pack alignment.
		// NOTE(review): the alignment is not restored to its previous value — confirm callers don't depend on it.
		if(channelFmt != GL_RGBA)
			glPixelStorei(GL_PACK_ALIGNMENT, 1);
        glReadPixels(0, 0, w, h, channelFmt, dataFmt, data);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
        cgeCheckGLError("CGEImageHandlerInterface::copyTextureData");
    }
+
	// (Re)creates the ping-pong render targets: two w x h textures (the first
	// filled from `data`, the second empty) and one framebuffer with texture 0
	// attached. Also lazily creates the shared fullscreen-quad VBO.
	// Returns false (and releases everything) if the FBO is incomplete.
	bool CGEImageHandlerInterface::initImageFBO(const void* data, int w, int h, GLenum channelFmt, GLenum dataFmt, int channel)
	{
		clearImageFBO();

		if(m_vertexArrayBuffer == 0)
			m_vertexArrayBuffer = cgeGenCommonQuadArrayBuffer();
		CGE_LOG_INFO("Vertex Array Buffer id: %d\n", m_vertexArrayBuffer);

		//Set the swap buffer textures.
		m_bufferTextures[0] = cgeGenTextureWithBuffer(data, w, h, channelFmt, dataFmt, channel);
		m_bufferTextures[1] = cgeGenTextureWithBuffer(nullptr, w, h, channelFmt, dataFmt, channel);

		CGE_LOG_INFO("FBO buffer texture id: %d and %d\n", m_bufferTextures[0], m_bufferTextures[1]);

		glGenFramebuffers(1, &m_dstFrameBuffer);
		glBindFramebuffer(GL_FRAMEBUFFER, m_dstFrameBuffer);
		glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
		if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
		{
			clearImageFBO();
			CGE_LOG_ERROR("Image Handler initImageFBO failed! %x\n", glCheckFramebufferStatus(GL_FRAMEBUFFER));
			cgeCheckGLError("CGEImageHandlerInterface::initImageFBO");
			return false;
		}
		CGE_LOG_INFO("FBO Framebuffer id: %d\n", m_dstFrameBuffer);
		return true;
	}
+
	// Deletes both swap-buffer textures and the framebuffer, resetting their
	// ids to 0 so the handler can be re-initialized safely.
	void CGEImageHandlerInterface::clearImageFBO()
	{
		CGE_ENABLE_GLOBAL_GLCONTEXT();
		glBindTexture(GL_TEXTURE_2D, 0);
		glDeleteTextures(2, m_bufferTextures);
		m_bufferTextures[0] = 0;
		m_bufferTextures[1] = 0;
		glDeleteFramebuffers(1, &m_dstFrameBuffer);
		m_dstFrameBuffer = 0;	
	}
+
+	//////////////////////////////////////////////////////////////////////////
+
	// Default handler: reversion off, drawers created lazily; on ES 3.0 the
	// pixel-pack buffer (PBO fast readback) starts empty.
	CGEImageHandler::CGEImageHandler() : m_bRevertEnabled(false), m_drawer(nullptr), m_resultDrawer(nullptr)
#ifdef _CGE_USE_ES_API_3_0_
		,m_pixelPackBuffer(0), m_pixelPackBufferSize(0)
#endif
	{}
+
	// Destroys owned filters (clearImageFilters defaults to freeing them),
	// both texture drawers, and — on ES 3.0 — the pixel-pack buffer.
	CGEImageHandler::~CGEImageHandler()
	{
		CGE_ENABLE_GLOBAL_GLCONTEXT();
		clearImageFilters();
		delete m_drawer;
		delete m_resultDrawer;

#ifdef _CGE_USE_ES_API_3_0_
		clearPixelBuffer();
#endif

	}
+
	// Initializes the handler from a raw pixel buffer.
	// imgData/w/h/format describe the source image; if it exceeds the GL max
	// texture size it is scaled down into a temporary buffer first.
	// When bEnableReversion is true an extra copy of the source is kept in
	// m_srcTexture so revertToKeptResult() can restore it later.
	// Returns false for an unknown format or an incomplete FBO.
	bool CGEImageHandler::initWithRawBufferData(const void* imgData, GLint w, GLint h, CGEBufferFormat format, bool bEnableReversion)
	{
		CGE_ENABLE_GLOBAL_GLCONTEXT();
		int channel;
		GLenum dataFmt, channelFmt;
		cgeGetDataAndChannelByFormat(format, &dataFmt, &channelFmt, &channel);
		if(channel == 0) return false;
		// nullptr means the image already fits; otherwise use the scaled copy.
		char *tmpbuffer = cgeGetScaledBufferInSize(imgData, w, h, channel, cgeGetMaxTextureSize(), cgeGetMaxTextureSize());
		const char* bufferdata = (tmpbuffer == nullptr) ? (const char*)imgData : tmpbuffer;

		m_dstImageSize.set(w, h);
		CGE_LOG_INFO("Image Handler Init With RawBufferData %d x %d, %d channel\n", w, h, channel);

		glDeleteTextures(1, &m_srcTexture); //Delete last texture to avoid reinit error.
		m_bRevertEnabled = bEnableReversion;
		if(m_bRevertEnabled)
		{
			m_srcTexture = cgeGenTextureWithBuffer(bufferdata, w, h, channelFmt, dataFmt, channel);
			CGE_LOG_INFO("Input Image Texture id %d\n", m_srcTexture);
		}
		else m_srcTexture = 0;

		bool status = initImageFBO(bufferdata, w, h, channelFmt, dataFmt, channel);

		delete[] tmpbuffer;

		cgeCheckGLError("CGEImageHandler::initWithRawBufferData");
		return status;
	}
+
	// Replaces the pixels of the current result texture in place.
	// Only accepted when the size matches the existing target exactly and the
	// format is 4-channel; returns false otherwise (no reallocation is done).
	bool CGEImageHandler::updateData(const void* data, int w, int h, CGEBufferFormat format)
	{
		int channel;
		GLenum dataFmt, channelFmt;
		cgeGetDataAndChannelByFormat(format, &dataFmt, &channelFmt, &channel);
		if(!(w == m_dstImageSize.width && h == m_dstImageSize.height && channel == 4))
			return false;

		CGE_ENABLE_GLOBAL_GLCONTEXT();

		glBindTexture(GL_TEXTURE_2D, m_bufferTextures[0]);
		glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, channelFmt, dataFmt, data);
		return true;
	}
+
	// Initializes the handler from an existing texture instead of raw bytes.
	// The FBO textures are created empty, then the source is copied into them
	// via a temporarily-enabled revertToKeptResult(). When bEnableReversion is
	// false the handler does NOT take ownership of textureID (m_srcTexture is
	// reset to 0 so the destructor won't delete it).
	bool CGEImageHandler::initWithTexture(GLuint textureID, GLint w, GLint h, CGEBufferFormat format, bool bEnableReversion)
	{
		if(textureID == 0 || w < 1 || h < 1)
			return false;

		m_srcTexture = textureID;
		m_dstImageSize.set(w, h);

		GLenum dataFmt, channelFmt;
		GLint channel;
		cgeGetDataAndChannelByFormat(format, &dataFmt, &channelFmt, &channel);

		// NOTE(review): the return value of initImageFBO is ignored here,
		// unlike initWithRawBufferData — confirm failure handling is intended.
		initImageFBO(nullptr, w, h, channelFmt, dataFmt, channel);
		
		// Temporarily enable reversion so the source texture is drawn into
		// both swap buffers, then restore the caller's choice.
		m_bRevertEnabled = true;
		revertToKeptResult(false);
		m_bRevertEnabled = bEnableReversion;

		if(!m_bRevertEnabled)
			m_srcTexture = 0;
		return true;
	}
+
+#ifdef _CGE_USE_ES_API_3_0_
+	extern bool g_shouldUsePBO;
+
+	bool CGEImageHandler::initPixelBuffer()
+	{
+		cgeCheckGLError("before CGEImageHandlerInterface::initPixelBuffer");
+
+		bool ret = false;
+
+		if(g_shouldUsePBO)
+		{
+			glDeleteBuffers(1, &m_pixelPackBuffer);
+			glGenBuffers(1, &m_pixelPackBuffer);
+			glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pixelPackBuffer);
+
+			GLenum err = glGetError();
+
+			if(err == GL_FALSE && m_pixelPackBuffer != 0 && m_pixelPackBufferSize != 0)
+			{
+				glBufferData(GL_PIXEL_PACK_BUFFER, m_pixelPackBufferSize, 0, GL_DYNAMIC_READ);
+				ret = true;
+			}
+			else
+			{
+				g_shouldUsePBO = false;
+				glDeleteBuffers(1, &m_pixelPackBuffer);
+				m_pixelPackBuffer = 0;
+				m_pixelPackBuffer = 0;
+				m_pixelPackBufferSize = 0;
+				CGE_LOG_ERROR("GL_PIXEL_PACK_BUFFER - failed! Error code %x\n", err);
+			}
+
+			glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
+		}
+
+		return ret;
+	}
+
+	bool CGEImageHandler::initImageFBO(const void* data, int w, int h, GLenum channelFmt, GLenum dataFmt, int channel)
+	{
+		m_pixelPackBufferSize = m_dstImageSize.width * m_dstImageSize.height * channel;
+		initPixelBuffer();
+		return CGEImageHandlerInterface::initImageFBO(data, w, h, channelFmt, dataFmt, channel);
+	}
+
	// Maps the result pixels into client memory via the pixel-pack buffer.
	// Returns nullptr when the PBO path is disabled, uninitialized, or `fmt`
	// does not match the allocated buffer size. The returned pointer stays
	// valid until unmapOutputBuffer() is called.
	const void* CGEImageHandler::mapOutputBuffer(CGEBufferFormat fmt)
	{
		if(!g_shouldUsePBO || m_pixelPackBuffer == 0)
			return nullptr;

		int channel;
		GLenum channelFmt, dataFmt;
		cgeGetDataAndChannelByFormat(fmt, &dataFmt, &channelFmt, &channel);

		if(m_pixelPackBufferSize != m_dstImageSize.width * m_dstImageSize.height * channel)
		{
			CGE_LOG_ERROR("Invalid format!\n");
			return nullptr;
		}

		CGE_ENABLE_GLOBAL_GLCONTEXT();
		useImageFBO();
		glFinish();

		// With a PBO bound, glReadPixels takes a byte offset (0), not a pointer.
		glReadBuffer(GL_COLOR_ATTACHMENT0);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pixelPackBuffer);
		glReadPixels(0, 0, m_dstImageSize.width, m_dstImageSize.height, channelFmt, dataFmt, 0);
		const void* ret = glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, m_pixelPackBufferSize, GL_MAP_READ_BIT);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
		return ret;
	}
+
	// Releases the mapping created by mapOutputBuffer(); the pointer obtained
	// there must not be used afterwards.
	void CGEImageHandler::unmapOutputBuffer()
	{
		glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pixelPackBuffer);
		glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
		glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
	}
+
+#endif
+
	// Copies the current result pixels into `data` (caller-allocated,
	// getOutputBufferLen(4) bytes). Only 4-channel formats are accepted.
	// On ES 3.0 with a working PBO the read goes through the pack buffer and
	// a memcpy; otherwise (or if mapping fails) it falls back to a direct
	// glReadPixels. Returns false for a null buffer, uninitialized handler,
	// or non-4-channel format.
	bool CGEImageHandler::getOutputBufferData(void* data, CGEBufferFormat fmt)
	{
		int channel;
		GLenum channelFmt, dataFmt;
		cgeGetDataAndChannelByFormat(fmt, &dataFmt, &channelFmt, &channel);
		size_t len = getOutputBufferLen(channel);
		if(nullptr == data || len == 0 || channel != 4)
		{
			CGE_LOG_ERROR("%s\n", nullptr == data ? "data is NULL" : (channel == 4 ? "Handler is not initialized!" : "Channel must be 4!") );
			return false;
		}
		CGE_ENABLE_GLOBAL_GLCONTEXT();
		setAsTarget();
		glFinish();
		glPixelStorei(GL_PACK_ALIGNMENT, 1);

#ifdef _CGE_USE_ES_API_3_0_

		cgeCheckGLError("CGEImageHandlerInterface::CGEImageHandlerInterface");

		if(g_shouldUsePBO)
		{
			// (Re)create the PBO if it is missing or sized for a different image.
			if(m_pixelPackBuffer == 0 || m_pixelPackBufferSize != m_dstImageSize.width * m_dstImageSize.height * channel)
			{
				m_pixelPackBufferSize = m_dstImageSize.width * m_dstImageSize.height * channel;
				initPixelBuffer();
			}

			if(m_pixelPackBuffer != 0)
			{
				glReadBuffer(GL_COLOR_ATTACHMENT0);
				glBindBuffer(GL_PIXEL_PACK_BUFFER, m_pixelPackBuffer);
				glReadPixels(0, 0, m_dstImageSize.width, m_dstImageSize.height, channelFmt, dataFmt, 0);
				GLubyte* bytes =(GLubyte*)glMapBufferRange(GL_PIXEL_PACK_BUFFER, 0, m_pixelPackBufferSize, GL_MAP_READ_BIT);

				if(bytes != nullptr)
				{
					memcpy(data, bytes, m_pixelPackBufferSize);
				}
				else
				{
					// Mapping failed: read straight into the caller's buffer.
					CGE_LOG_ERROR("glMapBufferRange failed! Use normal read pixels instead...\n");
					glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
					glReadPixels(0, 0, m_dstImageSize.width, m_dstImageSize.height, channelFmt, dataFmt, data);
				}
				glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
				glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
			}
		}
		else
		{
			glReadPixels(0, 0, m_dstImageSize.width, m_dstImageSize.height, channelFmt, dataFmt, data);
		}

#else
		glReadPixels(0, 0, m_dstImageSize.width, m_dstImageSize.height, channelFmt, dataFmt, data);
#endif		
		return true;
	}
+
+#ifdef _CGE_USE_ES_API_3_0_
+
	// Deletes the pixel-pack buffer and resets its bookkeeping.
	void CGEImageHandler::clearPixelBuffer()
	{
		glDeleteBuffers(1, &m_pixelPackBuffer);
		m_pixelPackBuffer = 0;
		m_pixelPackBufferSize = 0;
	}
+
+#endif
+
	// Binds the handler's FBO and sets the viewport to the image size, making
	// this handler the current render target.
	void CGEImageHandler::setAsTarget()
	{
		CGE_ENABLE_GLOBAL_GLCONTEXT();
        glBindFramebuffer(GL_FRAMEBUFFER, m_dstFrameBuffer);
		glViewport(0, 0, m_dstImageSize.width, m_dstImageSize.height);
		CGE_LOG_CODE(if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
		{
            CGE_LOG_ERROR("CGEImageHandler::setAsTarget failed!\n");
		});
	}
+
	// Binds the handler's FBO without touching the viewport (unlike setAsTarget).
	void CGEImageHandler::useImageFBO()
	{
		glBindFramebuffer(GL_FRAMEBUFFER, m_dstFrameBuffer);
	}
+
	// As the base class version, but returns 0 when the handler has no
	// initialized render target yet.
	size_t CGEImageHandler::getOutputBufferLen(size_t channel)
	{
		if(m_bufferTextures[0] == 0 || m_dstFrameBuffer == 0)
			return 0;
		return m_dstImageSize.width * m_dstImageSize.height * channel;
	}

	// Row stride in bytes, or 0 when uninitialized.
	size_t CGEImageHandler::getOutputBufferBytesPerRow(size_t channel)
	{
		if(m_bufferTextures[0] == 0 || m_dstFrameBuffer == 0)
			return 0;
		return m_dstImageSize.width * channel;
	}
+
	// Ping-pong step: swaps the two buffer textures and attaches the new
	// front texture ([0]) as the FBO color attachment. After the swap, [1]
	// holds the previous result to read from while rendering into [0].
	void CGEImageHandler::swapBufferFBO()
	{
		useImageFBO();
		std::swap(m_bufferTextures[0], m_bufferTextures[1]);

		glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
        
        CGE_LOG_CODE
        (
         if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
         {
             CGE_LOG_ERROR("Image Handler swapBufferFBO failed!\n");
         }
         else
         {
             CGE_LOG_INFO("Swapping buffer FBO...\n");
         }
        )
	}
+
	// Copies `src` into `dst` by rendering a fullscreen quad with the lazily
	// created TextureDrawer. Blending and depth testing are temporarily
	// disabled and restored afterwards; the FBO attachment is restored to the
	// current result texture. Returns false only if the drawer cannot be created.
	bool CGEImageHandler::copyTexture(GLuint dst, GLuint src)
	{
		if(m_drawer == nullptr)
		{
			m_drawer = TextureDrawer::create();
			if(m_drawer == nullptr)
			{
				CGE_LOG_ERROR("Texture Drawer create failed!");
				return false;
			}
		}

		// Save states that would interfere with a plain 1:1 blit.
		GLboolean hasBlending = glIsEnabled(GL_BLEND);
		GLboolean hasDepth = glIsEnabled(GL_DEPTH_TEST);

		if(hasBlending)
			glDisable(GL_BLEND);
		if(hasDepth)
			glDisable(GL_DEPTH_TEST);

		useImageFBO();
		glFlush();
		glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst, 0);
		glViewport(0, 0, m_dstImageSize.width, m_dstImageSize.height);
		glClear(GL_COLOR_BUFFER_BIT);
		m_drawer->drawTexture(src);
		glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
		
		if(hasBlending)
			glEnable(GL_BLEND);
		if(hasDepth)
			glEnable(GL_DEPTH_TEST);

		return true;
	}
+
	// Region copy with destination offset (0, 0).
	bool CGEImageHandler::copyTexture(GLuint dst, GLuint src, int x, int y, int w, int h)
	{
		return copyTexture(dst, src, 0, 0, x, y, w, h);
	}

	// Copies the w x h region at (x, y) of `src` into `dst` at
	// (xOffset, yOffset) using glCopyTexSubImage2D (src is attached to the FBO
	// temporarily; the attachment is restored afterwards).
	bool CGEImageHandler::copyTexture(GLuint dst, GLuint src, int xOffset, int yOffset, int x, int y, int w, int h)
	{
		assert(dst != 0 && src != 0);
		useImageFBO();
		glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, src, 0);
		glBindTexture(GL_TEXTURE_2D, dst);
		glFinish();
		glCopyTexSubImage2D(GL_TEXTURE_2D, 0, xOffset, yOffset, x, y, w, h);
		glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
		return true; // Always success.
	}
+
	// Draws the current result texture to whatever framebuffer is bound,
	// creating the result drawer on first use.
	void CGEImageHandler::drawResult()
	{
		if(m_resultDrawer == nullptr)
		{
			m_resultDrawer = TextureDrawer::create();
			if(m_resultDrawer == nullptr)
			{
				CGE_LOG_ERROR("Create Texture Drawer Failed!\n");
				return ;
			}
		}
		m_resultDrawer->drawTexture(m_bufferTextures[0]);
	}
+
+	TextureDrawer* CGEImageHandler::getResultDrawer()
+	{
+		if(m_resultDrawer == nullptr)
+			m_resultDrawer = TextureDrawer::create();
+		return m_resultDrawer;
+	}
+
+	void CGEImageHandler::setResultDrawer(TextureDrawer* drawer)
+	{
+		if(m_resultDrawer != nullptr)
+			delete m_resultDrawer;
+		m_resultDrawer = drawer;
+	}
+
	// Copies the *previous* result (back buffer, m_bufferTextures[1]) into
	// `texID`, allocating a new RGBA texture when texID is 0. Falls back to
	// glCopyTexSubImage2D if the drawer-based copy is unavailable.
	// Returns the destination texture id (unchanged input if uninitialized).
	GLuint CGEImageHandler::copyLastResultTexture(GLuint texID)
	{
		if(m_bufferTextures[1] == 0 || m_dstFrameBuffer == 0)
			return texID;
		CGE_ENABLE_GLOBAL_GLCONTEXT();

		if(texID == 0)
			texID = cgeGenTextureWithBuffer(nullptr, m_dstImageSize.width, m_dstImageSize.height, GL_RGBA, GL_UNSIGNED_BYTE);

		if(!copyTexture(texID, m_bufferTextures[1]))
		{
			// Fallback path: attach the back buffer and copy via the texture unit.
			useImageFBO();
			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[1], 0);
			glBindTexture(GL_TEXTURE_2D, texID);
			glFinish();
			glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, m_dstImageSize.width, m_dstImageSize.height);
			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
		}

		return texID;
	}
+
	// Copies the *current* result (front buffer, m_bufferTextures[0]) into
	// `texID`, allocating a new RGBA texture when texID is 0.
	// NOTE(review): the guard checks m_bufferTextures[1] although [0] is
	// copied — harmless in practice since both textures are created together,
	// but confirm it mirrors copyLastResultTexture intentionally.
	GLuint CGEImageHandler::copyResultTexture(GLuint texID)
	{
		if(m_bufferTextures[1] == 0 || m_dstFrameBuffer == 0)
			return texID;
		CGE_ENABLE_GLOBAL_GLCONTEXT();

		if(texID == 0)
			texID = cgeGenTextureWithBuffer(nullptr, m_dstImageSize.width, m_dstImageSize.height, GL_RGBA, GL_UNSIGNED_BYTE);

		if(!copyTexture(texID, m_bufferTextures[0]))
		{
			// Fallback path when the drawer-based copy is unavailable.
			useImageFBO();
			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
			glBindTexture(GL_TEXTURE_2D, texID);
			glFinish();
			glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, m_dstImageSize.width, m_dstImageSize.height);
			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
		}

		return texID;
	}
+
	// Appends a filter to the processing chain, taking ownership.
	// Wrapper filters are flattened: their inner filters are appended
	// individually (ownership transferred via getFilters(true)) and the
	// wrapper shell itself is deleted. Null filters are ignored with a log.
	void CGEImageHandler::addImageFilter(CGEImageFilterInterfaceAbstract* proc)
	{
		if(proc == nullptr)
		{
			CGE_LOG_ERROR("CGEImageHandler: a null filter is sent. Skipping...\n");
			return;
		}
		if(!proc->isWrapper())
		{
			m_vecFilters.push_back(proc);
			return;
		}
		
		auto&& filters = proc->getFilters(true);
		for(auto filter : filters)
		{
			m_vecFilters.push_back(filter);
		}
		delete proc;
	}
+
+	void CGEImageHandler::clearImageFilters(bool bDelMem)
+	{	
+		if(bDelMem)
+		{
+			CGE_ENABLE_GLOBAL_GLCONTEXT();
+			for(std::vector<CGEImageFilterInterfaceAbstract*>::iterator iter = m_vecFilters.begin();
+				iter != m_vecFilters.end(); ++iter)
+			{
+				delete *iter;
+			}
+		}
+		m_vecFilters.clear();
+	}
+
	// Runs the whole filter chain over the current image: for each filter,
	// swap the ping-pong buffers and render the previous result
	// (m_bufferTextures[1]) through the filter into the front buffer.
	// No-op (with a log) if there are no filters or no image loaded.
	void CGEImageHandler::processingFilters()
	{
		if(m_vecFilters.empty() || m_bufferTextures[0] == 0)
		{
			CGE_LOG_INFO("No filter or image to handle\n");
			return;
		}

		CGE_ENABLE_GLOBAL_GLCONTEXT();
		assert(m_vertexArrayBuffer != 0);

        glDisable(GL_BLEND);

        // `index` and `total` only exist in logging builds (CGE_LOG_CODE);
        // they are referenced solely inside CGE_LOG_INFO below.
        CGE_LOG_CODE(int index = 0;);
        CGE_LOG_CODE(clock_t total = clock(););
        for(std::vector<CGEImageFilterInterfaceAbstract*>::iterator iter = m_vecFilters.begin();
            iter < m_vecFilters.end(); ++iter)
        {
            swapBufferFBO();
            CGE_LOG_CODE(clock_t t = clock();)
            CGE_LOG_INFO("####Start Processing step %d...\n", ++index);
			glBindBuffer(GL_ARRAY_BUFFER, m_vertexArrayBuffer);
            (*iter)->render2Texture(this, m_bufferTextures[1], m_vertexArrayBuffer);
            glFlush();
            CGE_LOG_INFO("####Processing step %d finished. Time: %gs .\n", index, float(clock() - t) / CLOCKS_PER_SEC);
        }
		glFinish();
        CGE_LOG_INFO("####Finished Processing All! Total time: %gs \n", float(clock() - total) / CLOCKS_PER_SEC);
    }
+
+	bool CGEImageHandler::processingWithFilter(GLint index)
+	{
+		if(index == -1)
+			index = (GLint)m_vecFilters.size() - 1;
+		return processingWithFilter(getFilterByIndex(index));
+	}
+
	// Runs exactly one filter pass over the current image (swap buffers, then
	// render m_bufferTextures[1] through `proc` into the front buffer).
	// Returns false when proc is null. The filter need not belong to the chain.
	bool CGEImageHandler::processingWithFilter(CGEImageFilterInterfaceAbstract* proc)
	{
		if(proc == nullptr)
			return false;

		CGE_ENABLE_GLOBAL_GLCONTEXT();
		assert(m_vertexArrayBuffer != 0);

		glDisable(GL_BLEND);
		glBindBuffer(GL_ARRAY_BUFFER, m_vertexArrayBuffer);
		swapBufferFBO();

//		CGE_LOG_CODE(clock_t t = clock());
//		CGE_LOG_INFO("####Start Processing...");
		proc->render2Texture(this, m_bufferTextures[1], m_vertexArrayBuffer);
		glFlush();
//		CGE_LOG_INFO("####Finished Processing! Time: %gs \n", float(clock() - t) / CLOCKS_PER_SEC);
		return true;
	}
+
+	void CGEImageHandler::disableReversion()
+	{
+		CGE_ENABLE_GLOBAL_GLCONTEXT();
+		glDeleteTextures(1, &m_srcTexture);
+		m_srcTexture = 0;
+		m_bRevertEnabled = false;
+		CGE_LOG_INFO("Reversion isdisabled");
+	}
+
	// Overwrites the kept source texture with the current result, so a later
	// revertToKeptResult() restores this state instead of the original image.
	// Requires reversion to be enabled and the handler initialized.
	bool CGEImageHandler::keepCurrentResult()
	{
		if(!m_bRevertEnabled ||	m_bufferTextures[0] == 0 || m_dstFrameBuffer == 0)
			return false;

		CGE_ENABLE_GLOBAL_GLCONTEXT();
		useImageFBO();

		// FBO still has the result attached; copy it into the kept texture.
		glBindTexture(GL_TEXTURE_2D, m_srcTexture);
        glFinish();
		glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, m_dstImageSize.width, m_dstImageSize.height);
		return true;
	}
+
	// Restores the kept source texture into the front buffer (and, when
	// bRevert2Target is true, into the back buffer as well). Prefers the
	// drawer-based path; falls back to glCopyTexSubImage2D when no drawer can
	// be created. Requires reversion to be enabled and the handler initialized.
	bool CGEImageHandler::revertToKeptResult(bool bRevert2Target)
	{
		if(!m_bRevertEnabled ||	m_bufferTextures[0] == 0 || m_dstFrameBuffer == 0)
			return false;

		CGE_ENABLE_GLOBAL_GLCONTEXT();
		useImageFBO();

		if(m_drawer == nullptr)
		{
			m_drawer = TextureDrawer::create();
		}

		if(m_drawer == nullptr)
		{
			// Fallback: attach the kept texture and copy it out via the texture unit.
			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_srcTexture, 0);
			if(bRevert2Target)
			{
				glBindTexture(GL_TEXTURE_2D, m_bufferTextures[1]);

				// glCopyTexSubImage2D will block the CPU
				glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, m_dstImageSize.width, m_dstImageSize.height);
				glFlush();
			}
			glBindTexture(GL_TEXTURE_2D, m_bufferTextures[0]);
			glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, m_dstImageSize.width, m_dstImageSize.height);
			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
		}
		else
		{
			// Fast path: render the kept texture into each requested buffer.
			glViewport(0, 0, m_dstImageSize.width, m_dstImageSize.height);
			if(bRevert2Target)
			{
				glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[1], 0);
				m_drawer->drawTexture(m_srcTexture);
			}

			glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_bufferTextures[0], 0);
			m_drawer->drawTexture(m_srcTexture);
		}

		return true;
	}
+
+	int CGEImageHandler::getFilterIndexByAddr(const void* addr)
+	{
+		int sz = (int)m_vecFilters.size();
+		for(int i = 0; i != sz; ++i)
+		{
+			if(addr == m_vecFilters[i])
+				return i;
+		}
+		return -1;
+	}
+
+	void CGEImageHandler::peekFilters(std::vector<CGEImageFilterInterfaceAbstract*>* vTrans)
+	{
+		*vTrans = m_vecFilters;
+	}
+
+	void CGEImageHandler::popImageFilter()
+	{
+		if(!m_vecFilters.empty())
+		{
+			CGE_ENABLE_GLOBAL_GLCONTEXT();
+			std::vector<CGEImageFilterInterfaceAbstract*>::iterator iter = m_vecFilters.end()-1;
+			delete *iter;
+			m_vecFilters.erase(iter);
+		} 
+	}
+
+	bool CGEImageHandler::insertFilterAtIndex(CGEImageFilterInterfaceAbstract* proc, GLuint index)
+	{
+		if(index > m_vecFilters.size()) return false;
+		m_vecFilters.insert(m_vecFilters.begin() + index, proc);
+		return true;
+	}
+
+	bool CGEImageHandler::deleteFilterByAddr(const void* addr, bool bDelMem)
+	{
+		if(m_vecFilters.empty())
+			return false;
+
+		for(std::vector<CGEImageFilterInterfaceAbstract*>::iterator iter = m_vecFilters.begin();
+			iter < m_vecFilters.end(); ++iter)
+		{
+			if(*iter == addr)
+			{
+				if(bDelMem)
+				{
+					CGE_ENABLE_GLOBAL_GLCONTEXT();
+					delete *iter;
+				}
+				m_vecFilters.erase(iter);
+				return true;
+			}
+		}
+		return false;
+	}
+
+	bool CGEImageHandler::deleteFilterByIndex(GLuint index, bool bDelMem)
+	{
+		if(index >= m_vecFilters.size())
+			return false;
+		if(bDelMem)
+		{
+			CGE_ENABLE_GLOBAL_GLCONTEXT();
+			delete m_vecFilters[index];
+		}
+		m_vecFilters.erase(m_vecFilters.begin() + index);
+		return true;
+	}
+
+	bool CGEImageHandler::replaceFilterAtIndex(CGEImageFilterInterfaceAbstract* proc, GLuint index, bool bDelMem)
+	{
+		if(index >= m_vecFilters.size())
+			return false;
+		std::vector<CGEImageFilterInterfaceAbstract*>::iterator iter = m_vecFilters.begin() + index;
+		if(bDelMem)
+		{
+			CGE_ENABLE_GLOBAL_GLCONTEXT();
+			delete *iter;
+		}
+		*iter = proc;
+		return true;
+	}
+
+	bool CGEImageHandler::swapFilterByIndex(GLuint left, GLuint right)
+	{
+		if(left == right || left >= m_vecFilters.size() || right >= m_vecFilters.size())
+			return false;
+		std::swap(*(m_vecFilters.begin() + left), *(m_vecFilters.begin() + right));
+		return true;
+	}
+
+}

+ 387 - 0
media/cge_library/src/main/jni/cge/common/cgeShaderFunctions.cpp

@@ -0,0 +1,387 @@
+/*
+* cgeShaderFunctions.cpp
+*
+*  Created on: 2013-12-5
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeCommonDefine.h"
+#include "cgeGLFunctions.h"
+#include "cgeShaderFunctions.h"
+
+CGE_UNEXPECTED_ERR_MSG
+(
+ static int sProgramCount = 0;
+ )
+
+namespace CGE
+{
	// Starts with no GL shader object; the destructor releases any created one.
	ShaderObject::ShaderObject() : m_shaderType(GL_FALSE), m_shaderID(0) {}
	ShaderObject::~ShaderObject() { clear(); }
+
+	bool ShaderObject::init(GLenum shaderType)
+	{
+		m_shaderType = shaderType;
+		if(m_shaderID == 0)
+			m_shaderID = glCreateShader(m_shaderType);
+		return m_shaderID != 0;
+	}
+
+	void ShaderObject::clear()
+	{
+		if(m_shaderID == 0) return;
+		glDeleteShader(m_shaderID);
+		m_shaderID = 0;
+		m_shaderType = GL_FALSE;
+	}
+
+	bool ShaderObject::loadShaderSourceFromString(const char* shaderString)
+	{
+		if(m_shaderID == 0)
+		{
+			m_shaderID = glCreateShader(m_shaderType);
+			CGE_LOG_CODE(
+			if(m_shaderID == 0) 
+			{
+				CGE_LOG_ERROR("glCreateShader Failed!");
+				return false;
+			})
+		}
+		glShaderSource(m_shaderID, 1, (const GLchar**)&shaderString, nullptr);
+		glCompileShader(m_shaderID);
+		GLint compiled = 0;
+		glGetShaderiv(m_shaderID, GL_COMPILE_STATUS, &compiled);
+
+		if(compiled == GL_TRUE) return true;
+
+		CGE_LOG_CODE(
+		GLint logLen;
+		glGetShaderiv(m_shaderID, GL_INFO_LOG_LENGTH, &logLen);
+		if(logLen > 0)
+		{
+			char *buf = new char[logLen];
+			if(buf != nullptr)
+			{
+				glGetShaderInfoLog(m_shaderID, logLen, &logLen, buf);
+				CGE_LOG_ERROR("Shader %d compile faild: \n%s\n", m_shaderID, buf);
+				delete[] buf;
+			}
+		})
+		return false;
+	}
+
+	//////////////////////////////////////////////////////////////////////////
+
	// Frees every stored UniformData entry.
	UniformParameters::~UniformParameters()
	{
		clear();
	}
+
	// pushi overloads: queue a named integer uniform (scalar, ivec2, ivec3,
	// ivec4) for upload by assignUniforms(). Entries are heap-allocated and
	// owned by this object until clear()/destruction.
	void UniformParameters::pushi(const char* name, GLint x)
	{
		UniformData* data = new UniformData(name, uniformINT);
		data->setValuesi(x);
		m_vecUniforms.push_back(data);
	}

	void UniformParameters::pushi(const char* name, GLint x, GLint y)
	{
		UniformData* data = new UniformData(name, uniformINTV2);
		data->setValuesi(x, y);
		m_vecUniforms.push_back(data);
	}

	void UniformParameters::pushi(const char* name, GLint x, GLint y, GLint z)
	{
		UniformData* data = new UniformData(name, uniformINTV3);
		data->setValuesi(x, y, z);
		m_vecUniforms.push_back(data);
	}

	void UniformParameters::pushi(const char* name, GLint x, GLint y, GLint z, GLint w)
	{
		UniformData* data = new UniformData(name, uniformINTV4);
		data->setValuesi(x, y, z, w);
		m_vecUniforms.push_back(data);
	}
+
+
	// pushf overloads: queue a named float uniform (scalar, vec2, vec3, vec4)
	// for upload by assignUniforms().
	void UniformParameters::pushf(const char* name, GLfloat x)
	{
		UniformData* data = new UniformData(name, uniformFLOAT);
		data->setValuesf(x);
		m_vecUniforms.push_back(data);
	}

	void UniformParameters::pushf(const char* name, GLfloat x, GLfloat y)
	{
		UniformData* data = new UniformData(name, uniformFLOATV2);
		data->setValuesf(x, y);
		m_vecUniforms.push_back(data);
	}

	void UniformParameters::pushf(const char* name, GLfloat x, GLfloat y, GLfloat z)
	{
		UniformData* data = new UniformData(name, uniformFLOATV3);
		data->setValuesf(x, y, z);
		m_vecUniforms.push_back(data);
	}

	void UniformParameters::pushf(const char* name, GLfloat x, GLfloat y, GLfloat z, GLfloat w)
	{
		UniformData* data = new UniformData(name, uniformFLOATV4);
		data->setValuesf(x, y, z, w);
		m_vecUniforms.push_back(data);
	}
+
	// Queues a 1D sampler uniform. `textureID` is stored by pointer so the
	// texture can be (re)created after this call; `textureBindID` is the
	// texture-unit offset from CGE_TEXTURE_START used at assign time.
	void UniformParameters::pushSampler1D(const char* name, GLuint* textureID, GLint textureBindID)
	{
		UniformData* data = new UniformData(name, uniformSAMPLER1D);
		data->uniformValue[0].valueuPtr = textureID;
		data->uniformValue[1].valuei = textureBindID;
		m_vecUniforms.push_back(data);
	}

	// Queues a 2D sampler uniform; same pointer/bind-unit semantics as above.
	void UniformParameters::pushSampler2D(const char* name, GLuint* textureID, GLint textureBindID)
	{
		UniformData* data = new UniformData(name, uniformSAMPLER2D);
		data->uniformValue[0].valueuPtr = textureID;
		data->uniformValue[1].valuei = textureBindID;
		m_vecUniforms.push_back(data);
	}

	// Queues a vec2 uniform computed at assign time as (1/width, 1/height)
	// of the handler's output FBO (one-texel step size).
	void UniformParameters::requireStepsFactor(const char* name)
	{
		UniformData* data = new UniformData(name, uniformStepsFactor);
		m_vecUniforms.push_back(data);
	}

	// Queues a vec4 aspect-fit rectangle computed at assign time from the FBO
	// aspect ratio versus `texAspectRatio`.
	void UniformParameters::requireRatioAspect(const char* name, GLfloat texAspectRatio)
	{
		UniformData* data = new UniformData(name, uniformRatioAspect);
		data->uniformValue[0].valuef = texAspectRatio;
		m_vecUniforms.push_back(data);
	}

	// Queues a float uniform: (FBO aspect ratio) / texAspectRatio, computed
	// at assign time.
	void UniformParameters::requireStepsRatio(const char* name, GLfloat texAspectRatio)
	{
		UniformData* data = new UniformData(name, uniformStepsRatio);
		data->uniformValue[0].valuef = texAspectRatio;
		m_vecUniforms.push_back(data);
	}
+
+	void UniformParameters::clear()
+	{
+		for(std::vector<UniformData*>::iterator iter = m_vecUniforms.begin();
+			iter != m_vecUniforms.end(); ++iter)
+			delete *iter;
+		m_vecUniforms.clear();
+	}
+
+	void UniformParameters::assignUniforms(CGEImageHandlerInterface* hander, GLuint program)
+	{
+		for(std::vector<UniformData*>::iterator iter = m_vecUniforms.begin();
+			iter != m_vecUniforms.end(); ++iter)
+		{
+			GLint uniformID = glGetUniformLocation(program, (*iter)->uniformName);
+			if(uniformID < 0)
+			{
+				CGE_LOG_ERROR("Uniform name %s does not exist!\n", (*iter)->uniformName);
+				return ;
+			}
+			switch ((*iter)->uniformType)
+			{
+			case uniformINT:
+				glUniform1i(uniformID, (*iter)->uniformValue[0].valuei);
+				break;
+			case uniformINTV2:
+				glUniform2i(uniformID, (*iter)->uniformValue[0].valuei, (*iter)->uniformValue[1].valuei);
+				break;
+			case uniformINTV3:
+				glUniform3i(uniformID, (*iter)->uniformValue[0].valuei, (*iter)->uniformValue[1].valuei, (*iter)->uniformValue[3].valuei);
+				break;
+			case uniformINTV4:
+				glUniform4i(uniformID, (*iter)->uniformValue[0].valuei, (*iter)->uniformValue[1].valuei, (*iter)->uniformValue[2].valuei, (*iter)->uniformValue[3].valuei);
+				break;
+			case uniformFLOAT:
+				glUniform1f(uniformID, (*iter)->uniformValue[0].valuef);
+				break;
+			case uniformFLOATV2:
+				glUniform2f(uniformID, (*iter)->uniformValue[0].valuef, (*iter)->uniformValue[1].valuef);
+				break;
+			case uniformFLOATV3:
+				glUniform3f(uniformID, (*iter)->uniformValue[0].valuef, (*iter)->uniformValue[1].valuef, (*iter)->uniformValue[2].valuef);
+				break;
+			case uniformFLOATV4:
+				glUniform4f(uniformID, (*iter)->uniformValue[0].valuef, (*iter)->uniformValue[1].valuef, (*iter)->uniformValue[2].valuef, (*iter)->uniformValue[3].valuef);
+				break;
+			case uniformSAMPLER1D:
+				{
+#ifdef GL_TEXTURE_1D
+					int texutreBindID = CGE_TEXTURE_START + (*iter)->uniformValue[1].valuei;
+					glActiveTexture(texutreBindID);
+					glBindTexture(GL_TEXTURE_1D, *(*iter)->uniformValue[0].valueuPtr);
+					glUniform1i(uniformID, texutreBindID - GL_TEXTURE0);
+#endif
+				}
+				break;
+			case uniformSAMPLER2D:
+				{
+					int texutreBindID = CGE_TEXTURE_START + (*iter)->uniformValue[1].valuei;
+					glActiveTexture(texutreBindID);
+					glBindTexture(GL_TEXTURE_2D, *(*iter)->uniformValue[0].valueuPtr);
+					glUniform1i(uniformID, texutreBindID - GL_TEXTURE0);
+				}
+				break;
+			case uniformStepsFactor:
+				{
+					CGESizei sz = hander->getOutputFBOSize();
+					glUniform2f(uniformID, 1.0f / sz.width, 1.0f / sz.height);
+				}
+				break;
+			case uniformRatioAspect:
+				{
+					CGESizei sz = hander->getOutputFBOSize();
+					GLfloat x, y, z, w;
+					GLfloat asSrc = (GLfloat)sz.width / sz.height;
+					GLfloat asTex = (*iter)->uniformValue[0].valuef;
+					if(asSrc > asTex)
+					{
+						x = 1.0f;
+						y = asTex / asSrc;
+						z = 0.0f;
+						w = (1.0f - y) / 2.0f;
+					}
+					else
+					{
+						x = asSrc / asTex;
+						y = 1.0;
+						z = (1.0f - x) / 2.0f;
+						w = 0.0f;
+					}
+					glUniform4f(uniformID, x, y, z, w);
+				}
+				break;
+			case uniformStepsRatio:
+				{
+					CGESizei sz = hander->getOutputFBOSize();
+					glUniform1f(uniformID, float(sz.width) / sz.height / (*iter)->uniformValue[0].valuef);
+				}
+				break;
+			default:
+				CGE_LOG_ERROR("UniformParameters::assignUniforms: Uniform Type Not Supported!");
+				break;
+			}
+		}
+	}
+
+	UniformParameters::UniformData* UniformParameters::getDataPointerByName(const char* name)
+	{
+		for(std::vector<UniformData*>::iterator iter = m_vecUniforms.begin();
+			iter != m_vecUniforms.end(); ++iter)
+		{
+			if(strcmp((*iter)->uniformName, name) == 0)
+			{
+				return *iter;
+			}
+		}
+		return nullptr;
+	}
+
	// Creates the GL program object immediately; a debug-only counter tracks
	// live program instances.
	ProgramObject::ProgramObject()
	{
		m_programID = glCreateProgram();
        
        CGE_UNEXPECTED_ERR_MSG
        (
         CGE_LOG_KEEP("ProgramObject create, total: %d\n", ++sProgramCount);
         )
	}
+
	// Detaches all attached shaders (so GL can free them) before deleting the
	// program object.
	ProgramObject::~ProgramObject()
	{
        CGE_UNEXPECTED_ERR_MSG
        (
         CGE_LOG_KEEP("ProgramObject release, remain: %d\n", --sProgramCount);
         )
        
		if(m_programID == 0)
			return;
		GLuint attachedShaders[32];
		int numAttachedShaders = 0;
		glGetAttachedShaders(m_programID, 32, &numAttachedShaders, attachedShaders);
		for(int i = 0; i < numAttachedShaders; ++i)
		{
			glDetachShader(m_programID, attachedShaders[i]);
		}
		glDeleteProgram(m_programID);
	}
+
	// Creates + compiles the fragment shader from source.
	bool ProgramObject::initFragmentShaderSourceFromString(const char* fragShader)
	{
		return m_fragObj.init(GL_FRAGMENT_SHADER) && m_fragObj.loadShaderSourceFromString(fragShader);
	}

	// Creates + compiles the vertex shader from source.
	bool ProgramObject::initVertexShaderSourceFromString(const char* vertShader)
	{
		return m_vertObj.init(GL_VERTEX_SHADER) && m_vertObj.loadShaderSourceFromString(vertShader);
	}

	// Convenience: compile both shaders and link; short-circuits on failure.
	bool ProgramObject::initWithShaderStrings(const char* vsh, const char* fsh)
	{
		return initVertexShaderSourceFromString(vsh) && initFragmentShaderSourceFromString(fsh) && link();
	}
+
	// Attaches the given vertex/fragment shader objects and links the program.
	// If the program already exists, previously attached shaders are detached
	// first (allows relinking). When shouldClear is true the member shader
	// objects are released after linking (compiled code lives in the program).
	// On link failure the program info log is printed; returns link status.
	bool ProgramObject::linkWithShaderObject(ShaderObject& vertObj, ShaderObject& fragObj, bool shouldClear)
	{
		if(m_programID != 0)
		{
            GLuint attachedShaders[32] = {0};
			int numAttachedShaders = 0;
			glGetAttachedShaders(m_programID, 32, &numAttachedShaders, attachedShaders);
			for(int i = 0; i < numAttachedShaders; ++i)
			{
				glDetachShader(m_programID, attachedShaders[i]);
			}
			cgeCheckGLError("Detach Shaders in useProgram");
		}
		else
		{
			m_programID = glCreateProgram();
		}
		GLint programStatus;
		glAttachShader(m_programID, vertObj.shaderID());
		glAttachShader(m_programID, fragObj.shaderID());
		cgeCheckGLError("Attach Shaders in useProgram");
		glLinkProgram(m_programID);
		glGetProgramiv(m_programID, GL_LINK_STATUS, &programStatus);

		if(shouldClear)
		{
			m_vertObj.clear();
			m_fragObj.clear();
		}

		if(programStatus != GL_TRUE)
		{
			GLint logLen = 0;
			glGetProgramiv(m_programID, GL_INFO_LOG_LENGTH, &logLen);
			if(logLen != 0)
			{
				char *buf = new char[logLen];
				if(buf != nullptr)
				{
					glGetProgramInfoLog(m_programID, logLen, &logLen, buf);
					CGE_LOG_ERROR("Failed to link the program!\n%s", buf);
					delete[] buf;
				}
			}
			CGE_LOG_ERROR("LINK %d Failed\n", m_programID);
			return false;
		}
		cgeCheckGLError("Link Program");
		return true;
	}
+
+}

+ 868 - 0
media/cge_library/src/main/jni/cge/common/cgeTextureUtils.cpp

@@ -0,0 +1,868 @@
+/*
+* cgeTextureUtils.cpp
+*
+*  Created on: 2015-7-29
+*      Author: Wang Yang
+*/
+
+//#include <GLES2/gl2.h>
+#include <cmath>
+#include "cgeTextureUtils.h"
+#include "cgeGlobal.h"
+#include "cgeMat.h"
+
// Default vertex shader: derives texCoord from the quad position via a
// 2x2 rotation matrix and an x/y flip scale.
static CGEConstString s_vsh = CGE_SHADER_STRING(
attribute vec2 vPosition;
varying vec2 texCoord;
uniform mat2 rotation;
uniform vec2 flipScale;
void main()
{
   gl_Position = vec4(vPosition, 0.0, 1.0);
   texCoord = flipScale * (vPosition / 2.0 * rotation) + 0.5;
});
+
// Default fragment shader: straight texture copy.
static CGEConstString s_fsh = CGE_SHADER_STRING_PRECISION_M(
varying vec2 texCoord;
uniform sampler2D inputImageTexture;
void main()
{
   gl_FragColor = texture2D(inputImageTexture, texCoord);
	// gl_FragColor = vec4(texCoord, 0.0, 1.0);
});
+
// Masked-draw vertex shader: independent rotation/flip for the source
// texture and for the mask texture.
static CGEConstString s_vshMask = CGE_SHADER_STRING(
attribute vec2 vPosition;
varying vec2 texCoord;
varying vec2 maskCoord;

uniform mat2 texRotation;
uniform vec2 texFlipScale;

uniform mat2 maskRotation;
uniform vec2 maskFlipScale;

void main()
{
   gl_Position = vec4(vPosition, 0.0, 1.0);
   texCoord = texFlipScale * (vPosition / 2.0 * texRotation) + 0.5;
   //if the mask size differs from the source, apply flip/scale so they line up
   maskCoord = maskFlipScale * (vPosition / 2.0 * maskRotation) + 0.5;
});
+
+
// Masked-draw fragment shader: multiplies the source color by the mask.
static CGEConstString s_fshMask = CGE_SHADER_STRING_PRECISION_M(
varying vec2 texCoord;
varying vec2 maskCoord;
uniform sampler2D inputImageTexture;
uniform sampler2D maskTexture;
void main()
{
   gl_FragColor = texture2D(inputImageTexture, texCoord);
   
   //the mask is usually single-channel

   //not premultiplied
   gl_FragColor *= texture2D(maskTexture, maskCoord);

   // premultiplied
   // vec4 maskColor = texture2D(maskTexture, maskCoord);
   // maskColor.rgb *= maskColor.a;
   // gl_FragColor *= maskColor;
});
+
+#ifdef GL_TEXTURE_EXTERNAL_OES
+
// External-OES vertex shader: texCoord additionally goes through a 4x4
// transform (presumably a SurfaceTexture matrix — confirm at callers).
static CGEConstString s_vshExternal_OES = CGE_SHADER_STRING(
attribute vec2 vPosition;
varying vec2 texCoord;
uniform mat4 transform;
uniform mat2 rotation;
uniform vec2 flipScale;
void main()
{
   gl_Position = vec4(vPosition, 0.0, 1.0);
   vec2 coord = flipScale * (vPosition / 2.0 * rotation) + 0.5;
   texCoord = (transform * vec4(coord, 0.0, 1.0)).xy;
});
+
// Fragment shader sampling a samplerExternalOES; requires the
// GL_OES_EGL_image_external extension.
static CGEConstString s_fshExternal_OES =
"#extension GL_OES_EGL_image_external : require\n"
CGE_SHADER_STRING_PRECISION_M(
varying vec2 texCoord;
uniform samplerExternalOES inputImageTexture;
void main()
{
   gl_FragColor = texture2D(inputImageTexture, texCoord);
});
+
+#endif
+
// Two-plane YUV (luminance + interleaved chrominance) to RGB conversion.
// The matrix looks like full-range BT.709 coefficients — confirm.
CGEConstString s_fshYUVConvert = CGE_SHADER_STRING_PRECISION_M
(
 varying vec2 texCoord;
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 void main()
 {
     vec3 yuv;
     vec3 rgb;
     yuv.x = texture2D(luminanceTexture, texCoord).r;
     yuv.yz = texture2D(chrominanceTexture, texCoord).rg - vec2(0.5, 0.5);
     
     rgb = mat3(1.0, 1.0, 1.0,
                0.0, -0.18732, 1.8556,
                1.57481, -0.46813, 0.0) * yuv;
     
     gl_FragColor = vec4(rgb, 1.0);
 }
 );
+
// Vertex shader for the RGB->YUV packing passes: stretches the quad by 8/3
// vertically so the render target maps to texCoord.y in [0, 0.375] — the
// region the packing fragment shaders actually read.
static CGEConstString s_vshRGB2YUV = CGE_SHADER_STRING(
attribute vec2 vPosition;
varying vec2 texCoord;
uniform mat2 rotation;
uniform vec2 flipScale;
void main()
{
   gl_Position = vec4(vPosition, 0.0, 1.0);
   gl_Position.y = (gl_Position.y + 1.0) * 8.0 / 3.0 - 1.0;
   texCoord = flipScale * (vPosition / 2.0 * rotation) + 0.5;
});
+
// The original code came from the Internet, and it's optimized by WY here.
// Packs RGB into planar YUV420P (Y plane, then U, then V), four samples per
// RGBA fragment. The trailing "%s" is a printf slot filled at init time
// with "" or ".bgra" (see TextureDrawerRGB2YUV420P::initWithOutputFormat).
CGEConstString s_fshRGB2YUV = CGE_SHADER_STRING_PRECISION_H
(
varying vec2 texCoord;
uniform sampler2D rgbTexture;
uniform vec2 imageSize;

const vec3 ycoeff = vec3(0.21260134, 0.71520028, 0.07219838);
const vec3 ucoeff = vec3(-0.11457283, -0.38542805, 0.5);
const vec3 vcoeff = vec3(0.5, -0.4541502, -0.04584577);
 
const vec2 yScale = vec2(4.0, 4.0);
const vec2 uvScale = vec2(8.0, 8.0);
 
void main(void)
{
    float uvlines = 0.0625 * imageSize.y,
    uvlinesI = floor(uvlines), coordOffset, posStep;
    
    vec2 imageStep = 1.0 / imageSize,
    uvPosOffset = vec2(uvlines - uvlinesI, uvlinesI * imageStep.y),
    uMaxPos = uvPosOffset + vec2(0, 0.25),
    vMaxPos = uvPosOffset + uMaxPos, basePos, samplingPos;

    vec4 dstColor;
    vec3 coeff;

    if(texCoord.y < 0.25)
    {
        basePos = texCoord * yScale * imageSize;
        float addY = floor(basePos.x * imageStep.x);
        basePos.x -= addY * imageSize.x;
        basePos.y += addY;
        
        coeff = ycoeff;
        samplingPos = basePos * imageStep;
        coordOffset = 0.0;
        posStep = 1.0;
    }
    else if(texCoord.y < uMaxPos.y || (texCoord.y == uMaxPos.y && texCoord.x < uMaxPos.x))
    {
        basePos = vec2(texCoord.x, texCoord.y - 0.25) * uvScale * imageSize;
        float addY = floor(basePos.x * imageStep.x);
        basePos.x -= addY * imageSize.x;
        basePos.y += addY;
        basePos.y *= 2.0;
        basePos -= clamp(uvScale * 0.5 - 2.0, vec2(0.0), uvScale);
        basePos.y -= 2.0;
        
        coeff = ucoeff;
        samplingPos = basePos * imageStep;
        coordOffset = 0.5;
        posStep = 2.0;
    }
    else if(texCoord.y < vMaxPos.y || (texCoord.y == vMaxPos.y && texCoord.x < vMaxPos.x))
    {
        
        vec2 basePos = (texCoord - uMaxPos) * uvScale * imageSize;
        float addY = floor(basePos.x * imageStep.x);
        basePos.x -= addY * imageSize.x;
        basePos.y += addY;
        basePos.y *= 2.0;
        basePos -= clamp(uvScale * 0.5 - 2.0, vec2(0.0), uvScale);
        basePos.y -= 2.0;
        
        coeff = vcoeff;
        samplingPos = basePos * imageStep;
        coordOffset = 0.5;
        posStep = 2.0;
    }
    
    dstColor.r = dot(texture2D(rgbTexture, samplingPos).rgb, coeff);
    dstColor.r += coordOffset;
    
    samplingPos.x += posStep * imageStep.x;
    dstColor.g = dot(texture2D(rgbTexture, samplingPos).rgb, coeff);
    dstColor.g += coordOffset;
    
    samplingPos.x += posStep * imageStep.x;
    dstColor.b = dot(texture2D(rgbTexture, samplingPos).rgb, coeff);
    dstColor.b += coordOffset;
    
    samplingPos.x += posStep * imageStep.x;
    dstColor.a = dot(texture2D(rgbTexture, samplingPos).rgb, coeff);
    dstColor.a += coordOffset;
    
    gl_FragColor = dstColor%s;
}
);
+
// Packs RGB into NV21 (Y plane followed by interleaved V/U). The trailing
// "%s" is a printf slot filled with "" or ".bgra" at init time.
CGEConstString s_fshRGB2NV21 = CGE_SHADER_STRING_PRECISION_H
(
 varying vec2 texCoord;
 uniform sampler2D rgbTexture;
 uniform vec2 imageSize;
 
 const vec3 ycoeff = vec3(0.21260134, 0.71520028, 0.07219838);
 const vec3 ucoeff = vec3(-0.11457283, -0.38542805, 0.5);
 const vec3 vcoeff = vec3(0.5, -0.4541502, -0.04584577);
 
 const vec2 yScale = vec2(4.0, 4.0);
 const vec2 uvScale = vec2(4.0, 8.0);
 
 void main(void)
{
    float uvlines = 0.0625*imageSize.y,
    uvlinesI = floor(uvlines);
    
    vec2 uvPosOffset = vec2(uvlines-uvlinesI,uvlinesI/imageSize.y),
    imageStep = 1.0 / imageSize;
    
    vec4 dstColor;
    
    if(texCoord.y < 0.25)
    {
        vec2 basePos = texCoord * yScale * imageSize;
        
        float addY = floor(basePos.x * imageStep.x);
        
        basePos.x -= addY * imageSize.x;
        basePos.y += addY;
        
        basePos *= imageStep;
        
        float move = 1.0 * imageStep.x;
        
        dstColor.x = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
        
        basePos.x += move;
        dstColor.y = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
        
        basePos.x += move;
        dstColor.z = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
        
        basePos.x += move;
        dstColor.w = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);

    }
    else
    {
        vec2 basePos = (texCoord - 0.25) * uvScale * imageSize;
        
        float addY = floor(basePos.x * imageStep.x);
        
        basePos.x -= addY * imageSize.x;
        basePos.y += addY * 2.0;
        
        
        basePos *= imageStep;
        basePos += 0.5 * imageStep.x;
        
        vec3 uvColor = texture2D(rgbTexture, basePos).rgb;
        
        dstColor.x = dot(uvColor, vcoeff);
        dstColor.y = dot(uvColor, ucoeff);
        
        basePos.x += 2.0 * imageStep.x;
        
        uvColor = texture2D(rgbTexture, basePos).rgb;
        
        dstColor.z = dot(uvColor, vcoeff);
        dstColor.w = dot(uvColor, ucoeff);
        
        dstColor += 0.5;
    }
    
    gl_FragColor = dstColor%s;
});
+
// Packs RGB into NV12 (Y plane followed by interleaved U/V — chroma order
// swapped vs. NV21 above). "%s" is the same printf swizzle slot.
CGEConstString s_fshRGB2NV12 = CGE_SHADER_STRING_PRECISION_H
(
 varying vec2 texCoord;
 uniform sampler2D rgbTexture;
 uniform vec2 imageSize;
 
 const vec3 ycoeff = vec3(0.21260134, 0.71520028, 0.07219838);
 const vec3 ucoeff = vec3(-0.11457283, -0.38542805, 0.5);
 const vec3 vcoeff = vec3(0.5, -0.4541502, -0.04584577);
 
 const vec2 yScale = vec2(4.0, 4.0);
 const vec2 uvScale = vec2(4.0, 8.0);
 
 void main(void)
{
    float uvlines = 0.0625*imageSize.y,
    uvlinesI = floor(uvlines);
    
    vec2 uvPosOffset = vec2(uvlines-uvlinesI,uvlinesI/imageSize.y),
    imageStep = 1.0 / imageSize;
    
    vec4 dstColor;
    
    if(texCoord.y < 0.25)
    {
        vec2 basePos = texCoord * yScale * imageSize;
        
        float addY = floor(basePos.x * imageStep.x);
        
        basePos.x -= addY * imageSize.x;
        basePos.y += addY;
        
        basePos *= imageStep;
        
        float move = 1.0 * imageStep.x;
        
        dstColor.x = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
        
        basePos.x += move;
        dstColor.y = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
        
        basePos.x += move;
        dstColor.z = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
        
        basePos.x += move;
        dstColor.w = dot(texture2D(rgbTexture, basePos).rgb, ycoeff);
    }
    else
    {
        vec2 basePos = (texCoord - 0.25) * uvScale * imageSize;
        
        float addY = floor(basePos.x * imageStep.x);
        
        basePos.x -= addY * imageSize.x;
        basePos.y += addY * 2.0;
        
        
        basePos *= imageStep;
        basePos += 0.5 * imageStep.x;
        
        vec3 uvColor = texture2D(rgbTexture, basePos).rgb;
        
        dstColor.y = dot(uvColor, vcoeff);
        dstColor.x = dot(uvColor, ucoeff);
        
        basePos.x += 2.0 * imageStep.x;
        
        uvColor = texture2D(rgbTexture, basePos).rgb;
        
        dstColor.w = dot(uvColor, vcoeff);
        dstColor.z = dot(uvColor, ucoeff);
        
        dstColor += 0.5;
    }
    
    gl_FragColor = dstColor%s;
});
+
+namespace CGE
+{
+
	CGEConstString TextureDrawer::getFragmentShaderString()
	{
		// Shader-source hook; subclasses substitute their own (see below).
		return s_fsh;
	}
+
	CGEConstString TextureDrawer::getVertexShaderString()
	{
		// Shader-source hook; subclasses substitute their own (see below).
		return s_vsh;
	}
+    
    TextureDrawer::~TextureDrawer()
    {
        // Deleting buffer 0 is a GL no-op, so an uninitialized drawer is safe.
        glDeleteBuffers(1, &m_vertBuffer);
    }
+
	bool TextureDrawer::init()
	{
        // Initialize with this class's (possibly overridden) shader pair.
        return initWithShaderString(getVertexShaderString(), getFragmentShaderString());
	}
+    
    bool TextureDrawer::initWithShaderString(CGEConstString vsh, CGEConstString fsh)
    {
        // Creates the quad VBO and links the program; returns false on failure
        // (a VBO created before a later failure is released by the destructor).
        glGenBuffers(1, &m_vertBuffer);
        if(m_vertBuffer == 0)
            return false;
        glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
        glBufferData(GL_ARRAY_BUFFER, sizeof(CGEGlobalConfig::sVertexDataCommon), CGEGlobalConfig::sVertexDataCommon, GL_STATIC_DRAW);
        
        // Attribute 0 must be bound before the program is linked.
        m_program.bindAttribLocation("vPosition", 0);
        if(!m_program.initWithShaderStrings(vsh, fsh))
        {
            return false;
        }
        
        m_program.bind();
        m_rotLoc = m_program.uniformLocation("rotation");
        m_flipScaleLoc = m_program.uniformLocation("flipScale");
        if(m_rotLoc < 0 || m_flipScaleLoc < 0)
        {
            // Logged but not fatal: init continues with defaults below.
            CGE_LOG_ERROR("TextureDrawer program init error...");
        }
        setRotation(0.0f);
        setFlipScale(1.0f, 1.0f);
        return true;
    }
+
	void TextureDrawer::drawTexture(GLuint src)
	{
		// Draws `src` as a full quad into the currently bound FBO/viewport.
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, src);

		glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
		glEnableVertexAttribArray(0);
        glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0);

        m_program.bind();
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
	}
+
	void TextureDrawer::setRotation(float rad)
	{
		// Rotation of the sampling coordinates, in radians.
		_rotate(m_rotLoc, rad);
	}
+
	void TextureDrawer::setFlipScale(float x, float y)
	{
		// Pass -1 for an axis to mirror it.
		m_program.bind();
		glUniform2f(m_flipScaleLoc, x, y);
	}
+
	void TextureDrawer::bindVertexBuffer()
	{
		// Expose the shared quad VBO so callers can issue custom draws.
		glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
	}
+
+	void TextureDrawer::_rotate(GLint location, float rad)
+	{
+		float cosRad = cosf(rad);
+		float sinRad = sinf(rad);
+		float mat2[] = {
+			cosRad, sinRad,
+			-sinRad, cosRad
+		};
+
+		m_program.bind();
+		glUniformMatrix2fv(location, 1, GL_FALSE, mat2);
+	}
+
+	///////////////////////////////////////////////////////////////
+    
    TextureDrawerExt::~TextureDrawerExt()
    {
        // Intentionally empty.
        
    }
+
	void TextureDrawerExt::drawTexture2Texture(GLuint src, GLuint dst)
	{
		// Renders `src` into `dst` through the internal framebuffer.
		assert(src != 0 && dst != 0);
		m_framebuffer.bindTexture2D(dst);
		TextureDrawer::drawTexture(src);
	}
+
	void TextureDrawerExt::bindTextureDst(GLuint dst, int width, int height)
	{
		// Remember the destination size for drawTexture2Buffer(..., fullSize).
		m_framebuffer.bindTexture2D(dst);
		m_texSize.set(width, height);
	}
+
+
	void TextureDrawerExt::drawTexture2Buffer(GLuint src, bool fullSize)
	{
		// When fullSize is set, force the viewport to the remembered dst size.
		m_framebuffer.bind();
		if(fullSize)
			glViewport(0, 0, m_texSize.width, m_texSize.height);
		TextureDrawer::drawTexture(src);
	}	
+
+	///////////////////////////////////////////////////////////////
+    
    TextureDrawerWithMask::~TextureDrawerWithMask()
    {
        // The drawer owns the mask texture (see setMaskTexture).
        glDeleteTextures(1, &m_maskTexture);
    }
+
	bool TextureDrawerWithMask::init()
	{
		// Like TextureDrawer::initWithShaderString, but with a second set of
		// rotation/flip uniforms and a mask sampler on texture unit 1.
		glGenBuffers(1, &m_vertBuffer);
		if(m_vertBuffer == 0)
			return false;
		glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
		glBufferData(GL_ARRAY_BUFFER, sizeof(CGEGlobalConfig::sVertexDataCommon), CGEGlobalConfig::sVertexDataCommon, GL_STATIC_DRAW);

//		CGE_LOG_INFO("TextureDrawer- program id: %d", m_program.programID());

		m_program.bindAttribLocation("vPosition", 0);
		if(!m_program.initWithShaderStrings(s_vshMask, s_fshMask))
		{
			return false;
		}

		m_program.bind();
		m_rotLoc = m_program.uniformLocation("texRotation");
		m_flipScaleLoc = m_program.uniformLocation("texFlipScale");

		m_maskRotLoc = m_program.uniformLocation("maskRotation");
		m_maskFlipScaleLoc = m_program.uniformLocation("maskFlipScale");

		// Source on unit 0, mask on unit 1 (drawTexture binds them this way).
		m_program.sendUniformi("inputImageTexture", 0);
		m_program.sendUniformi("maskTexture", 1);

		if(m_rotLoc < 0 || m_flipScaleLoc < 0 ||
			m_maskRotLoc < 0 || m_maskFlipScaleLoc < 0)
		{
			CGE_LOG_ERROR("TextureDrawer program init error...");
		}
		setRotation(0.0f);
		setFlipScale(1.0f, 1.0f);
		setMaskRotation(0.0f);
		setMaskFlipScale(1.0, 1.0f);
		m_maskTexture = 0;
		return true;
	}
+
	void TextureDrawerWithMask::setMaskRotation(float rad)
	{
		// Rotation of the mask sampling coordinates, in radians.
		_rotate(m_maskRotLoc, rad);
	}
+
	void TextureDrawerWithMask::setMaskFlipScale(float x, float y)
	{
		// Pass -1 for an axis to mirror the mask on it.
		m_program.bind();
		glUniform2f(m_maskFlipScaleLoc, x, y);
	}
+
	void TextureDrawerWithMask::drawTexture(GLuint src)
	{
		// Bind the mask on unit 1, then let the base class draw `src` (unit 0).
		glActiveTexture(GL_TEXTURE1);
		glBindTexture(GL_TEXTURE_2D, m_maskTexture);

		TextureDrawer::drawTexture(src);
	}
+
	void TextureDrawerWithMask::setMaskTexture(GLuint maskTexture)
	{
		// Takes ownership: the previously held mask texture is deleted here
		// (deleting texture 0 is a GL no-op).
		if(maskTexture == m_maskTexture)
			return;

		glDeleteTextures(1, &m_maskTexture);
		m_maskTexture = maskTexture;
	}
+
+///////////////////////////////////////////////////////////////
+#ifdef GL_TEXTURE_EXTERNAL_OES
+
	CGEConstString TextureDrawer4ExtOES::getFragmentShaderString()
	{
		// External-OES sampling shader.
		return s_fshExternal_OES;
	}
	CGEConstString TextureDrawer4ExtOES::getVertexShaderString()
	{
		// Vertex shader with the extra 4x4 texture-coordinate transform.
		return s_vshExternal_OES;
	}
+
+	bool TextureDrawer4ExtOES::init()
+	{
+		TextureDrawer::init();
+		m_program.bind();
+		m_transformLoc = m_program.uniformLocation("transform");
+		CGE::Mat4 mat4 = CGE::Mat4::makeIdentity();
+		setTransform(mat4.data[0]);
+		return true;
+	}
+
	void TextureDrawer4ExtOES::setTransform(float* mat16)
	{
		// 16 floats, column-major (transpose = GL_FALSE).
		m_program.bind();
		glUniformMatrix4fv(m_transformLoc, 1, GL_FALSE, mat16);
	}
+
	void TextureDrawer4ExtOES::drawTexture(GLuint src)
	{
		// Same as the base draw, but binds GL_TEXTURE_EXTERNAL_OES.
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_EXTERNAL_OES, src);

		glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
		glEnableVertexAttribArray(0);
        glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0);

        m_program.bind();
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
	}
+#endif
+    
+    //////////////////////////////////////////////////////
+    
    bool TextureDrawerYUV::init()
    {
        // Base init compiles the YUV shader pair via the overridden getters,
        // then the two planes are assigned to texture units 0 and 1.
        if(!TextureDrawer::init())
            return false;
        
        m_program.bind();
        m_program.sendUniformi("luminanceTexture", 0);
        m_program.sendUniformi("chrominanceTexture", 1);
        return true;
    }
+    
    void TextureDrawerYUV::drawTextures(GLuint lumaTex, GLuint chromaTex)
    {
        // Luma on unit 0, chroma on unit 1, then the common quad draw.
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, lumaTex);
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, chromaTex);
        drawTextures();
    }
+    
    void TextureDrawerYUV::drawTextures()
    {
        // Assumes both planes are already bound to units 0/1 (see overload above).
        m_program.bind();
        glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0);
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
+    
    CGEConstString TextureDrawerYUV::getFragmentShaderString()
    {
        // YUV -> RGB conversion shader.
        return s_fshYUVConvert;
    }
+    
    CGEConstString TextureDrawerYUV::getVertexShaderString()
    {
        // Reuses the default vertex shader.
        return s_vsh;
    }
+    
+    //--------------------TextureDrawerRGB2YUV420P------------------------
+    
    CGEConstString TextureDrawerRGB2YUV420P::getVertexShaderString()
    {
        // Vertex shader that maps output onto the packed-plane region.
        return s_vshRGB2YUV;
    }
+    
    CGEConstString TextureDrawerRGB2YUV420P::getFragmentShaderString()
    {
        // Planar YUV420P packing shader (contains the "%s" swizzle slot).
        return s_fshRGB2YUV;
    }
+    
    void TextureDrawerRGB2YUV420P::setOutputSize(int width, int height)
    {
    	m_program.bind();
        // Image size used by the packing shader to derive plane layout —
        // appears to be the source RGB size; confirm against call sites.
        m_program.sendUniformf("imageSize", width, height);
    }
+
    bool TextureDrawerRGB2YUV420P::initWithOutputFormat(CGETextureDrawerOutputFormat format)
    {
        // The fragment shader contains a single "%s" slot; fill it with ""
        // (default channel order) or ".bgra" (swizzled), then init normally.
        CGEConstString fsh = getFragmentShaderString();
        std::vector<char> vecData(strlen(fsh) + 256);
        vecData[0] = '\0';
        
        sprintf(vecData.data(), fsh, format == CGETextureDrawerOutputFormat_Default ? "" : ".bgra");
        
        if(!TextureDrawer::initWithShaderString(getVertexShaderString(), vecData.data()))
            return false;
        
        return true;
    }
+    
+    //--------------------TextureDrawerRGB2NV21------------------------
+    
    CGEConstString TextureDrawerRGB2NV21::getFragmentShaderString()
    {
        // NV21 packing shader (Y plane + interleaved V/U).
        return s_fshRGB2NV21;
    }
+    
+    //--------------------TextureDrawerRGB2NV12------------------------
+    
    CGEConstString TextureDrawerRGB2NV12::getFragmentShaderString()
    {
        // NV12 packing shader (Y plane + interleaved U/V).
        return s_fshRGB2NV12;
    }
+    
+    //////////////////////////////////////
+    
+    CGELerpBlurUtil::CGELerpBlurUtil()
+    {
+        memset(m_texCache, 0, sizeof(MAX_LERP_BLUR_INTENSITY));
+        m_intensity = 0;
+        m_vertBuffer = 0;
+    }
+    
    CGELerpBlurUtil::~CGELerpBlurUtil()
    {
        // Release the mipmap chain first, then the quad VBO if it was created.
        _clearMipmaps();
        if(m_vertBuffer != 0)
            glDeleteBuffers(1, &m_vertBuffer);
    }
+    
+    bool CGELerpBlurUtil::init()
+    {
+        memset(m_texCache, 0, sizeof(MAX_LERP_BLUR_INTENSITY));
+        m_intensity = MAX_LERP_BLUR_INTENSITY;
+        m_program.bindAttribLocation(CGEImageFilterInterface::paramPositionIndexName, 0);
+        if(m_program.initWithShaderStrings(g_vshDefaultWithoutTexCoord, g_fshDefault))
+        {
+            m_isBaseChanged = true;
+            
+            m_vertBuffer = cgeGenCommonQuadArrayBuffer();
+            return true;
+        }
+        return false;
+    }
+    
+    void CGELerpBlurUtil::setBlurLevel(int value)
+    {
+        m_intensity = value;
+        if(m_intensity > MAX_LERP_BLUR_INTENSITY)
+            m_intensity = MAX_LERP_BLUR_INTENSITY;
+    }
+    
    void CGELerpBlurUtil::_genMipmaps(int width, int height)
    {
        // Rebuilds the down-scaling texture chain for a width x height base
        // image; each level shrinks by the divisor table in _calcLevel.
        _clearMipmaps();
        GLuint texIDs[MAX_LERP_BLUR_INTENSITY];
        glGenTextures(MAX_LERP_BLUR_INTENSITY, texIDs);
        
        for(int i = 0; i != MAX_LERP_BLUR_INTENSITY; ++i)
        {
            // Clamp each level to at least 1x1.
            CGESizei sz(_calcLevel(width, i), _calcLevel(height, i));
            if(sz.width < 1)
                sz.width = 1;
            if(sz.height < 1)
                sz.height = 1;
            glBindTexture(GL_TEXTURE_2D, texIDs[i]);
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz.width, sz.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
            // Linear filtering does the actual blurring during resampling.
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            m_texCache[i].texID = texIDs[i];
            m_texCache[i].size = sz;
        }
    }
+    
+    void CGELerpBlurUtil::calcWithTexture(GLuint texture, int width, int height, GLuint target, int targetWidth, int targetHeight)
+    {
+        m_program.bind();
+        glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
+        glEnableVertexAttribArray(0);
+        glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
+        glActiveTexture(GL_TEXTURE0);
+        
+        glDisable(GL_BLEND);
+        
+        if(m_texCache[0].texID == 0 || m_cacheTargetWidth != width || m_cacheTargetHeight != height || m_isBaseChanged)
+        {
+            m_cacheTargetWidth = width;
+            m_cacheTargetHeight = height;
+            if(m_texCache[0].texID == 0)
+                _genMipmaps(width, height);
+            m_isBaseChanged = false;
+        }
+        
+        m_framebuffer.bindTexture2D(m_texCache[0].texID);
+        glBindTexture(GL_TEXTURE_2D, texture);
+        glViewport(0, 0, m_texCache[0].size.width, m_texCache[0].size.height);
+        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+        glFlush();
+        
+        //down scale
+        for(int i = 1; i < m_intensity; ++i)
+        {
+            TextureCache& texCache = m_texCache[i];
+            m_framebuffer.bindTexture2D(texCache.texID);
+            glViewport(0, 0, texCache.size.width, texCache.size.height);
+            
+            glBindTexture(GL_TEXTURE_2D, m_texCache[i - 1].texID);
+            glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+            glFlush();
+        }
+        
+        //up scale
+        for(int i = m_intensity - 1; i > 0; --i)
+        {
+            TextureCache& texCache = m_texCache[i - 1];
+            m_framebuffer.bindTexture2D(texCache.texID);
+            glViewport(0, 0, texCache.size.width, texCache.size.height);
+            
+            glBindTexture(GL_TEXTURE_2D, m_texCache[i].texID);
+            glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+            glFlush();
+        }
+        
+        if(target != 0)
+        {
+            m_framebuffer.bindTexture2D(target);
+            glViewport(0, 0, targetWidth, targetHeight);
+            glBindTexture(GL_TEXTURE_2D, m_texCache[0].texID);
+            glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+        }
+    }
+    
    void CGELerpBlurUtil::drawTexture(GLuint texID)
    {
        // Plain quad draw of texID into the current framebuffer/viewport.
        m_program.bind();
        glBindBuffer(GL_ARRAY_BUFFER, m_vertBuffer);
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texID);
        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
+    
+    void CGELerpBlurUtil::_clearMipmaps()
+    {
+        if(m_texCache[0].texID != 0)
+        {
+            GLuint texIDs[MAX_LERP_BLUR_INTENSITY];
+            for(int i = 0; i != MAX_LERP_BLUR_INTENSITY; ++i)
+                texIDs[i] = m_texCache[i].texID;
+            glDeleteTextures(MAX_LERP_BLUR_INTENSITY, texIDs);
+            memset(m_texCache, 0, sizeof(MAX_LERP_BLUR_INTENSITY));
+            m_cacheTargetWidth = 0;
+            m_cacheTargetHeight = 0;
+        }
+    }
+    
    int CGELerpBlurUtil::_calcLevel(int len, int level)
    {
        // Per-level shrink divisors; float division, truncated to int.
        // NOTE(review): no bounds check — `level` must stay below the table
        // size (9 entries, presumably == MAX_LERP_BLUR_INTENSITY; confirm).
        static float sLevelList[] = {
            2, 3, 5, 7, 10, 14, 19, 26, 35
        };
        int ret = len / sLevelList[level];
        return ret;
//        return roundf(len / (level * 4.0));
    }
+
+}
+
+
+
+
+
+
+
+
+
+

+ 347 - 0
media/cge_library/src/main/jni/cge/extends/cgeThread.cpp

@@ -0,0 +1,347 @@
+/*
+* cgeThread.cpp
+*
+*  Created on: 2015-3-17
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeThread.h"
+#include <cassert>
+#include <cstdio>
+
+namespace CGE
+{
+	// CGEThreadPreemptive
+
	// The worker thread is created lazily on the first run() call.
	CGEThreadPreemptive::CGEThreadPreemptive() : m_thread(nullptr), m_taskRestart(false), m_threadOver(false), m_runningStatus(false)
	{

	}
+
	CGEThreadPreemptive::~CGEThreadPreemptive()
	{
		// Ensure the worker thread has fully stopped before destruction.
		quit();
	}
+
	void CGEThreadPreemptive::run()
	{
		// Start (first call) or preempt/restart the task loop; a runTask()
		// already in progress finishes before the restart takes effect.
		m_runningStatus = true;
		m_taskRestart = true;

		if(m_thread != nullptr)
		{
			// Wake the worker if it is sleeping on the condition variable.
			m_mutex.lock();
			m_condition.notify_one();			
			m_mutex.unlock();
		}
		else
		{
			m_thread = new std::thread(std::bind(&CGEThreadPreemptive::_run, this));
		}
		
	}
+
	void CGEThreadPreemptive::_run()
	{
		// Worker loop: run the task whenever m_taskRestart is set; otherwise
		// sleep on the condition variable until run() or quit() wakes us.
		for(;;)
		{
			{
				std::unique_lock<std::mutex> lock(m_mutex);

				if(m_threadOver)
					break;

				if(!m_taskRestart)
				{
					m_runningStatus = false;
					m_condition.wait(lock);

					if(m_threadOver)
						break;

					m_runningStatus = true;
				}

				m_taskRestart = false;
			}

			// Executed without holding the lock so run() can preempt.
			runTask();
		}
	}
+
+	// void CGEThreadPreemptive::terminate()
+	// {
+	// 	//std::terminate();
+	// }
+
	void CGEThreadPreemptive::quit()
	{
		// Signal the loop to finish, wake it, then join and free the thread.
		m_mutex.lock();
		m_threadOver = true;
		m_mutex.unlock();

		m_condition.notify_all();

		if(m_thread != nullptr)
		{
			m_thread->join();
			delete m_thread;
			m_thread = nullptr;
		}
	}
+
	void CGEThreadPreemptive::join()
	{
		// Wait for the thread to end without requesting termination.
		if(m_thread != nullptr && m_thread->joinable())
			m_thread->join();
	}
+
+	//////////////////////////////////////////////////////////////////////////
+
+    
    // Lazily created process-wide pool used by runOnGlobalPool().
    CGEThreadPool* CGEThreadPool::globalThreadPool = nullptr;
+    
    void CGEThreadPool::setGlobalThreadNum(size_t maxThreadNum)
    {
        // Replaces the global pool with one of the given capacity.
        // NOTE(review): not synchronized — call before any concurrent use.
        if(globalThreadPool != nullptr)
            delete globalThreadPool;
        globalThreadPool = new CGEThreadPool(maxThreadNum);
    }
+    
    void CGEThreadPool::runOnGlobalPool(const CGE::CGEThreadPool::Work &work)
    {
        // Lazily creates the shared pool on first use.
        // NOTE(review): the lazy init is not synchronized — confirm callers.
        if(globalThreadPool == nullptr)
            globalThreadPool = new CGEThreadPool();
        globalThreadPool->run(work);
    }
+    
+    void CGEThreadPool::clearGlobalPool()
+    {
+        delete globalThreadPool;
+    }
+    
	// Workers are created on demand in run(), up to maxWorkerNum.
	CGEThreadPool::CGEThreadPool(std::list<std::unique_ptr<Worker>>::size_type maxWorkerNum) : m_threadOver(false), m_maxWorkerSize(maxWorkerNum), m_threadJoining(false)
	{
		assert(maxWorkerNum >= 1);
	}
+
	CGEThreadPool::~CGEThreadPool()
	{
		// Drops pending work and stops all workers.
		quit();
	}
+
	bool CGEThreadPool::isActive()
	{
		// True while any work is queued or any worker is executing.
		std::unique_lock<std::mutex> lock(m_threadMutex);

		if(!m_workList.empty())
			return true;

		for(auto& t : m_workerList)
		{
			if(t->isActive())
				return true;
		}
		return false;
	}
+
	bool CGEThreadPool::isBusy()
	{
		// True when every worker is occupied. Vacuously true with no workers,
		// which run() relies on to spawn the first worker.
		// NOTE(review): reads worker state without m_threadMutex — racy.
		for(auto& t : m_workerList)
		{
			if(!t->isActive())
				return false;
		}
		return true;
	}
+    
+    void CGEThreadPool::wait4Active(long ms)
+    {
+        if(ms <= 0)
+        {
+            while(isActive())
+                std::this_thread::sleep_for(std::chrono::milliseconds(1));
+        }
+        else
+        {
+            while(isActive() && ms > 0)
+            {
+                std::this_thread::sleep_for(std::chrono::milliseconds(1));
+                --ms;
+            }
+        }
+    }
+    
+    void CGEThreadPool::wait4Busy(long ms)
+    {
+        if(ms <= 0)
+        {
+            while(isBusy())
+                std::this_thread::sleep_for(std::chrono::milliseconds(1));
+        }
+        else
+        {
+            while(isBusy() && ms > 0)
+            {
+                std::this_thread::sleep_for(std::chrono::milliseconds(1));
+                --ms;
+            }
+        }
+    }
+
	void CGEThreadPool::terminate()
	{
		// Not implemented.
	}
+
	void CGEThreadPool::quit()
	{
		// Drop all pending work, mark the pool finished, wake every worker,
		// and wait for each to exit.
		if(m_threadOver && m_workList.empty() && m_workerList.empty())
		{
			return;
		}

		m_threadMutex.lock();
		m_workList.clear();
		m_threadOver = true;
		m_threadMutex.unlock();

		m_poolMutex.lock();

		m_condition.notify_all();

		for(auto& t : m_workerList)
		{
			t->waitForQuit();
		}
		m_workerList.clear();
		m_poolMutex.unlock();
	}
+
	void CGEThreadPool::join()
	{
		// Let the workers drain the queue and exit, then discard them.
		// Unlike quit(), queued work is executed rather than dropped.
		m_poolMutex.lock();
		m_threadJoining = true;
		m_condition.notify_all();
		for(auto& t : m_workerList)
		{
			t->join();
		}
		m_workerList.clear();
		m_threadJoining = false;
		m_poolMutex.unlock();
	}
+
	void CGEThreadPool::run(const Work& work)
	{
		// Enqueue the work, then either spawn a new worker (if all workers are
		// busy and the pool is below capacity) or wake an idle one.
		m_threadMutex.lock();
		m_workList.push_back(work);
		m_threadMutex.unlock();

		m_poolMutex.lock();

		if(m_workerList.size() < m_maxWorkerSize && isBusy())
		{
			m_workerList.push_back(std::unique_ptr<Worker>(new Worker(*this)));
			m_workerList.back()->run();
		}
		else if(!isBusy())
		{
			m_condition.notify_one();
		}
		m_poolMutex.unlock();
	}
+
+	//////////////////////////////////////////////////////////////////////////
+
	// The worker's thread is created lazily by run().
	CGEThreadPool::Worker::Worker(CGEThreadPool& pool) : m_thread(nullptr), m_pool(pool), m_runningStatus(false), m_shouldLeave(false)
	{

	}
+
+	CGEThreadPool::Worker::Worker(Worker& worker) : m_pool(worker.m_pool)
+	{
+
+	}
+
	CGEThreadPool::Worker::Worker(Worker&& worker) : m_pool(worker.m_pool)
	{
		// Steal the thread handle; the source must not be joined afterwards.
		m_thread = worker.m_thread;
		m_runningStatus = worker.m_runningStatus;
		m_shouldLeave = worker.m_shouldLeave;
		worker.m_thread = nullptr;
	}
+
	CGEThreadPool::Worker::~Worker()
	{
		// Blocks until the worker thread has exited, then frees it.
		waitForQuit();
	}
+
	void CGEThreadPool::Worker::run()
	{
		// Spawn the worker thread once; subsequent calls are no-ops.
		if(m_thread == nullptr)
		{
			m_runningStatus = true;

			m_thread = new std::thread(std::bind(&CGEThreadPool::Worker::_run, this));
		}
	}
+
	void CGEThreadPool::Worker::_run()
	{
		// Worker loop: pop one queued Work at a time; sleep when the queue is
		// empty; exit on pool shutdown, a join, or an explicit leave request.
		for(;;)
		{
			CGEThreadPool::Work work;

			{
				std::unique_lock<std::mutex> lock(m_pool.m_threadMutex);

				if(m_pool.m_threadOver || m_shouldLeave)
					break;

				if(m_pool.m_workList.empty())
				{
					m_runningStatus = false;

					if(m_pool.m_threadJoining)
						break;

					m_pool.m_condition.wait(lock);

					if(m_pool.m_threadOver || m_shouldLeave)
						return;
				}

				// Re-check: another worker may have taken the work we were
				// woken for.
				if(m_pool.m_workList.empty())
					continue;

				work = std::move(m_pool.m_workList.front());
				m_pool.m_workList.pop_front();
				m_runningStatus = true; 
			}

			// Run outside the lock so other workers keep draining the queue.
			work.run();
		}
	}
+
	void CGEThreadPool::Worker::terminate()
	{
		// Not implemented.
	}
+
	void CGEThreadPool::Worker::waitForQuit()
	{
		// Join (if joinable) and free the thread object.
		join();
		delete m_thread;
		m_thread = nullptr;
		m_runningStatus = false;
	}
+
	void CGEThreadPool::Worker::join()
	{
		// Wait for the worker thread to finish, without freeing it.
		if(m_thread != nullptr && m_thread->joinable())
			m_thread->join();
	}
+
+}

+ 833 - 0
media/cge_library/src/main/jni/cge/filters/CGELiquifyFilter.cpp

@@ -0,0 +1,833 @@
+/*
+* cgeLiquidation.cpp
+*
+*  Created on: 2014-5-15
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "CGELiquifyFilter.h"
+#include "cgeMat.h"
+
// Vertex shader for the liquify filter. Positions arrive in [0,1] mesh space
// and are mapped to clip space; the undeformed coordinates come in as vTexture.
// (No comments are added inside the macro: its body is stringized into the
// GLSL source, so any change there would alter the runtime shader string.)
static CGEConstString s_vshDeform = CGE_SHADER_STRING
(
attribute vec2 vPosition;
attribute vec2 vTexture;
varying vec2 textureCoordinate;
void main()
{
	//An opportunism code. Do not use it unless you know what it means.
	gl_Position = vec4(vPosition * 2.0 - 1.0, 0.0, 1.0);
	
	textureCoordinate = vTexture;//(vPosition.xy + 1.0) / 2.0;
}
);

// Fragment shader: plain textured pass-through of the source image.
static CGEConstString s_fshDeform = CGE_SHADER_STRING_PRECISION_M
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
	gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
}
);
+
#ifdef CGE_DEFORM_SHOW_MESH

// Debug-only fragment shader: draws the wireframe overlay using the inverted
// source color so the mesh is visible over any image.
static CGEConstString s_fshMesh = CGE_SHADER_STRING_PRECISION_L
(
varying vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
void main()
{
	gl_FragColor.rgb = 1.0 - texture2D(inputImageTexture, textureCoordinate).rgb;
}
);

#endif
+
+namespace CGE
+{
+
	CGEConstString CGELiquifyFilter::paramTextureVertexName = "vTexture";

	// Constructor: GL buffer handles start empty (created later in initBuffers)
	// and a 10-step undo history is configured by default.
	// NOTE(review): attribute locations are only bound explicitly in
	// CGE_DEFORM_SHOW_MESH builds; presumably the base class binds locations
	// 0/1 by convention in normal builds — confirm against the shader setup.
	CGELiquifyFilter::CGELiquifyFilter() : m_meshVBO(0), m_meshIndexVBO(0), m_textureVBO(0), m_currentMeshIndex(0), m_doingRestore(false)
	{
#ifdef CGE_DEFORM_SHOW_MESH
		m_program.bindAttribLocation(paramTextureVertexName, 1);
		m_programMesh.bindAttribLocation(paramPositionIndexName, 0);
		m_programMesh.bindAttribLocation(paramTextureVertexName, 1);
		if(!m_programMesh.initWithShaderStrings(s_vshDeform, s_fshMesh))
		{
			CGE_LOG_ERROR("Init Mesh Program Failed!\n");
		}

		m_bShowMesh = false;

#endif

		setUndoSteps(10);
	}
+
+	CGELiquifyFilter::~CGELiquifyFilter()
+	{
+		
+		glDeleteBuffers(1, &m_meshVBO);
+		glDeleteBuffers(1, &m_meshIndexVBO);
+		glDeleteBuffers(1, &m_textureVBO);
+	}
+
+	bool CGELiquifyFilter::initWithMesh(float width, float height, float stride)
+	{
+		return initWithMesh(width / height, stride / CGE_MAX(width, height));
+	}
+
+	bool CGELiquifyFilter::initWithMesh(float ratio, float stride)
+	{
+		if(!initShadersFromString(s_vshDeform, s_fshDeform))
+			return false;
+
+		stride = CGE_MID(stride, 0.001f, 0.2f);
+
+		CGE_LOG_CODE(
+			if(ratio < 0.0f)
+			{
+				CGE_LOG_ERROR("DeformProcessor::initWithMesh Ratio must > 0!\n");
+				return false;
+			}
+		);
+
+		float len = 1.0f / stride;
+		CGESizef sz;
+		if(ratio > 1.0f)
+		{
+			sz.width = len;
+			sz.height = len / ratio;
+		}
+		else
+		{
+			sz.width = len * ratio;
+			sz.height = len;
+		}
+
+		m_meshSize.width = (int)sz.width;
+		m_meshSize.height = (int)sz.height;
+
+		CGE_LOG_CODE(
+			if(m_meshSize.width < 2 || m_meshSize.width > 5000 ||
+				m_meshSize.height < 2 || m_meshSize.height > 5000)
+			{
+				CGE_LOG_ERROR("Invalid Mesh Size!\n");
+				return false;
+			}
+			);
+
+		m_mesh.resize(m_meshSize.width * m_meshSize.height);
+		restoreMesh();
+
+		return initBuffers();
+	}
+
+	void CGELiquifyFilter::restoreMesh()
+	{
+		if(m_mesh.size() != m_meshSize.width * m_meshSize.height || m_mesh.empty())
+		{
+			CGE_LOG_ERROR("Invalid Mesh!\n");
+		}
+
+		const float widthStep = 1.0f / (m_meshSize.width - 1.0f);
+		const float heightStep = 1.0f / (m_meshSize.height - 1.0f);
+
+		for(int i = 0; i != m_meshSize.height; ++i)
+		{
+			const float heightI = i * heightStep;
+			int index = m_meshSize.width * i;
+			for(int j = 0; j != m_meshSize.width; ++j)
+			{
+				const float widthJ = j * widthStep;
+				m_mesh[index] = Vec2f(widthJ, heightI);
+				++index;
+			}
+		}
+
+		m_vecMeshes.clear();
+
+		updateBuffers();
+	}
+
	// Blend the whole mesh toward the undeformed grid by 'intensity'
	// (0 = keep the snapshot, 1 = fully restored). The first call after an
	// edit pushes an undo snapshot; subsequent calls (m_doingRestore set)
	// re-blend against that same snapshot so the slider is not cumulative.
	void CGELiquifyFilter::restoreMeshWithIntensity(float intensity)
	{
		if(m_mesh.size() != m_meshSize.width * m_meshSize.height || m_mesh.empty())
		{
			CGE_LOG_ERROR("Invalid Mesh!\n");
			return ;
		}

		if(!m_doingRestore && !pushMesh())
		{
			CGE_LOG_ERROR("DeformProcessor::restoreMeshWithIntensity failed!\n");
			return ;
		}

		// Snapshot to blend from: the mesh state saved at the undo cursor.
		const std::vector<Vec2f>& v2Mesh = m_vecMeshes[m_currentMeshIndex];

		const float widthStep = 1.0f / (m_meshSize.width - 1.0f);
		const float heightStep = 1.0f / (m_meshSize.height - 1.0f);

		const float revIntensity = 1.0f - intensity;

		for(int i = 0; i != m_meshSize.height; ++i)
		{
			const float heightI = i * heightStep;
			int index = m_meshSize.width * i;
			for(int j = 0; j != m_meshSize.width; ++j)
			{
				const float widthJ = j * widthStep;
				// Lerp between the snapshot and the pristine grid position.
				const Vec2f v(widthJ, heightI);
				m_mesh[index] = v2Mesh[index] * revIntensity + v * intensity;
				++index;
			}
		}

		updateBuffers();

		m_doingRestore = true;
	}
+
	// (Re)create the three GL buffers backing the mesh:
	//  - m_meshVBO:      deformed vertex positions (re-uploaded on every edit, STREAM)
	//  - m_textureVBO:   original undeformed coordinates, used as texcoords (STATIC)
	//  - m_meshIndexVBO: triangle indices covering the grid (STATIC)
	// NOTE(review): indices are GL_UNSIGNED_SHORT, so meshes above 65535
	// vertices would overflow; the 5000-per-edge cap in initWithMesh does not
	// by itself prevent that — confirm the practical mesh sizes.
	bool CGELiquifyFilter::initBuffers()
	{
		glDeleteBuffers(1, &m_meshVBO);
		glGenBuffers(1, &m_meshVBO);
		glBindBuffer(GL_ARRAY_BUFFER, m_meshVBO);
		glBufferData(GL_ARRAY_BUFFER, m_mesh.size() * sizeof(m_mesh[0]), m_mesh.data(), GL_STREAM_DRAW);

		glDeleteBuffers(1, &m_textureVBO);
		glGenBuffers(1, &m_textureVBO);
		glBindBuffer(GL_ARRAY_BUFFER, m_textureVBO);
		glBufferData(GL_ARRAY_BUFFER, m_mesh.size() * sizeof(m_mesh[0]), m_mesh.data(), GL_STATIC_DRAW);


		// Two triangles (6 indices) per grid cell.
		int index = 0;
		std::vector<unsigned short> meshIndexes;
		m_meshIndexSize = (m_meshSize.width-1) * (m_meshSize.height-1) * 2;
		meshIndexes.resize(m_meshIndexSize * 3);

		for(int i = 0; i < m_meshSize.height - 1; ++i)
		{
			int pos1 = i * m_meshSize.width;
			int pos2 = (i + 1) * m_meshSize.width;

#ifdef CGE_DEFORM_SHOW_MESH

			// Debug builds alternate the traversal direction per row —
			// presumably so drawing the same index buffer as GL_LINE_STRIP
			// produces a cleaner wireframe; confirm if changing.
			if(i%2)
			{
				for(int j = 0; j < m_meshSize.width - 1; ++j)
				{
					meshIndexes[index] = pos1 + j;
					meshIndexes[index + 1] = pos1 + j + 1;
					meshIndexes[index + 2] = pos2 + j;
					meshIndexes[index + 3] = pos2 + j;
					meshIndexes[index + 4] = pos1 + j + 1;
					meshIndexes[index + 5] = pos2 + j + 1;
					index += 6;
				}
			}
			else
			{
				for(int j = m_meshSize.width - 2; j >= 0; --j)
				{
					meshIndexes[index] = pos1 + j + 1;
					meshIndexes[index + 1] = pos2 + j + 1;
					meshIndexes[index + 2] = pos2 + j;
					meshIndexes[index + 3] = pos1 + j;
					meshIndexes[index + 4] = pos1 + j + 1;
					meshIndexes[index + 5] = pos2 + j;		
					index += 6;
				}
			}

#else
			for(int j = 0; j < m_meshSize.width - 1; ++j)
			{
				meshIndexes[index] = pos1 + j;
				meshIndexes[index + 1] = pos1 + j + 1;
				meshIndexes[index + 2] = pos2 + j;
				meshIndexes[index + 3] = pos2 + j;
				meshIndexes[index + 4] = pos1 + j + 1;
				meshIndexes[index + 5] = pos2 + j + 1;
				index += 6;
			}
#endif
		}

		glDeleteBuffers(1, &m_meshIndexVBO);
		glGenBuffers(1, &m_meshIndexVBO);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_meshIndexVBO);
		glBufferData(GL_ELEMENT_ARRAY_BUFFER, meshIndexes.size() * sizeof(meshIndexes[0]), meshIndexes.data(), GL_STATIC_DRAW);

		// Leave no buffer bound on exit.
		glBindBuffer(GL_ARRAY_BUFFER, 0);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
		return true;
	}
+
+	bool CGELiquifyFilter::updateBuffers()
+	{
+		if(m_meshVBO != 0)
+		{
+			glBindBuffer(GL_ARRAY_BUFFER, m_meshVBO);
+			glBufferData(GL_ARRAY_BUFFER, sizeof(m_mesh[0]) * m_mesh.size(), m_mesh.data(), GL_STREAM_DRAW);
+			glBindBuffer(GL_ARRAY_BUFFER, 0);
+			return true;
+		}
+		return false;
+	}
+
	// Draw the deformed mesh into the handler's target FBO sampling srcTexture.
	// Attribute 0 = deformed positions (m_meshVBO), attribute 1 = original
	// undeformed coordinates (m_textureVBO) used as texture coordinates.
	// NOTE: vertexBufferID is ignored — this filter draws from its own VBOs.
	void CGELiquifyFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
	{
		if(m_meshVBO == 0 || m_mesh.empty())
		{
			CGE_LOG_ERROR("DeformProcessor::render2Texture - Invalid Mesh!\n");
			handler->swapBufferFBO();
			return ;
		}
		
		handler->setAsTarget();
		m_program.bind();
		
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, srcTexture);

		if(m_uniformParam != nullptr)
			m_uniformParam->assignUniforms(handler, m_program.programID());

		{
			// Wire up the two vertex streams and the index buffer.
			glBindBuffer(GL_ARRAY_BUFFER, m_meshVBO);
			glEnableVertexAttribArray(0);
			glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
			
			glBindBuffer(GL_ARRAY_BUFFER, m_textureVBO);
			glEnableVertexAttribArray(1);
			glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, 0);

			glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_meshIndexVBO);

		}

		glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
		glClear(GL_COLOR_BUFFER_BIT);

 		glDrawElements(GL_TRIANGLES, m_meshIndexSize * 3, GL_UNSIGNED_SHORT, 0);

#ifdef CGE_DEFORM_SHOW_MESH
		// Debug overlay: re-draw the same indices as lines with the inverted-color program.
		if(m_bShowMesh)
		{
			m_programMesh.bind();
			glDrawElements(GL_LINE_STRIP, m_meshIndexSize * 3, GL_UNSIGNED_SHORT, 0);
		}
#endif
		
		cgeCheckGLError("glDrawElements");
		glBindBuffer(GL_ARRAY_BUFFER, 0);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
	}
+
	// Drag ("forward warp") deformation: every mesh vertex within 'radius'
	// pixels of the stroke start is pushed along (end - start), weighted by a
	// smoothstep falloff (p*p*(3-2p)) times 'intensity'. Inputs are in pixels;
	// the mesh itself stores normalized [0,1] positions.
	void CGELiquifyFilter::forwardDeformMesh(Vec2f start, Vec2f end, float w, float h, float radius, float intensity)
	{
		m_doingRestore = false;

		CGE_LOG_CODE(
		clock_t t = clock();
		);

		// Bounding box of the affected region (pixel space, padded by radius).
		float loopStartY = CGE_MAX(CGE_MIN(start[1], end[1]) - radius, -radius);
		float loopEndY = CGE_MIN(CGE_MAX(start[1], end[1]) + radius, h + radius);
		float loopStartX = CGE_MAX(CGE_MIN(start[0], end[0]) - radius, -radius);
		float loopEndX = CGE_MIN(CGE_MAX(start[0], end[0]) + radius, w + radius);

		CGE_LOG_INFO("Canvas Size: %g, %g\nBoundBox: left:%g, top: %g, right: %g, bottom: %g\n", w, h, loopStartX, loopStartY, loopEndX, loopEndY);

		// Displacement converted to normalized mesh space.
		Vec2f sz(w, h);
		Vec2f motion = (end - start) / sz;

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const Vec2f v0 = m_mesh[lines + j] * sz;

				// Cheap reject outside the bounding box.
				if(v0[0] < loopStartX || v0[0] > loopEndX ||
					v0[1] < loopStartY || v0[1] > loopEndY)
					continue;

				const Vec2f v = v0 - start;
				float dis = v.length();
				if(dis > radius)
					continue;
				// Smoothstep falloff from the stroke start.
				float percent = 1.0f - dis / radius;
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				m_mesh[lines + j] += motion * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", float(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
	// Same drag deformation as forwardDeformMesh, but the displacement vector
	// is first rotated by -angle (radians), i.e. expressed in a rotated
	// coordinate frame before being applied.
	void CGELiquifyFilter::pushLeftDeformMesh(Vec2f start, Vec2f end, float w, float h, float radius, float intensity, float angle)
	{
		

		m_doingRestore = false;

		CGE_LOG_CODE(
			clock_t t = clock();
		);

		// Bounding box of the affected region (pixel space, padded by radius).
		float loopStartY = CGE_MAX(CGE_MIN(start[1], end[1]) - radius, -radius);
		float loopEndY = CGE_MIN(CGE_MAX(start[1], end[1]) + radius, h + radius);
		float loopStartX = CGE_MAX(CGE_MIN(start[0], end[0]) - radius, -radius);
		float loopEndX = CGE_MIN(CGE_MAX(start[0], end[0]) + radius, w + radius);

		CGE_LOG_INFO("Canvas Size: %g, %g\nBoundBox: left:%g, top: %g, right: %g, bottom: %g\n", w, h, loopStartX, loopStartY, loopEndX, loopEndY);

		Vec2f sz(w, h);
		Vec2f motion = (end - start) / sz;
		
		//coordinate transformation
		const float cosRad = cosf(-angle);
		const float sinRad = sinf(-angle);
		motion = Mat2(cosRad, sinRad, -sinRad, cosRad) * motion;

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const Vec2f v0 = m_mesh[lines + j] * sz;

				if(v0[0] < loopStartX || v0[0] > loopEndX ||
					v0[1] < loopStartY || v0[1] > loopEndY)
					continue;

				const Vec2f v = v0 - start;
				float dis = v.length();
				if(dis > radius)
					continue;
				// Smoothstep falloff from the stroke start.
				float percent = 1.0f - dis / radius;
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				m_mesh[lines + j] += motion * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", float(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
	// Local restore brush: vertices within 'radius' pixels of 'pnt' are
	// blended back toward their undeformed grid positions, with a smoothstep
	// falloff scaled by 'intensity'.
	void CGELiquifyFilter::restoreMeshWithPoint(Vec2f pnt, float w, float h, float radius, float intensity)
	{
		

		m_doingRestore = false;

		CGE_LOG_CODE(
			clock_t t = clock();
		);

		Vec2f sz(w, h);
		const float widthStep = 1.0f / (m_meshSize.width - 1.0f);
		const float heightStep = 1.0f / (m_meshSize.height - 1.0f);

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			const float heightI = i * heightStep;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const int index = lines + j;
				const Vec2f v0 = m_mesh[index] * sz - pnt;
				float dis = v0.length();
				if(dis > radius)
					continue;

				// Lerp toward the pristine grid position for this vertex.
				const float widthJ = j * widthStep;
				const Vec2f v(widthJ, heightI);
				float percent = 1.0f - dis / radius;
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				m_mesh[index] = m_mesh[index] * (1.0f - percent) + v * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", float(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
	// Bloat brush: vertices within 'radius' pixels of 'pnt' are pushed away
	// from it (v points from pnt to the vertex), smoothstep falloff.
	void CGELiquifyFilter::bloatMeshWithPoint(Vec2f pnt, float w, float h, float radius, float intensity)
	{
		

		m_doingRestore = false;

		CGE_LOG_CODE(
			clock_t t = clock();
		);

		Vec2f sz(w, h);

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const int index = lines + j;
				const Vec2f v = m_mesh[index] * sz - pnt;
				float dis = v.length();
				if(dis > radius)
					continue;

				float percent = 1.0f - dis / radius;
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				// Displace outward, converted back to normalized mesh space.
				m_mesh[index] += v / sz * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", float(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
	// Pucker/wrinkle brush: the mirror of bloat — vertices within 'radius'
	// pixels of 'pnt' are pulled toward it (v points from the vertex to pnt).
	void CGELiquifyFilter::wrinkleMeshWithPoint(Vec2f pnt, float w, float h, float radius, float intensity)
	{
		

		m_doingRestore = false;

		CGE_LOG_CODE(
			clock_t t = clock();
		);

		Vec2f sz(w, h);

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const int index = lines + j;
				const Vec2f v = pnt - m_mesh[index] * sz;
				float dis = v.length();
				if(dis > radius)
					continue;

				float percent = 1.0f - dis / radius;
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				// Displace toward the brush point, in normalized mesh space.
				m_mesh[index] += v / sz * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", (double)(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
	// Set the maximum number of undo snapshots kept in m_vecMeshes.
	// n == 0 disables undo entirely and drops all snapshots.
	void CGELiquifyFilter::setUndoSteps(unsigned n)
	{
		m_undoSteps = n;
		if(n == 0)
			m_vecMeshes.clear();
		else if(m_currentMeshIndex > n)
		{
			// NOTE(review): after this erase the vector holds n elements
			// (valid indices 0..n-1) while m_currentMeshIndex == n, i.e. one
			// past the end — looks like an off-by-one. Confirm against
			// restoreMeshWithIntensity, which indexes m_vecMeshes[m_currentMeshIndex].
			m_currentMeshIndex = n;
			m_vecMeshes.erase(m_vecMeshes.begin() + m_currentMeshIndex, m_vecMeshes.end());
		}
	}
+
+	bool CGELiquifyFilter::canUndo()
+	{
+		return !m_vecMeshes.empty() && m_currentMeshIndex > 0;
+	}
+
+	bool CGELiquifyFilter::canRedo()
+	{
+		return !m_vecMeshes.empty() && m_currentMeshIndex < m_vecMeshes.size() - 1;
+	}
+
+	bool CGELiquifyFilter::undo()
+	{
+		if(!canUndo())
+			return false;
+
+		--m_currentMeshIndex;
+		m_mesh = m_vecMeshes[m_currentMeshIndex];
+
+		updateBuffers();
+		m_doingRestore = false;
+		return true;
+	}
+
+	bool CGELiquifyFilter::redo()
+	{
+		if(!canRedo())
+			return false;
+
+		++m_currentMeshIndex;
+		m_mesh = m_vecMeshes[m_currentMeshIndex];
+
+		updateBuffers();
+		return true;
+	}
+
	// Snapshot the current mesh onto the undo stack and move the cursor to it.
	// Returns false when undo is disabled (m_undoSteps == 0).
	bool CGELiquifyFilter::pushMesh()
	{
		if(m_undoSteps <= 0)
			return false;


		// Discard any redo states above the current cursor.
		if(!m_vecMeshes.empty() && m_currentMeshIndex < m_vecMeshes.size() - 1)
			m_vecMeshes.erase(m_vecMeshes.begin() + m_currentMeshIndex + 1, m_vecMeshes.end());

		m_vecMeshes.push_back(m_mesh);

		// Keep only the newest m_undoSteps snapshots.
		if(m_vecMeshes.size() > m_undoSteps)
			m_vecMeshes.erase(m_vecMeshes.begin(), m_vecMeshes.end() - m_undoSteps);

		m_currentMeshIndex = (unsigned)m_vecMeshes.size() - 1;
		return true;
	}
+
	//Algorithm created by Wang Yang. Ask me if you wanna know.
	// "Nicer" drag deformation: instead of measuring distance to the stroke's
	// start point only, the falloff uses the distance to the whole stroke
	// segment (point-to-line distance when the projection falls inside the
	// segment's bounding box, otherwise the nearer endpoint), which spreads
	// the effect along the full stroke.
	void CGELiquidationNicerFilter::forwardDeformMesh(Vec2f start, Vec2f end, float w, float h, float radius, float intensity)
	{
		

		m_doingRestore = false;

		CGE_LOG_CODE(
			clock_t t = clock();
		);

		float loopStartY = CGE_MAX(CGE_MIN(start[1], end[1]) - radius, -radius);
		float loopEndY = CGE_MIN(CGE_MAX(start[1], end[1]) + radius, h + radius);
		float loopStartX = CGE_MAX(CGE_MIN(start[0], end[0]) - radius, -radius);
		float loopEndX = CGE_MIN(CGE_MAX(start[0], end[0]) + radius, w + radius);

		CGE_LOG_INFO("Canvas Size: %g, %g\nBoundBox: left:%g, top: %g, right: %g, bottom: %g\n", w, h, loopStartX, loopStartY, loopEndX, loopEndY);

		const Vec2f sz(w, h);
		Vec2f motion = (end - start) / sz;

		Vec2f v2Min = start, v2Max = end;
		if(v2Min[0] > v2Max[0]) std::swap(v2Min[0], v2Max[0]);
		if(v2Min[1] > v2Max[1]) std::swap(v2Min[1], v2Max[1]);

		// Line equation coefficients (eqA*x + eqB*y + eqC = 0); the line is
		// defined by the start and end points.
		float eqA, eqB, eqC, eqD, eqD2;

		// Compute the line equation, handling the vertical-line case.
		{
			float a = start[1] - end[1], b = start[0] - end[0], c = start[0] * end[1] - start[1] * end[0];			

			if(CGE_FLOATCOMP0(b))
			{
				eqA = 1.0f;
				eqB = 0.0f;
				eqC = -start[0];
			}
			else
			{
				eqA = a / b;
				eqB = -1.0f;
				eqC = c / b;
			}
			eqD2 = eqA * eqA + eqB * eqB;
			eqD = sqrtf(eqD2);
		}

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const Vec2f v = m_mesh[lines + j] * sz;
				
				if(v[0] < loopStartX || v[0] > loopEndX ||
					v[1] < loopStartY || v[1] > loopEndY)
					continue;

				// Distance from the point to the (infinite) line.
				float dis1 = fabsf(eqA * v[0] + eqB * v[1] + eqC) / eqD;

				if(dis1 > radius)
				{
					continue;
				}

				// Distances from the point to the two endpoints.
				float dis2 = (v - start).length();
				float dis3 = (v - end).length();
				// Projection of the point onto the line.
				const Vec2f projV(
					(eqB * eqB * v[0] - eqA * eqB * v[1] - eqA * eqC) / eqD2, 
					(eqA * eqA * v[1] - eqB * eqC - eqA * eqB * v[0]) / eqD2);

				// Effective distance: the point-to-line distance when the
				// projection lies within the segment's bounding box, otherwise
				// the distance to the nearer endpoint.
				float dis;

				if(projV[0] < v2Min[0] || projV[0] > v2Max[0] ||
					projV[1] < v2Min[1] || projV[1] > v2Max[1])					
				{
					if(dis2 > radius &&	dis3 > radius)
					{
						continue;
					}
					else
					{
						dis = CGE_MIN(dis2, dis3);
					}					
				}
				else dis = dis1;

				// Smoothstep falloff.
				float percent = (1.0f - dis / radius);
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				m_mesh[lines + j] += motion * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", (double)(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
	// Same segment-distance ("nicer") deformation as forwardDeformMesh above,
	// but the displacement vector is first rotated by -angle (radians).
	void CGELiquidationNicerFilter::pushLeftDeformMesh(Vec2f start, Vec2f end, float w, float h, float radius, float intensity, float angle)
	{
		

		m_doingRestore = false;

		CGE_LOG_CODE(
			clock_t t = clock();
		);

		float loopStartY = CGE_MAX(CGE_MIN(start[1], end[1]) - radius, -radius);
		float loopEndY = CGE_MIN(CGE_MAX(start[1], end[1]) + radius, h + radius);
		float loopStartX = CGE_MAX(CGE_MIN(start[0], end[0]) - radius, -radius);
		float loopEndX = CGE_MIN(CGE_MAX(start[0], end[0]) + radius, w + radius);

		CGE_LOG_INFO("Canvas Size: %g, %g\nBoundBox: left:%g, top: %g, right: %g, bottom: %g\n", w, h, loopStartX, loopStartY, loopEndX, loopEndY);

		const Vec2f sz(w, h);
		Vec2f motion = (end - start) / sz;

		//coordinate transformation
		const float cosRad = cosf(-angle);
		const float sinRad = sinf(-angle);
		motion = Mat2(cosRad, sinRad, -sinRad, cosRad) * motion;

		Vec2f v2Min = start, v2Max = end;
		if(v2Min[0] > v2Max[0]) std::swap(v2Min[0], v2Max[0]);
		if(v2Min[1] > v2Max[1]) std::swap(v2Min[1], v2Max[1]);

		// Line equation coefficients (eqA*x + eqB*y + eqC = 0); the line is
		// defined by the start and end points.
		float eqA, eqB, eqC, eqD, eqD2;

		// Compute the line equation, handling the vertical-line case.
		{
			float a = start[1] - end[1], b = start[0] - end[0], c = start[0] * end[1] - start[1] * end[0];			

			if(CGE_FLOATCOMP0(b))
			{
				eqA = 1.0f;
				eqB = 0.0f;
				eqC = -start[0];
			}
			else
			{
				eqA = a / b;
				eqB = -1.0f;
				eqC = c / b;
			}
			eqD2 = eqA * eqA + eqB * eqB;
			eqD = sqrtf(eqD2);
		}

		for(int i = 0; i < m_meshSize.height; ++i)
		{
			const int lines = i * m_meshSize.width;
			for(int j = 0; j < m_meshSize.width; ++j)
			{
				const Vec2f v = m_mesh[lines + j] * sz;

				if(v[0] < loopStartX || v[0] > loopEndX ||
					v[1] < loopStartY || v[1] > loopEndY)
					continue;

				// Distance from the point to the (infinite) line.
				float dis1 = fabsf(eqA * v[0] + eqB * v[1] + eqC) / eqD;

				if(dis1 > radius)
				{
					continue;
				}

				// Distances from the point to the two endpoints.
				float dis2 = (v - start).length();
				float dis3 = (v - end).length();
				// Projection of the point onto the line.
				const Vec2f projV(
					(eqB * eqB * v[0] - eqA * eqB * v[1] - eqA * eqC) / eqD2, 
					(eqA * eqA * v[1] - eqB * eqC - eqA * eqB * v[0]) / eqD2);

				// Effective distance: point-to-line when the projection lies
				// within the segment's bounding box, else the nearer endpoint.
				float dis;

				if(projV[0] < v2Min[0] || projV[0] > v2Max[0] ||
					projV[1] < v2Min[1] || projV[1] > v2Max[1])					
				{
					if(dis2 > radius &&	dis3 > radius)
					{
						continue;
					}
					else
					{
						dis = CGE_MIN(dis2, dis3);
					}					
				}
				else dis = dis1;

				// Smoothstep falloff.
				float percent = (1.0f - dis / radius);
				percent = percent * percent * (3.0f - 2.0f * percent) * intensity;
				m_mesh[lines + j] += motion * percent;
			}
		}

		updateBuffers();

		CGE_LOG_CODE(
			CGE_LOG_INFO("##########Deform mesh take time: %gs #####\n", float(clock()-t) / CLOCKS_PER_SEC);
		);
	}
+
+	void CGELiquifyFilter::showMesh(bool bShow)
+	{
+#ifdef CGE_DEFORM_SHOW_MESH
+		m_bShowMesh = bShow;
+#endif
+	}
+
+}

+ 140 - 0
media/cge_library/src/main/jni/cge/filters/cgeAdvancedEffects.cpp

@@ -0,0 +1,140 @@
+/*
+ * cgeAdvancedEffects.cpp
+ *
+ *  Created on: 2013-12-13
+ *      Author: Wang Yang
+ */
+
+#include "cgeAdvancedEffects.h"
+
// Shared factory body: allocate the filter, run init(), and return the
// instance — or nullptr when initialization fails.
// (NULL replaced with nullptr for consistency with the rest of this file.)
#define COMMON_FUNC(type) \
type* proc = new type();\
if(!proc->init())\
{\
	delete proc;\
	proc = nullptr;\
}\
return proc;\
+
+namespace CGE
+{
	// Factory helpers: each allocates the filter, runs init(), and returns a
	// null pointer when initialization fails (see COMMON_FUNC above).
	CGEEmbossFilter* createEmbossFilter()
	{
		COMMON_FUNC(CGEEmbossFilter);
	}

	CGEEdgeFilter* createEdgeFilter()
	{
		COMMON_FUNC(CGEEdgeFilter);
	}

	CGEEdgeSobelFilter* createEdgeSobelFilter()
	{
		COMMON_FUNC(CGEEdgeSobelFilter);
	}

	CGERandomBlurFilter* createRandomBlurFilter()
	{
		COMMON_FUNC(CGERandomBlurFilter);
	}
	
	CGEBilateralBlurFilter* createBilateralBlurFilter()
	{
		COMMON_FUNC(CGEBilateralBlurFilter);
	}

    CGEBilateralBlurBetterFilter* createBilateralBlurBetterFilter()
    {
        COMMON_FUNC(CGEBilateralBlurBetterFilter);
    }

	CGEMosaicBlurFilter* createMosaicBlurFilter()
	{
		COMMON_FUNC(CGEMosaicBlurFilter);
	}
+	
	// Liquify factories: these cannot use COMMON_FUNC because the mesh-based
	// filters are initialized via initWithMesh(...) rather than init().
	// Ownership of the returned filter passes to the caller; nullptr on failure.
	CGELiquifyFilter* getLiquidationFilter(float ratio, float stride)
	{
		CGELiquifyFilter* proc = new CGELiquifyFilter;
		if(!proc->initWithMesh(ratio, stride))
		{
			delete proc;
			return nullptr;
		}
		return proc;
	}

	CGELiquifyFilter* getLiquidationFilter(float width, float height, float stride)
	{
		CGELiquifyFilter* proc = new CGELiquifyFilter;
		if(!proc->initWithMesh(width, height, stride))
		{
			delete proc;
			return nullptr;
		}
		return proc;
	}

	CGELiquidationNicerFilter* getLiquidationNicerFilter(float ratio, float stride)
	{
		CGELiquidationNicerFilter* proc = new CGELiquidationNicerFilter;
		if(!proc->initWithMesh(ratio, stride))
		{
			delete proc;
			return nullptr;
		}
		return proc;
	}

	CGELiquidationNicerFilter* getLiquidationNicerFilter(float width, float height, float stride)
	{
		CGELiquidationNicerFilter* proc = new CGELiquidationNicerFilter;
		if(!proc->initWithMesh(width, height, stride))
		{
			delete proc;
			return nullptr;
		}
		return proc;
	}
+
	// More COMMON_FUNC-based factories; each returns a null pointer when the
	// filter's init() fails.
	CGEHalftoneFilter* createHalftoneFilter()
	{
		COMMON_FUNC(CGEHalftoneFilter);
	}

	CGEPolarPixellateFilter* createPolarPixellateFilter()
	{
		COMMON_FUNC(CGEPolarPixellateFilter);
	}

	CGEPolkaDotFilter* createPolkaDotFilter()
	{
		COMMON_FUNC(CGEPolkaDotFilter);
	}

	CGECrosshatchFilter* createCrosshatchFilter()
	{
		COMMON_FUNC(CGECrosshatchFilter);
	}

	CGEHazeFilter* createHazeFilter()
	{
		COMMON_FUNC(CGEHazeFilter);
	}

	CGELerpblurFilter* createLerpblurFilter()
	{
		COMMON_FUNC(CGELerpblurFilter);
	}

	CGESketchFilter* createSketchFilter()
	{
		COMMON_FUNC(CGESketchFilter);
	}

    CGEBeautifyFilter* createBeautifyFilter()
    {
        COMMON_FUNC(CGEBeautifyFilter);
    }
+    
+ }

+ 67 - 0
media/cge_library/src/main/jni/cge/filters/cgeAdvancedEffectsCommon.cpp

@@ -0,0 +1,67 @@
+/*
+ * cgeAdvancedEffectsCommon.cpp
+ *
+ *  Created on: 2013-12-13
+ *      Author: Wang Yang
+ */
+
+#include "cgeAdvancedEffectsCommon.h"
+
+namespace CGE
+ {
	 CGEConstString CGEAdvancedEffectOneStepFilterHelper::paramStepsName = "samplerSteps";

	 // Single-pass render helper: draws one fullscreen fan with the
	 // "samplerSteps" uniform set to the size of one texel in each direction.
	 void CGEAdvancedEffectOneStepFilterHelper::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
	 {
		 handler->setAsTarget();
		 m_program.bind();
		 glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
		 glEnableVertexAttribArray(0);
		 glActiveTexture(GL_TEXTURE0);
		 glBindTexture(GL_TEXTURE_2D, srcTexture);

		 if(m_uniformParam != nullptr)
			 m_uniformParam->assignUniforms(handler, m_program.programID());

		 // Additional functions for new effects.
		 {
			 CGESizei sz = handler->getOutputFBOSize();
			 m_program.sendUniformf(paramStepsName, 1.0f / sz.width, 1.0f / sz.height);
		 }
		 glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
		 cgeCheckGLError("glDrawArrays");
	 }
+
+	 //////////////////////////////////////////////////////////////////////////
+
	 CGEConstString CGEAdvancedEffectTwoStepFilterHelper::paramStepsName = "samplerSteps";

	 // Two-pass (separable) render helper: pass one samples vertically into
	 // the handler's buffer, then the FBOs are swapped and pass two samples
	 // horizontally from the intermediate result.
	 void CGEAdvancedEffectTwoStepFilterHelper::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
	 {
		 CGESizei sz = handler->getOutputFBOSize();
		 m_program.bind();

         glActiveTexture(GL_TEXTURE0);

		 //Pass one: vertical step only (0, 1/height).
		 handler->setAsTarget();
		 {
			 glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
			 glEnableVertexAttribArray(0);
			 glBindTexture(GL_TEXTURE_2D, srcTexture);

			 m_program.sendUniformf(paramStepsName, 0.0f, 1.0f / sz.height);
			 glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
		 }
		 //Pass Two: horizontal step only (1/width, 0), reading the pass-one output.
		 handler->swapBufferFBO();
		 handler->setAsTarget();
		 glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
		 glEnableVertexAttribArray(0);
		 glBindTexture(GL_TEXTURE_2D, handler->getBufferTextureID());

		 m_program.sendUniformf(paramStepsName, 1.0f / sz.width, 0.0f);
		 glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
	 }
+
+ }

+ 169 - 0
media/cge_library/src/main/jni/cge/filters/cgeBeautifyFilter.cpp

@@ -0,0 +1,169 @@
+/* cgeBeautifyFilter.cpp
+ *
+ *  Created on: 2016-3-22
+ *      Author: Wang Yang
+ */
+
+
+#include "cgeBeautifyFilter.h"
+#include <cmath>
+
// Beautify (skin-smoothing) fragment shader: samples the green channel at 20
// ring/diamond offsets around each pixel, derives a local-contrast value, runs
// it through five rounds of a contrast curve, and blends the smoothed result
// with the original by 'intensity'.
// (No comments are added inside the macro: its body is stringized into the
// GLSL source, so any change there would alter the runtime shader string.)
CGEConstString s_fshBeautify = CGE_SHADER_STRING_PRECISION_H
(
 uniform sampler2D inputImageTexture;
 varying vec2 textureCoordinate;
 
 uniform vec2 imageStep;
 uniform float intensity;
 
 void main()
{
    
    vec2 blurCoordinates[20];
    
    blurCoordinates[0] = textureCoordinate + vec2(0.0, -10.0) * imageStep;
    blurCoordinates[1] = textureCoordinate + vec2(5.0, -8.0) * imageStep;
    blurCoordinates[2] = textureCoordinate + vec2(8.0, -5.0) * imageStep;
    blurCoordinates[3] = textureCoordinate + vec2(10.0, 0.0) * imageStep;
    blurCoordinates[4] = textureCoordinate + vec2(8.0, 5.0) * imageStep;
    blurCoordinates[5] = textureCoordinate + vec2(5.0, 8.0) * imageStep;
    blurCoordinates[6] = textureCoordinate + vec2(0.0, 10.0) * imageStep;
    blurCoordinates[7] = textureCoordinate + vec2(-5.0, 8.0) * imageStep;
    blurCoordinates[8] = textureCoordinate + vec2(-8.0, 5.0) * imageStep;
    blurCoordinates[9] = textureCoordinate + vec2(-10.0, 0.0) * imageStep;
    blurCoordinates[10] = textureCoordinate + vec2(-8.0, -5.0) * imageStep;
    blurCoordinates[11] = textureCoordinate + vec2(-5.0, -8.0) * imageStep;
    blurCoordinates[12] = textureCoordinate + vec2(0.0, -6.0) * imageStep;
    blurCoordinates[13] = textureCoordinate + vec2(-4.0, -4.0) * imageStep;
    blurCoordinates[14] = textureCoordinate + vec2(-6.0, 0.0) * imageStep;
    blurCoordinates[15] = textureCoordinate + vec2(-4.0, 4.0) * imageStep;
    blurCoordinates[16] = textureCoordinate + vec2(0.0, 6.0) * imageStep;
    blurCoordinates[17] = textureCoordinate + vec2(4.0, 4.0) * imageStep;
    blurCoordinates[18] = textureCoordinate + vec2(6.0, 0.0) * imageStep;
    blurCoordinates[19] = textureCoordinate + vec2(4.0, -4.0) * imageStep;
    
    vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
    
    float sampleColor = centralColor.g * 24.0;
    
    sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g;
    sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g;
    
    sampleColor = sampleColor/44.0;
    
    float dis = centralColor.g - sampleColor + 0.5;
    
    if(dis <= 0.5)
    {
        dis = dis * dis * 2.0;
    }
    else
    {
        dis = 1.0 - ((1.0 - dis)*(1.0 - dis) * 2.0);
    }
    
    if(dis <= 0.5)
    {
        dis = dis * dis * 2.0;
    }
    else
    {
        dis = 1.0 - ((1.0 - dis)*(1.0 - dis) * 2.0);
    }
    
    if(dis <= 0.5)
    {
        dis = dis * dis * 2.0;
    }
    else
    {
        dis = 1.0 - ((1.0 - dis)*(1.0 - dis) * 2.0);
    }
    
    if(dis <= 0.5)
    {
        dis = dis * dis * 2.0;
    }
    else
    {
        dis = 1.0 - ((1.0 - dis)*(1.0 - dis) * 2.0);
    }
    
    if(dis <= 0.5)
    {
        dis = dis * dis * 2.0;
    }
    else
    {
        dis = 1.0 - ((1.0 - dis)*(1.0 - dis) * 2.0);
    }
    
    vec3 result = centralColor * 1.065 - dis * 0.065;
    
    float hue = dot(result, vec3(0.299,0.587,0.114)) - 0.3;
    
    hue = pow(clamp(hue, 0.0, 1.0), 0.3);
    
    result = centralColor * (1.0 - hue) + result * hue;
    result = (result - 0.8) * 1.06 + 0.8;
    
    result = pow(result, vec3(0.75));
    
    result = mix(centralColor, result, intensity);
    
    gl_FragColor = vec4(result, 1.0);
}
 );
+
+namespace CGE
+{
	// Compile the beautify program and apply defaults: a 720x1280 sampling
	// step and full intensity. Returns false when shader compilation fails.
	bool CGEBeautifyFilter::init()
    {
        if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshBeautify))
        {
            setImageSize(720.0f, 1280.0f);
            setIntensity(1.0f);
            return true;
        }
        return false;
    }
+    
    // Upload the blend factor and cache it; values with |intensity| < 0.05
    // are snapped to 0 so render2Texture can skip the pass entirely.
    // NOTE(review): the uniform receives the raw (unsnapped) value —
    // presumably fine because rendering is skipped whenever m_intensity == 0;
    // confirm.
    void CGEBeautifyFilter::setIntensity(float intensity)
    {
        m_program.bind();
        m_program.sendUniformf("intensity", intensity);
        m_intensity = fabs(intensity) < 0.05f ? 0.0f : intensity;
    }
+    
+    void CGEBeautifyFilter::setImageSize(float width, float height, float mul)
+    {
+        m_program.bind();
+        m_program.sendUniformf("imageStep", mul / width, mul / height);
+    }
+    
+    void CGEBeautifyFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+    {
+        if(m_intensity != 0.0f)
+        {
+            CGEImageFilterInterface::render2Texture(handler, srcTexture, vertexBufferID);
+        }
+    }
+}

+ 221 - 0
media/cge_library/src/main/jni/cge/filters/cgeBilateralBlurFilter.cpp

@@ -0,0 +1,221 @@
+/* cgeBilateralBlurFilter.cpp
+*
+*  Created on: 2014-4-1
+*      Author: Wang Yang
+*/
+
+#include "cgeBilateralBlurFilter.h"
+#include <cmath>
+
+// Fragment shader: fixed-radius bilateral blur. Gaussian weights (uploaded
+// into blurFactors[9] at init time) are modulated by each sample's color
+// distance from the central texel, so strong edges are preserved.
+// A per-pixel pseudo-random offset dithers the sample positions to hide banding.
+CGEConstString s_fshBilateralBlur = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+//const int GAUSSIAN_SAMPLES = 9;
+uniform float blurFactors[9];// = float[GAUSSIAN_SAMPLES](0.05, 0.09, 0.12, 0.15, 0.18, 0.15, 0.12, 0.09, 0.05);
+
+uniform float distanceNormalizationFactor;
+uniform float blurSamplerScale;
+uniform vec2 samplerSteps;
+
+const int samplerRadius = 4;
+
+// Cheap hash-style pseudo-random in [0, 1) seeded by the texture coordinate.
+float random(vec2 seed)
+{
+	return fract(sin(dot(seed ,vec2(12.9898,78.233))) * 43758.5453);
+}
+
+void main()
+{
+	vec4 centralColor = texture2D(inputImageTexture, textureCoordinate);
+	float gaussianWeightTotal = blurFactors[4];
+	vec4 sum = centralColor * blurFactors[4];
+	vec2 stepScale = blurSamplerScale * samplerSteps;
+	float offset = random(textureCoordinate) - 0.5;
+
+	// Symmetric taps: sample i steps on both sides of the central texel.
+    for(int i = 0; i < samplerRadius; ++i)
+	{
+		vec2 dis = (float(i) + offset) * stepScale;
+		
+        float blurfactor = blurFactors[samplerRadius-i];
+
+		{
+			vec4 sampleColor1 = texture2D(inputImageTexture, textureCoordinate + dis);
+			float distanceFromCentralColor1 = min(distance(centralColor, sampleColor1) * distanceNormalizationFactor, 1.0);
+            float gaussianWeight1 = blurfactor * (1.0 - distanceFromCentralColor1);
+			gaussianWeightTotal += gaussianWeight1;
+			sum += sampleColor1 * gaussianWeight1;
+		}
+
+		//////////////////////////////////////////////////////////////////////////
+
+		{
+			vec4 sampleColor2 = texture2D(inputImageTexture, textureCoordinate - dis);
+			float distanceFromCentralColor2 = min(distance(centralColor, sampleColor2) * distanceNormalizationFactor, 1.0);
+            float gaussianWeight2 = blurfactor * (1.0 - distanceFromCentralColor2);
+			gaussianWeightTotal += gaussianWeight2;
+			sum += sampleColor2 * gaussianWeight2;
+		}
+	}
+
+	gl_FragColor = sum / gaussianWeightTotal;
+}
+);
+
+// Variant used by CGEBilateralBlurBetterFilter: the radius is a uniform
+// (dynamic) and weights fall off linearly with distance ("percent") instead
+// of using a precomputed kernel. The distance factor is additionally scaled
+// by the central luminance so dark areas are smoothed more.
+CGEConstString s_fshBilateralBlur2 = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform float distanceNormalizationFactor;
+uniform float blurSamplerScale;
+uniform vec2 samplerSteps;
+
+uniform int samplerRadius;
+
+const float arg = 0.5;
+
+float random(vec2 seed)
+{
+	return fract(sin(dot(seed ,vec2(12.9898,78.233))) * 43758.5453);
+}
+
+void main()
+{
+	vec4 centralColor = texture2D(inputImageTexture, textureCoordinate);
+	float lum = dot(centralColor.rgb, vec3(0.299, 0.587, 0.114));
+	float factor = (1.0 + arg) / (arg + lum) * distanceNormalizationFactor;
+
+	float gaussianWeightTotal = 1.0;
+	vec4 sum = centralColor * gaussianWeightTotal;
+	vec2 stepScale = blurSamplerScale * samplerSteps / float(samplerRadius);
+	float offset = random(textureCoordinate) - 0.5;
+
+	for(int i = 1; i <= samplerRadius; ++i)
+	{
+		vec2 dis = (float(i) + offset) * stepScale;
+		float percent = 1.0 - (float(i) + offset) / float(samplerRadius);
+
+		{
+			vec4 sampleColor1 = texture2D(inputImageTexture, textureCoordinate + dis);
+			float distanceFromCentralColor1 = min(distance(centralColor, sampleColor1) * factor, 1.0);
+			float gaussianWeight1 = percent * (1.0 - distanceFromCentralColor1);
+			gaussianWeightTotal += gaussianWeight1;
+			sum += sampleColor1 * gaussianWeight1;
+		}
+
+		//////////////////////////////////////////////////////////////////////////
+
+		{
+			vec4 sampleColor2 = texture2D(inputImageTexture, textureCoordinate - dis);
+			float distanceFromCentralColor2 = min(distance(centralColor, sampleColor2) * factor, 1.0);
+			float gaussianWeight2 = percent * (1.0 - distanceFromCentralColor2);
+			gaussianWeightTotal += gaussianWeight2;
+			sum += sampleColor2 * gaussianWeight2;
+		}
+	}
+
+	gl_FragColor = sum / gaussianWeightTotal;
+}
+);
+
+namespace CGE
+{
+	CGEConstString CGEBilateralBlurFilter::paramDistanceFactorName = "distanceNormalizationFactor";
+	CGEConstString CGEBilateralBlurFilter::paramBlurSamplerScaleName = "blurSamplerScale";
+	CGEConstString CGEBilateralBlurFilter::paramBlurFactorsName = "blurFactors";
+
+	bool CGEBilateralBlurFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshBilateralBlur))
+		{
+			setBlurScale(4.0f);
+			setDistanceNormalizationFactor(8.0);
+			GLint loc = m_program.uniformLocation(paramBlurFactorsName);
+			if(loc < 0)
+				return false;
+			const float factors[9] = {0.05f, 0.09f, 0.12f, 0.15f, 0.18f, 0.15f, 0.12f, 0.09f, 0.05f};
+			glUniform1fv(loc, 9, factors);
+			return true;
+		}
+		return false;
+	}
+
+	void CGEBilateralBlurFilter::setBlurScale(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramBlurSamplerScaleName, value / 4.0f);
+	}
+
+	void CGEBilateralBlurFilter::setDistanceNormalizationFactor(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramDistanceFactorName, value);
+	}
+
+    CGEConstString CGEBilateralBlurBetterFilter::paramBlurRadiusName = "samplerRadius";
+
+    bool CGEBilateralBlurBetterFilter::init()
+    {
+        if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshBilateralBlur2))
+        {
+            setBlurScale(4.0f);
+            setDistanceNormalizationFactor(8.0f);
+            setSamplerRadiusLimit(15);
+            return true;
+        }
+        return false;
+    }
+
+    void CGEBilateralBlurBetterFilter::setSamplerRadiusLimit(int limit)
+    {
+        m_limit = limit;
+    }
+
+    void CGEBilateralBlurBetterFilter::setBlurScale(float value)
+    {
+        m_program.bind();
+        m_program.sendUniformf(paramBlurSamplerScaleName, value);
+        int radius = CGE_MIN(m_limit, (int)value);
+        if(radius < 0)
+            radius = 0;
+        m_program.sendUniformi(paramBlurRadiusName, radius);
+    }
+
+	//////////////////////////////////////////////////////////////////////////
+
+	bool CGEBilateralWrapperFilter::init()
+	{
+		m_proc = new CGEBilateralBlurFilter;
+
+		if(!m_proc->init())
+		{
+			delete m_proc;
+			m_proc = nullptr;
+		}
+		return true;
+	}
+
+	void CGEBilateralWrapperFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+	{
+		assert(m_proc != nullptr); // Filter 尚未初始化成功
+
+		float blurScale = 200.0f * powf(0.5f, m_blurScale / 50.0f);
+
+		CGESizei sz = handler->getOutputFBOSize();
+
+		m_proc->setBlurScale(CGE::CGE_MIN(sz.width, sz.height) / blurScale);
+
+		for(int i = 0; i < m_repeatTimes; )
+		{
+			m_proc->render2Texture(handler, srcTexture, vertexBufferID);
+
+			if(++i < m_repeatTimes)
+			{
+				handler->swapBufferFBO();
+			}
+		}
+	}
+
+}

Tiedoston diff-näkymää rajattu, sillä se on liian suuri
+ 1142 - 0
media/cge_library/src/main/jni/cge/filters/cgeBlendFilter.cpp


+ 98 - 0
media/cge_library/src/main/jni/cge/filters/cgeBrightnessAdjust.cpp

@@ -0,0 +1,98 @@
+/*
+* cgeBrightnessAdjust.cpp
+*
+*  Created on: 2013-12-26
+*      Author: Wang Yang
+*/
+
+#include "cgeBrightnessAdjust.h"
+#include <cmath>
+
+const static char* const s_fshBrightness = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+uniform float intensity;
+
+void main()
+{
+	vec4 src = texture2D(inputImageTexture, textureCoordinate);
+
+    float fac = 1.0 / intensity;
+    float fac2 = 1.0 - fac;
+    vec3 tmp1 = fac2 - src.rgb;
+    vec3 tmp2 = sqrt(fac2 * fac2 + (4.0 * fac) * src.rgb);
+    
+    src.rgb = tmp1 + tmp2 * (step(0.0, intensity) * 2.0 - 1.0);
+    
+	gl_FragColor = src;
+}
+);
+
+namespace CGE
+{
+	CGEConstString CGEBrightnessFilter::paramName = "intensity";
+
+	bool CGEBrightnessFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshBrightness))
+		{
+			return true;
+		}
+		return false;
+	}
+
+	void CGEBrightnessFilter::setIntensity(float value)
+	{
+		m_program.bind();
+        
+        //优先保证低精度设备不出现失真
+        m_intensity = fabsf(value) < 0.05f ? 0.0f : value;
+        m_program.sendUniformf(paramName, m_intensity);
+	}
+    
+    void CGEBrightnessFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+    {
+        if(m_intensity == 0.0f)
+        {
+            handler->swapBufferFBO();
+        }
+        else
+        {
+            CGEImageFilterInterface::render2Texture(handler, srcTexture, vertexBufferID);
+        }
+    }
+
+	//////////////////////////////////////////////////////////////////////////
+
+	bool CGEBrightnessFastFilter::init()
+	{
+		if(CGEFastAdjustRGBFilter::init())
+		{
+			assignCurveArray();
+			return true;
+		}
+		return false;
+	}
+
+	void CGEBrightnessFastFilter::setIntensity(float value)
+	{
+		if(fabsf(value) < 0.001) 
+		{
+			initCurveArray();
+			assignCurveArray();
+			return;
+		}
+		std::vector<float>::size_type sz = m_curveRGB.size();
+		int iSign = value > 0.0f ? 1 : -1;
+		const float SQRT2 = 1.41421f;
+		const float fac = SQRT2 / value;
+		for(std::vector<float>::size_type t = 0; t != sz; ++t)
+		{
+			const float v = float(t) / (CGE_CURVE_PRECISION - 1.0f);
+			m_curveRGB[t] = 1.0f - v - (fac / SQRT2) + iSign * sqrtf(1.0f - SQRT2 * fac + 2.0f * SQRT2 * v * fac + 0.5f * fac * fac);
+		}
+		assignCurveArray();
+	}
+
+}

+ 169 - 0
media/cge_library/src/main/jni/cge/filters/cgeColorBalanceAdjust.cpp

@@ -0,0 +1,169 @@
+/*
+ * cgeColorBalanceAdjust.cpp
+ *
+ *  Created on: 2015-3-30
+ *      Author: Wang Yang
+ */
+
+#include "cgeColorBalanceAdjust.h"
+
+//GIMP color balance filter shader
+
+// Applies per-channel midtone shifts (GIMP-style color balance) while
+// preserving the original luminosity via an RGB->HSL->RGB round trip.
+static CGEConstString s_colorBalance = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+
+uniform sampler2D inputImageTexture;
+uniform float redShift;
+uniform float greenShift;
+uniform float blueShift;
+
+// Luminance as the HSL "L" component: (max + min) / 2.
+float RGBToL(vec3 color)
+{
+	float fmin = min(min(color.r, color.g), color.b);    //Min. value of RGB
+	float fmax = max(max(color.r, color.g), color.b);    //Max. value of RGB
+
+	return (fmax + fmin) / 2.0; // Luminance
+}
+
+vec3 RGBToHSL(vec3 color)
+{
+	vec3 hsl; // init to 0 to avoid warnings ? (and reverse if + remove first part)
+
+	float fmin = min(min(color.r, color.g), color.b);    //Min. value of RGB
+	float fmax = max(max(color.r, color.g), color.b);    //Max. value of RGB
+	float delta = fmax - fmin;             //Delta RGB value
+
+	hsl.z = (fmax + fmin) / 2.0; // Luminance
+
+	if (delta == 0.0)		//This is a gray, no chroma...
+	{
+		hsl.x = 0.0;	// Hue
+		hsl.y = 0.0;	// Saturation
+	}
+	else                                    //Chromatic data...
+	{
+		if (hsl.z < 0.5)
+			hsl.y = delta / (fmax + fmin); // Saturation
+		else
+			hsl.y = delta / (2.0 - fmax - fmin); // Saturation
+
+		float deltaR = (((fmax - color.r) / 6.0) + (delta / 2.0)) / delta;
+		float deltaG = (((fmax - color.g) / 6.0) + (delta / 2.0)) / delta;
+		float deltaB = (((fmax - color.b) / 6.0) + (delta / 2.0)) / delta;
+
+		if (color.r == fmax )
+			hsl.x = deltaB - deltaG; // Hue
+		else if (color.g == fmax)
+			hsl.x = (1.0 / 3.0) + deltaR - deltaB; // Hue
+		else if (color.b == fmax)
+			hsl.x = (2.0 / 3.0) + deltaG - deltaR; // Hue
+
+		if (hsl.x < 0.0)
+			hsl.x += 1.0; // Hue
+		else if (hsl.x > 1.0)
+			hsl.x -= 1.0; // Hue
+	}
+
+	return hsl;
+}
+
+// Helper for HSLToRGB: converts one hue sector back to a channel value.
+float HueToRGB(float f1, float f2, float hue)
+{
+	if (hue < 0.0)
+		hue += 1.0;
+	else if (hue > 1.0)
+		hue -= 1.0;
+	float res;
+	if ((6.0 * hue) < 1.0)
+		res = f1 + (f2 - f1) * 6.0 * hue;
+	else if ((2.0 * hue) < 1.0)
+		res = f2;
+	else if ((3.0 * hue) < 2.0)
+		res = f1 + (f2 - f1) * ((2.0 / 3.0) - hue) * 6.0;
+	else
+		res = f1;
+	return res;
+}
+
+vec3 HSLToRGB(vec3 hsl)
+{
+	vec3 rgb;
+
+	if (hsl.y == 0.0)
+		rgb = vec3(hsl.z); // Luminance
+	else
+	{
+		float f2;
+
+		if (hsl.z < 0.5)
+			f2 = hsl.z * (1.0 + hsl.y);
+		else
+			f2 = (hsl.z + hsl.y) - (hsl.y * hsl.z);
+
+		float f1 = 2.0 * hsl.z - f2;
+
+		rgb.r = HueToRGB(f1, f2, hsl.x + (1.0/3.0));
+		rgb.g = HueToRGB(f1, f2, hsl.x);
+		rgb.b= HueToRGB(f1, f2, hsl.x - (1.0/3.0));
+	}
+
+	return rgb;
+}
+
+void main()
+{
+	vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
+
+	// New way:
+	float lightness = RGBToL(textureColor.rgb);
+	vec3 shift = vec3(redShift, greenShift, blueShift);
+
+	const float a = 0.25;
+	const float b = 0.333;
+	const float scale = 0.7;
+
+	// Midtone mask: ramps up from shadows, back down toward highlights,
+	// so the shifts mainly affect midtones (matches GIMP's behavior).
+	vec3 midtones = (clamp((lightness - b) /  a + 0.5, 0.0, 1.0) * clamp ((lightness + b - 1.0) / -a + 0.5, 0.0, 1.0) * scale) * shift;
+
+	vec3 newColor = textureColor.rgb + midtones;
+	newColor = clamp(newColor, 0.0, 1.0);
+
+	// preserve luminosity
+	vec3 newHSL = RGBToHSL(newColor);
+	float oldLum = RGBToL(textureColor.rgb);
+	textureColor.rgb = HSLToRGB(vec3(newHSL.x, newHSL.y, oldLum));
+
+	gl_FragColor = textureColor;
+}
+);
+
+namespace CGE
+{
+	CGEConstString CGEColorBalanceFilter::paramRedShiftName = "redShift";
+	CGEConstString CGEColorBalanceFilter::paramGreenShiftName = "greenShift";
+	CGEConstString CGEColorBalanceFilter::paramBlueShiftName = "blueShift";
+
+	bool CGEColorBalanceFilter::init()
+	{
+		return initShadersFromString(g_vshDefaultWithoutTexCoord, s_colorBalance);
+	}
+
+	// Per-channel shift setters; presumably in [-1, 1] like GIMP's sliders —
+	// TODO confirm against callers.
+	void CGEColorBalanceFilter::setRedShift(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramRedShiftName, value);
+	}
+
+	void CGEColorBalanceFilter::setGreenShift(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramGreenShiftName, value);
+	}
+
+	void CGEColorBalanceFilter::setBlueShift(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramBlueShiftName, value);
+	}
+
+}

+ 65 - 0
media/cge_library/src/main/jni/cge/filters/cgeColorLevelAdjust.cpp

@@ -0,0 +1,65 @@
+/*
+ * cgeColorLevelAdjust.cpp
+ *
+ *  Created on: 2014-1-20
+ *      Author: Wang Yang
+ */
+
+#include "cgeColorLevelAdjust.h"
+
+// Photoshop-style "levels": remap [dark, light] to [0, 1], then apply gamma.
+const static char* const s_fshColorLevel = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform vec2 colorLevel;
+uniform float gamma;
+
+vec3 levelFunc(vec3 src, vec2 colorLevel) 
+{
+	return clamp((src - colorLevel.x) / (colorLevel.y - colorLevel.x), 0.0, 1.0);
+}
+
+vec3 gammaFunc(vec3 src, float value) //value: 0~1
+{
+	return clamp(pow(src, vec3(value)), 0.0, 1.0);
+}
+
+void main()
+{
+	vec4 src = texture2D(inputImageTexture, textureCoordinate);
+	src.rgb = levelFunc(src.rgb, colorLevel);
+	src.rgb = gammaFunc(src.rgb, gamma);
+	gl_FragColor = src;
+}
+);
+
+namespace CGE
+{
+	CGEConstString CGEColorLevelFilter::paramLevelName = "colorLevel";
+	CGEConstString CGEColorLevelFilter::paramGammaName = "gamma";
+
+	bool CGEColorLevelFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshColorLevel))
+		{
+			// Defaults are a no-op: identity gamma and the full [0, 1] range.
+			setGamma(1.0f);
+			setLevel(0.0f, 1.0f);
+			return true;
+		}
+		return false;
+	}
+
+	// dark/light are the input black/white points; values between them are
+	// stretched to the full range. No check that dark < light — the shader
+	// divides by (light - dark).
+	void CGEColorLevelFilter::setLevel(float dark, float light)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramLevelName, dark, light);
+	}
+
+	void CGEColorLevelFilter::setGamma(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramGammaName, value);
+	}
+
+}

+ 234 - 0
media/cge_library/src/main/jni/cge/filters/cgeColorMappingFilter.cpp

@@ -0,0 +1,234 @@
+/*
+* cgeColorMappingFilter.cpp
+*
+*  Created on: 2016-8-5
+*      Author: Wang Yang
+* Description: color mapping
+*/
+
+#include "cgeColorMappingFilter.h"
+#include "cgeTextureUtils.h"
+#include <algorithm>
+
+// Vertex shader: positions come in as [0,1] cell corners and are expanded
+// to clip space; texture coordinates are streamed per frame (see below).
+static CGEConstString s_vshMapingBuffered= CGE_SHADER_STRING
+(
+attribute vec2 vPosition;
+attribute vec2 vTexPosition;
+varying vec2 textureCoordinate;
+
+
+void main()
+{
+	gl_Position = vec4(vPosition * 2.0 - 1.0, 0.0, 1.0);
+	textureCoordinate = vTexPosition;
+}
+);
+
+
+static CGEConstString s_fshMapingBuffered = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+void main()
+{
+	gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
+// 	gl_FragColor.rg = textureCoordinate;
+// 	gl_FragColor.a = 1.0;
+}
+);
+
+namespace CGE
+{
+	// Maps each cell of a low-resolution grid to a sub-rectangle of the
+	// mapping texture, chosen per frame by reading back the rendered source.
+	class CGEColorMappingFilterBuffered_Area : public CGEColorMappingFilter
+	{
+		// Private: instances are created via the static create() factory below.
+		CGEColorMappingFilterBuffered_Area() : m_mappingVertBuffer(0), m_mappingTexVertBuffer(0), m_drawer(nullptr) {}
+	public:
+		~CGEColorMappingFilterBuffered_Area()
+		{
+			delete m_drawer;
+			CGE_DELETE_GL_OBJS(glDeleteBuffers, m_mappingVertBuffer, m_mappingTexVertBuffer);
+		}
+
+		static inline CGEColorMappingFilterBuffered_Area* create()
+		{
+			CGEColorMappingFilterBuffered_Area* f = new CGEColorMappingFilterBuffered_Area();
+			f->m_drawer = TextureDrawer::create();
+			if(!f->init() || f->m_drawer == nullptr)
+			{
+				delete f;
+				f = nullptr;
+			}
+			return f;
+		}
+
+		bool init()
+		{
+			// Attribute 1 is bound before link so the streamed tex-coord
+			// buffer can use a fixed location (0 is vPosition).
+			m_program.bindAttribLocation("vTexPosition", 1);
+			if(m_program.initWithShaderStrings(s_vshMapingBuffered, s_fshMapingBuffered))
+			{
+				return true;
+			}
+			return false;
+		}
+
+		void render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+		{
+			assert(m_mappingAreas.size() > 0 && m_cacheBufferData.size() >= m_texUnitResolution.width * m_texUnitResolution.height && m_texVertBufferData.size() >= m_texUnitResolution.width * m_texUnitResolution.height * 6);
+
+			// 1. Downsample the source into the unit-resolution grid and
+			//    read it back to the CPU.
+			handler->setAsTarget();
+			glViewport(0, 0, m_texUnitResolution.width, m_texUnitResolution.height);
+			m_drawer->drawTexture(srcTexture);
+
+			glFinish();
+			glReadPixels(0, 0, m_texUnitResolution.width, m_texUnitResolution.height, GL_RGBA, GL_UNSIGNED_BYTE, m_cacheBufferData.data());
+
+			// 2. For every grid cell, pick a mapping area from the pixel's
+			//    green channel (v[1]) and emit the two triangles' tex coords.
+			//    NOTE(review): keying on green only — confirm intended (see
+			//    the commented-out lines in the fragment shader above).
+			int index = 0;
+			const int cnt = m_texUnitResolution.width * m_texUnitResolution.height;
+			const int mappingSize = (int)m_mappingAreas.size() - 1;
+
+			for(int i = 0; i != cnt; ++i)
+			{
+				const auto& v = m_cacheBufferData[i];
+				const int mappingIndex = v[1] * (mappingSize / 255.0f);
+				const auto& m = m_mappingAreas[mappingIndex];
+				const auto& a = m.area;
+				const Vec2f rb(a[0] + a[2], a[1] + a[3]);
+
+				m_texVertBufferData[index] = Vec2f(a[0], a[1]);
+				m_texVertBufferData[index + 1] = Vec2f(rb[0], a[1]);
+				m_texVertBufferData[index + 2] = Vec2f(a[0], rb[1]);
+
+				m_texVertBufferData[index + 3] = m_texVertBufferData[index + 1];
+				m_texVertBufferData[index + 4] = rb;
+				m_texVertBufferData[index + 5] = m_texVertBufferData[index + 2];
+				index += 6;
+			}
+
+			// 3. Draw the grid with the static cell positions and the
+			//    freshly streamed texture coordinates.
+			handler->setAsTarget();
+			m_program.bind();
+
+			glBindBuffer(GL_ARRAY_BUFFER, m_mappingVertBuffer);
+			glEnableVertexAttribArray(0);
+			glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
+
+			glBindBuffer(GL_ARRAY_BUFFER, m_mappingTexVertBuffer);
+			glBufferSubData(GL_ARRAY_BUFFER, 0, m_texVertBufferData.size() * sizeof(m_texVertBufferData[0]), m_texVertBufferData.data());
+			glEnableVertexAttribArray(1);
+			glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, 0);
+
+			glActiveTexture(GL_TEXTURE0);
+			glBindTexture(GL_TEXTURE_2D, m_mappingTexture);
+
+			glDrawArrays(GL_TRIANGLES, 0, m_drawCount);
+		}
+
+		// Builds the static per-cell vertex buffer and allocates the
+		// streamed tex-coord buffer sized for texUnitWidth x texUnitHeight cells.
+		void setupMapping(GLuint mappingTex, int texWidth, int texHeight, int texUnitWidth, int texUnitHeight)
+		{
+			CGEColorMappingFilter::setupMapping(mappingTex, texWidth, texHeight, texUnitWidth, texUnitHeight);
+			
+			m_cacheBufferData.resize(texUnitWidth * texUnitHeight);
+			m_texVertBufferData.resize(texUnitWidth * texUnitHeight * 6);
+
+			if(m_mappingVertBuffer == 0)
+				glGenBuffers(1, &m_mappingVertBuffer);
+
+			if(m_mappingTexVertBuffer == 0)
+				glGenBuffers(1, &m_mappingTexVertBuffer);
+
+			// NOTE(review): 'mappingSize' below is computed but never used —
+			// dead code?
+			CGESizef mappingSize(m_texSize.width / (float)m_texUnitResolution.width, m_texSize.height / (float)m_texUnitResolution.height);
+
+			std::vector<Vec2f> vertBuf(m_texUnitResolution.width * m_texUnitResolution.height * 6);
+
+			int index = 0;
+			float w = m_texUnitResolution.width;
+			float h = m_texUnitResolution.height;
+			for(int i = 0; i != m_texUnitResolution.height; ++i)
+			{
+				for(int j = 0; j != m_texUnitResolution.width; ++j)
+				{
+					vertBuf[index] = Vec2f(j / w, i / h);
+					vertBuf[index + 1] = Vec2f((j + 1) / w, i / h);
+					vertBuf[index + 2] = Vec2f(j / w, (i + 1) / h);
+
+					vertBuf[index + 3] = vertBuf[index + 1];
+					vertBuf[index + 4] = Vec2f((j + 1) / w, (i + 1) / h);
+					vertBuf[index + 5] = vertBuf[index + 2];
+					index += 6;
+				}
+			}
+
+			glBindBuffer(GL_ARRAY_BUFFER, m_mappingVertBuffer);
+			glBufferData(GL_ARRAY_BUFFER, vertBuf.size() * sizeof(vertBuf[0]), vertBuf.data(), GL_STATIC_DRAW);
+			m_drawCount = (int)vertBuf.size();
+
+			glBindBuffer(GL_ARRAY_BUFFER, m_mappingTexVertBuffer);
+			glBufferData(GL_ARRAY_BUFFER, m_texVertBufferData.size() * sizeof(m_texVertBufferData[0]), nullptr, GL_STREAM_DRAW);
+
+		}
+
+// 		void endPushing()
+// 		{
+// 			CGEColorMappingFilter::endPushing();
+// 
+// 			glGenBuffers(1, &m_mappingVertBuffer);
+// 			glBindBuffer(GL_ARRAY_BUFFER, m_mappingVertBuffer);
+// 
+// 
+// 
+// 		}
+
+	protected:
+
+		GLuint m_mappingVertBuffer;
+		GLuint m_mappingTexVertBuffer;
+		std::vector<Vec4ub> m_cacheBufferData;
+		std::vector<Vec2f> m_texVertBufferData;
+		// NOTE(review): m_framebuffer appears unused in this translation unit.
+		FrameBuffer m_framebuffer;
+		int m_drawCount;
+		TextureDrawer* m_drawer;
+	};
+
+	//////////////////////////////////////////////////////////////////////////
+
+	CGEColorMappingFilter::CGEColorMappingFilter() : m_mappingTexture(0), m_texSize(0, 0)
+	{
+
+	}
+
+	CGEColorMappingFilter::~CGEColorMappingFilter()
+	{
+		glDeleteTextures(1, &m_mappingTexture);
+	}
+
+	// Factory: only the buffered-area strategy is implemented so far.
+	CGEColorMappingFilter* CGEColorMappingFilter::createWithMode(MapingMode mode)
+	{
+		switch (mode)
+		{
+		case MAPINGMODE_BUFFERED_AREA:
+			return CGEColorMappingFilterBuffered_Area::create();
+		case MAPINGMODE_SINGLE:
+		default:
+			break;
+		}
+
+		return nullptr;
+	}
+
+	void CGEColorMappingFilter::pushMapingArea(const MappingArea& area)
+	{
+		 m_mappingAreas.push_back(area);
+	}
+
+	// Sort once after all areas are pushed (stable, to keep insertion order
+	// among equal keys).
+	void CGEColorMappingFilter::endPushing()
+	{
+		std::stable_sort(m_mappingAreas.begin(), m_mappingAreas.end());
+	}
+
+	void CGEColorMappingFilter::setupMapping(GLuint mappingTex, int texWidth, int texHeight, int texUnitWidth, int texUnitHeight)
+	{
+		m_mappingTexture = mappingTex;
+		m_texSize.set(texWidth, texHeight);
+		m_texUnitResolution.set(texUnitWidth, texUnitHeight);
+	}
+
+}

+ 42 - 0
media/cge_library/src/main/jni/cge/filters/cgeContrastAdjust.cpp

@@ -0,0 +1,42 @@
+/*
+* cgeContrastAdjust.cpp
+*
+*  Created on: 2013-12-26
+*      Author: Wang Yang
+*/
+
+#include "cgeContrastAdjust.h"
+
+// Linear contrast about mid-gray: out = (in - 0.5) * intensity + 0.5,
+// alpha preserved. intensity 1.0 is the identity.
+const static char* const s_fshContrast = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+uniform float intensity;
+
+void main()
+{
+	vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
+	gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * intensity + vec3(0.5)), textureColor.a);
+}
+
+);
+
+namespace CGE
+{
+	CGEConstString CGEContrastFilter::paramName = "intensity";
+
+	bool CGEContrastFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshContrast))
+		{
+			return true;
+		}
+		return false;
+	}
+
+	void CGEContrastFilter::setIntensity(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramName, value);
+	}
+}

+ 87 - 0
media/cge_library/src/main/jni/cge/filters/cgeCrosshatchFilter.cpp

@@ -0,0 +1,87 @@
+/*
+ * cgeCrosshatchFilter.cpp
+ *
+ *  Created on: 2015-2-1
+ *      Author: Wang Yang
+ */
+
+#include "cgeCrosshatchFilter.h"
+
+CGEConstString s_fshCrosshatch = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform float crossHatchSpacing;
+uniform float lineWidth;
+
+const vec3 W = vec3(0.2125, 0.7154, 0.0721);
+
+void main()
+{
+	vec4 color = texture2D(inputImageTexture, textureCoordinate);
+	float luminance = dot(color.rgb, W);
+
+	vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, color.a);
+	if (luminance < 1.00) 
+	{
+		if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth) 
+		{
+			colorToDisplay.rgb = vec3(0.0, 0.0, 0.0);
+		}
+	}
+	if (luminance < 0.75) 
+	{
+		if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth) 
+		{
+			colorToDisplay.rgb = vec3(0.0, 0.0, 0.0);
+		}
+	}
+	if (luminance < 0.50) 
+	{
+		if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) 
+		{
+			colorToDisplay.rgb = vec3(0.0, 0.0, 0.0);
+		}
+	}
+	if (luminance < 0.3) 
+	{
+		if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) 
+		{
+			colorToDisplay.rgb = vec3(0.0, 0.0, 0.0);
+		}
+	}
+
+	gl_FragColor = colorToDisplay;
+}
+);
+
+
+namespace CGE
+{
+	CGEConstString CGECrosshatchFilter::paramCrosshatchSpacing = "crossHatchSpacing";
+	CGEConstString CGECrosshatchFilter::paramLineWidth = "lineWidth";
+
+	bool CGECrosshatchFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshCrosshatch))
+		{
+			setCrosshatchSpacing(0.03f);
+			setLineWidth(0.003f);
+			return true;
+		}
+		return false;
+	}
+
+	void CGECrosshatchFilter::setCrosshatchSpacing(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramCrosshatchSpacing, value);
+	}
+
+	void CGECrosshatchFilter::setLineWidth(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramLineWidth, value);
+	}
+}

+ 619 - 0
media/cge_library/src/main/jni/cge/filters/cgeCurveAdjust.cpp

@@ -0,0 +1,619 @@
+/*
+* cgeCurveAdjust.cpp
+*
+*  Created on: 2014-1-2
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeCurveAdjust.h"
+#include "cgeCommonDefine.h"
+
+#define CURVE_BIND_TEXTURE_ID 0
+
+
+// Reference shader kept as documentation of the sampling scheme; apparently
+// superseded by another implementation — TODO confirm before deleting.
+/*
+const static char* const s_fshCurveMap = SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+uniform sampler2D curveTexture; //We do not use sampler1D because GLES dosenot support that.
+uniform float intensity;
+
+void main()
+{
+	vec3 src = texture2D(inputImageTexture, textureCoordinate).rgb;
+	vec3 dst = vec3(texture2D(curveTexture, vec2(src.r, 0.0)).r,
+					texture2D(curveTexture, vec2(src.g, 0.0)).g,
+					texture2D(curveTexture, vec2(src.b, 0.0)).b);
+	gl_FragColor = vec4(mix(src, dst, intensity), 1.0);
+}
+);
+ */
+
+namespace CGE
+{
+	// Loads up to three independent channel curves into m_curve; a null
+	// pointer leaves that channel untouched.
+	void CGECurveInterface::loadCurves(const float* curveR, size_t nR, const float* curveG, size_t nG, const float* curveB, size_t nB)
+	{
+		if(curveR != nullptr)
+			CGECurveInterface::loadCurve(m_curve, curveR, nR, CHANNEL_R, 1u, 0u);
+		if(curveG != nullptr)
+			CGECurveInterface::loadCurve(m_curve, curveG, nG, CHANNEL_G, 1u, 0u);
+		if(curveB != nullptr)
+			CGECurveInterface::loadCurve(m_curve, curveB, nB, CHANNEL_B, 1u, 0u);
+	}
+
+	bool CGECurveInterface::loadCurve(std::vector<float>& vec, const float* curve, size_t cnt)
+	{
+		if(curve == nullptr || cnt <= 1)
+		{
+			CGECurveInterface::resetCurve(vec, CGE_CURVE_PRECISION);
+			return false;
+		}
+		if(vec.size() != cnt)
+			vec.resize(cnt);
+		vec.resize(cnt);
+		for(int i = 0; i != cnt; ++i)
+		{
+			vec[i] = curve[i];
+		}
+		return true;
+	}
+
+	bool CGECurveInterface::loadCurve(std::vector<CurveData>& vec, const float* curve, size_t cnt, size_t dstChannel, size_t srcChannel, size_t stride)
+	{
+		if(curve == nullptr || cnt <= 1 || srcChannel == 0 || dstChannel >= 3)
+		{
+			CGECurveInterface::resetCurve(vec, CGE_CURVE_PRECISION);
+			return false;
+		}
+		if(vec.size() != cnt)
+			vec.resize(cnt);
+		for(int i = 0; i != cnt; ++i)
+		{
+			vec[i][(int)dstChannel] = curve[i*srcChannel + stride];
+		}
+		return true;
+	}
+
+	int CGECurveInterface::getPrecision()
+	{
+		return CGE_CURVE_PRECISION;
+	}
+
+	void CGECurveInterface::setPoints(const CurvePoint* r, size_t nr, const CurvePoint* g, size_t ng, const CurvePoint* b, size_t nb)
+	{
+		CGECurveInterface::genCurve(m_curve, r, nr, g, ng, b, nb);
+	}
+
+	void CGECurveInterface::setPointsRGB(const CurvePoint* pnts, size_t cnt)
+	{
+		std::vector<float> curve(CGE_CURVE_PRECISION);
+		if(CGECurveInterface::genCurve(curve, pnts, cnt))
+		{
+			CGECurveInterface::loadCurve(m_curve, curve.data(), curve.size(), CHANNEL_R, 1u, 0u);
+			CGECurveInterface::loadCurve(m_curve, curve.data(), curve.size(), CHANNEL_G, 1u, 0u);
+			CGECurveInterface::loadCurve(m_curve, curve.data(), curve.size(), CHANNEL_B, 1u, 0u);
+		}
+	}
+
+	void CGECurveInterface::setPointsR(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::genCurve(m_curve, pnts, cnt, CHANNEL_R);
+	}
+
+	void CGECurveInterface::setPointsG(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::genCurve(m_curve, pnts, cnt, CHANNEL_G);
+	}
+
+	void CGECurveInterface::setPointsB(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::genCurve(m_curve, pnts, cnt, CHANNEL_B);
+	}
+
+	void CGECurveInterface::resetCurve(std::vector<float>& vec, size_t precision)
+	{
+		if(vec.size() != precision) vec.resize(precision);
+		for(int i = 0; i != precision; ++i)
+		{
+			vec[i] = float(i) / (precision - 1);
+		}
+	}
+
+	void CGECurveInterface::resetCurve(std::vector<CurveData>& vec, size_t precision)
+	{
+		if(vec.size() != precision) vec.resize(precision);
+		for(int i = 0; i != precision; ++i)
+		{
+			const float tmp = float(i) / (precision - 1);
+			vec[i][0] = tmp;
+			vec[i][1] = tmp;
+			vec[i][2] = tmp;
+		}
+	}
+
+	void CGECurveInterface::scaleCurve(std::vector<float>& vec, size_t precision)
+	{
+		const std::vector<float>::size_type sz = vec.size();
+		if(sz == precision) return;
+		if(sz == 0)
+		{
+			CGECurveInterface::resetCurve(vec, precision);
+			return;
+		}
+
+		std::vector<float> vTmp(precision);
+		float scale = float(sz - 1) / (precision - 1);
+		for(std::vector<float>::size_type t = 0; t != precision; ++t)
+		{
+			const size_t index = CGE_MID(size_t(t * scale), size_t(0), (sz - 1));
+			vTmp[t] = vec[index];
+		}
+		vec = vTmp;
+	}
+
+	void CGECurveInterface::scaleCurve(std::vector<CurveData>& vec, size_t precision)
+	{
+		const std::vector<float>::size_type sz = vec.size();
+		if(sz == precision) return;
+		if(sz == 0)
+		{
+			CGECurveInterface::resetCurve(vec, precision);
+			return;
+		}
+
+		std::vector<CurveData> vTmp(precision);
+		float scale = float(sz - 1) / (precision - 1);
+		for(std::vector<CurveData>::size_type t = 0; t != precision; ++t)
+		{
+			const size_t index = CGE_MID(size_t(t * scale), size_t(0), (sz - 1));
+			vTmp[t][0] = vec[index][0];
+			vTmp[t][1] = vec[index][1];
+			vTmp[t][2] = vec[index][2];
+		}
+		vec = vTmp;
+	}
+
+	bool CGECurveInterface::mergeCurveConst(std::vector<float>& dst, const std::vector<float>& late, const std::vector<float>& early)
+	{
+		if(early.empty() || early.size() != late.size())
+			return false; //You should call "scaleCurve" to make the sizes equal. (none-zero)
+
+		const std::vector<float>::size_type sz = early.size();
+		if(dst.size() != sz) dst.resize(sz);
+		for(std::vector<float>::size_type t = 0; t != sz; ++t)
+		{
+			const size_t index = CGE_MID(size_t(early[t] * (sz - 1)), size_t(0), (sz - 1));
+			dst[t] = late[index];
+		}
+		return true;
+	}
+
+	bool CGECurveInterface::mergeCurveConst(std::vector<CurveData>& dst, const std::vector<CurveData>& late, const std::vector<CurveData>& early)
+	{
+		if(early.empty() || early.size() != late.size())
+			return false; //You should call "scaleCurve" to make the sizes equal. (none-zero)
+
+		const std::vector<float>::size_type sz = early.size();
+		if(dst.size() != sz) dst.resize(sz);
+		for(std::vector<float>::size_type t = 0; t != sz; ++t)
+		{
+			const size_t indexR = CGE_MID(size_t(early[t][0] * (sz - 1)), size_t(0), (sz - 1));
+			dst[t][0] = late[indexR][0];
+
+			const size_t indexG = CGE_MID(size_t(early[t][1] * (sz - 1)), size_t(0), (sz - 1));
+			dst[t][1] = late[indexG][1];
+
+			const size_t indexB = CGE_MID(size_t(early[t][2] * (sz - 1)), size_t(0), (sz - 1));
+			dst[t][2] = late[indexB][2];
+		}
+		return true;
+	}
+
+	// Like mergeCurveConst, but first rescales both curves to CGE_CURVE_PRECISION
+	// samples if their sizes differ (hence the non-const parameters).
+	bool CGECurveInterface::mergeCurve(std::vector<float>& dst, std::vector<float>& late, std::vector<float>& early)
+	{
+		if(early.size() != late.size())
+		{
+			scaleCurve(late, CGE_CURVE_PRECISION);
+			scaleCurve(early, CGE_CURVE_PRECISION);
+		}
+		return mergeCurveConst(dst, late, early);
+	}
+
+	// Three-channel variant; same rescale-then-compose behavior as above.
+	bool CGECurveInterface::mergeCurve(std::vector<CurveData>& dst, std::vector<CurveData>& late, std::vector<CurveData>& early)
+	{
+		if(early.size() != late.size())
+		{
+			scaleCurve(late, CGE_CURVE_PRECISION);
+			scaleCurve(early, CGE_CURVE_PRECISION);
+		}
+		return mergeCurveConst(dst, late, early);
+	}
+
+	// Composes a single-channel curve "late" onto one component (0=R, 1=G, 2=B)
+	// of a three-channel curve "early", writing only that component of dst.
+	// Rescales both curves to CGE_CURVE_PRECISION first when sizes differ.
+	bool CGECurveInterface::mergeCurve(std::vector<CurveData>& dst, std::vector<float>& late, std::vector<CurveData>& early, unsigned channel)
+	{
+		if(early.empty() || channel >= 3)
+			return false;
+
+		if(early.size() != late.size())
+		{
+			scaleCurve(late, CGE_CURVE_PRECISION);
+			scaleCurve(early, CGE_CURVE_PRECISION);
+		}
+
+		const std::vector<float>::size_type sz = early.size();
+		if(dst.size() != sz) dst.resize(sz);
+		for(std::vector<float>::size_type t = 0; t != sz; ++t)
+		{
+			const size_t index = CGE_MID(size_t(early[t][channel] * (sz - 1)), size_t(0), (sz - 1));
+			dst[t][channel] = late[index];
+		}
+		return true;
+	}
+
+	// Evaluates a natural cubic spline through the control points "pnts" and
+	// samples it at CGE_CURVE_PRECISION evenly spaced positions in [0,1].
+	// Results are written interleaved: curve[i * channel + stride], so "channel"
+	// is the interleave width and "stride" the component offset within it.
+	// Be sure that the size of "curve" is right. (CURVE_PRECISION * channel)
+	// Precondition (enforced by the genCurve wrappers): cnt >= 2 and pnts has
+	// strictly increasing x values — cnt < 2 would underflow the work buffers.
+	bool CGECurveInterface::_genCurve(float* curve, const CurvePoint* pnts, size_t cnt, unsigned channel, unsigned stride)
+	{
+		if(curve == nullptr)
+			return false;
+
+		// Tridiagonal solve for the second derivatives (ypp) of the natural
+		// spline; u[] holds the decomposition's intermediate terms.
+		std::vector<float> u(cnt - 1), ypp(cnt);
+		ypp[0] = u[0] = 0.0f;
+		for(int i=1; i != cnt - 1; ++i)
+		{
+			float sig = (pnts[i].x - pnts[i - 1].x) / (pnts[i + 1].x - pnts[i - 1].x);
+			float p = sig * ypp[i - 1] + 2.0f;
+			ypp[i] = (sig - 1.0f) / p;
+			u[i] = ((pnts[i + 1].y - pnts[i].y)/ (pnts[i + 1].x - pnts[i].x) - (pnts[i].y - pnts[i - 1].y) / (pnts[i].x - pnts[i - 1].x));
+			u[i] = (6.0f * u[i] / (pnts[i + 1].x - pnts[i - 1].x) - sig * u[i - 1]) / p;
+		}
+		ypp[cnt - 1] = 0.0;
+		// Back-substitution.
+		for(int i = (int)(cnt - 2); i >= 0; --i)
+		{
+			ypp[i] = ypp[i] * ypp[i+1] + u[i];
+		}
+		// Sample the spline; kL/kH track the bracketing control-point interval.
+		int kL = -1, kH = 0;
+		for(int i = 0; i != CGE_CURVE_PRECISION; ++i)
+		{
+			const float t = (float)i/(CGE_CURVE_PRECISION - 1);
+			while(kH < (int)cnt && t > pnts[kH].x)
+			{
+				kL = kH;
+				++kH;
+			}
+			// Past the last control point: hold the last y value.
+			if(kH == cnt)
+			{
+				curve[i * channel + stride] = pnts[cnt-1].y;
+				continue;
+			}
+			// Before the first control point: hold the first y value.
+			if(kL == -1)
+			{
+				curve[i * channel + stride] = pnts[0].y;
+				continue;
+			}
+			// Standard cubic-spline interpolation on [pnts[kL].x, pnts[kH].x].
+			const float h = pnts[kH].x - pnts[kL].x;
+			const float a = (pnts[kH].x - t) / h;
+			const float b = (t - pnts[kL].x) / h;
+			const float g = a * pnts[kL].y + b*pnts[kH].y + ((a*a*a - a)*ypp[kL] + (b*b*b - b) * ypp[kH]) * (h*h) / 6.0f;
+			curve[i * channel + stride] = CGE_MID(g, 0.0f, 1.0f);
+		}
+		return true;
+	}
+
+	// Generates a single-channel curve table from control points.
+	// On invalid input (fewer than 2 points or null), resets the curve to
+	// identity and returns false.
+	bool CGECurveInterface::genCurve(std::vector<float>& vec, const CurvePoint* pnts, size_t cnt)
+	{
+		if(vec.size() != CGE_CURVE_PRECISION) vec.resize(CGE_CURVE_PRECISION);
+		if(cnt <= 1 || pnts == nullptr)
+		{
+			resetCurve(vec, CGE_CURVE_PRECISION);
+			CGE_LOG_ERROR("Invalid Curve Points! Ptr: %p, Count: %d", pnts, (int)cnt);
+			return false;
+		}
+		return _genCurve(vec.data(), pnts, cnt);
+	}
+
+	// Generates a three-channel curve table from separate R/G/B control points.
+	// All three point sets must be valid (>= 2 points each) or the curve is
+	// reset to identity and false is returned.
+	bool CGECurveInterface::genCurve(std::vector<CurveData>& vec, const CurvePoint* pntsR, size_t cntR, const CurvePoint* pntsG, size_t cntG, const CurvePoint* pntsB, size_t cntB)
+	{
+		if(vec.size() != CGE_CURVE_PRECISION) vec.resize(CGE_CURVE_PRECISION);
+		if(cntR <= 1 || pntsR == nullptr ||
+			cntG <= 1 || pntsG == nullptr ||
+			cntB <= 1 || pntsB == nullptr)
+		{
+			resetCurve(vec, CGE_CURVE_PRECISION);
+			CGE_LOG_ERROR("Invalid Curve Points!\nR: %p, Count: %d\nG: %p, Count: %d\nB: %p, Count: %d\n", pntsR, (int)cntR, pntsG, (int)cntG, pntsB, (int)cntB);
+			return false;
+		}
+
+		// Interleaved write: 3 floats per entry, strides 0/1/2 for R/G/B.
+		return _genCurve(&vec[0][0], pntsR, cntR, 3, 0) &&
+			_genCurve(&vec[0][0], pntsG, cntG, 3, 1) &&
+			_genCurve(&vec[0][0], pntsB, cntB, 3, 2);
+	}
+
+	// Generates a curve for a single component (0=R, 1=G, 2=B) of a
+	// three-channel table, leaving the other components untouched.
+	// On invalid input, resets the whole curve to identity and returns false.
+	bool CGECurveInterface::genCurve(std::vector<CurveData>& vec, const CurvePoint* pnts, size_t cnt, size_t channel)
+	{
+		if(vec.size() != CGE_CURVE_PRECISION) vec.resize(CGE_CURVE_PRECISION);
+		// Fix: reject channel == 3 as well. CurveData entries have exactly three
+		// components (indices 0..2 — see mergeCurveConst and the "channel >= 3"
+		// check in mergeCurve); the old "channel > 3" test let channel == 3
+		// through, making _genCurve write one float past each entry.
+		if(cnt <= 1 || pnts == nullptr || channel >= 3)
+		{
+			resetCurve(vec, CGE_CURVE_PRECISION);
+			CGE_LOG_ERROR("Invalid Curve Points! Ptr: %p, Count: %d\n", pnts, (int)cnt);
+			return false;
+		}
+		return _genCurve(&vec[0][0], pnts, cnt, 3, (int)channel);
+	}
+
+	// Uploads the curve table as a vec3 uniform array named "name" into the
+	// given shader program. Logs and bails out if the uniform is not found
+	// (e.g. optimized away by the GLSL compiler).
+	void CGECurveInterface::_assignCurveArrays(ProgramObject& program, CGEConstString name, std::vector<CurveData>& data)
+	{
+		program.bind();
+		GLint index = program.uniformLocation(name);
+		if(index < 0)
+		{
+			CGE_LOG_ERROR("CGECurveFilter: Failed when assignCurveArray()\n");
+			return;
+		}
+		glUniform3fv(index, (GLsizei)data.size(), &data[0][0]);
+	}
+
+	// Uploads the curve table into a 1-row RGB texture used as a lookup sampler.
+	// Creates the texture on first use (texID == 0) and updates it in place
+	// afterwards. The data is rescaled to CGE_CURVE_PRECISION samples if needed.
+	void CGECurveInterface::_assignCurveSampler(GLuint& texID, std::vector<CurveData>& data)
+	{
+		std::vector<float>::size_type sz = data.size();
+		if(sz != CGE_CURVE_PRECISION)
+		{
+			scaleCurve(data, CGE_CURVE_PRECISION);
+			sz = CGE_CURVE_PRECISION;
+		}
+
+#ifdef CGE_NOT_OPENGL_ES
+
+		// Desktop GL: float textures are supported, upload the data directly.
+		const GLenum dataType = GL_FLOAT;
+		const void* samplerData = &data[0][0];		
+
+#else
+
+		// GLES: convert to 8-bit per channel for wide compatibility.
+		const GLenum dataType = GL_UNSIGNED_BYTE;
+
+		const CurveData* curveData = data.data();
+		unsigned char samplerData[CGE_CURVE_PRECISION * 3];
+		for(int i = 0; i != CGE_CURVE_PRECISION; ++i)
+		{
+			const int index = i * 3;
+			samplerData[index] = (unsigned char)(curveData[i][0] * 255.0f);
+			samplerData[index + 1] = (unsigned char)(curveData[i][1] * 255.0f);
+			samplerData[index + 2] = (unsigned char)(curveData[i][2] * 255.0f);
+		}
+
+#endif
+
+		if(texID == 0)
+        {
+			texID = cgeGenTextureWithBuffer(samplerData, int(sz), 1, GL_RGB, dataType, 3, 0, GL_LINEAR, GL_CLAMP_TO_EDGE);
+		}
+		else
+		{
+			// Texture already exists with the right size: just refresh its contents.
+			glBindTexture(GL_TEXTURE_2D, texID);
+			glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, (int)sz, 1, GL_RGB, dataType, samplerData);
+		}
+	}
+
+	//////////////////////////////////////////////////////////////////////////
+
+	// Curve filter backed by a lookup texture (sampler-based variant).
+	bool CGECurveTexFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, g_fshCurveMapNoIntensity))
+		{
+			initSampler();
+			return true;
+		}
+		return false;
+	}
+
+	// Re-uploads the current curve into the lookup texture.
+	void CGECurveTexFilter::flush()
+	{
+		_assignCurveSampler(m_curveTexture, m_curve);
+	}
+
+	// Creates/updates the curve texture and registers it as a sampler2D
+	// uniform parameter. Reuses the existing parameter set when present;
+	// setAdditionalUniformParameter presumably takes ownership of "param" —
+	// verify against the base class.
+	void CGECurveTexFilter::initSampler()
+	{		
+		flush();
+		UniformParameters* param;
+		if(m_uniformParam == nullptr) param = new UniformParameters;
+		else param = m_uniformParam;
+		param->pushSampler2D(g_paramCurveMapTextureName, &m_curveTexture, CURVE_BIND_TEXTURE_ID);
+		setAdditionalUniformParameter(param);
+	}
+
+	// The setters below update the curve via the base class and then flush
+	// the result into the lookup texture so the change takes effect.
+	void CGECurveTexFilter::setPoints(const CurvePoint* r, size_t nr, const CurvePoint* g, size_t ng, const CurvePoint* b, size_t nb)
+	{
+		CGECurveInterface::setPoints(r, nr, g, ng, b, nb);
+		flush();
+	}
+
+	void CGECurveTexFilter::setPointsRGB(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsRGB(pnts, cnt);
+		flush();
+	}
+
+	void CGECurveTexFilter::setPointsR(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsR(pnts, cnt);
+		flush();
+	}
+
+	void CGECurveTexFilter::setPointsG(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsG(pnts, cnt);
+		flush();
+	}
+
+	void CGECurveTexFilter::setPointsB(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsB(pnts, cnt);
+		flush();
+	}
+
+	void CGECurveTexFilter::loadCurves(const float* curveR, size_t nR, const float* curveG, size_t nG, const float* curveB, size_t nB)
+	{
+		CGECurveInterface::loadCurves(curveR, nR, curveG, nG, curveB, nB);
+		flush();
+	}
+
+	//////////////////////////////////////////////////////////////////////////
+
+	// Curve filter backed by a uniform array (no lookup texture).
+	bool CGECurveFilter::init()
+	{
+		resetCurve(m_curve, CGE_CURVE_PRECISION);
+
+		return initShadersFromString(g_vshDefaultWithoutTexCoord, g_fshFastAdjust);
+	}
+
+	// The setters below update the curve via the base class and then flush
+	// the result into the shader's uniform array.
+	void CGECurveFilter::setPoints(const CurvePoint* r, size_t nr, const CurvePoint* g, size_t ng, const CurvePoint* b, size_t nb)
+	{
+		CGECurveInterface::setPoints(r, nr, g, ng, b, nb);
+		flush();
+	}
+
+	void CGECurveFilter::setPointsRGB(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsRGB(pnts, cnt);
+		flush();
+	}
+
+	void CGECurveFilter::setPointsR(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsR(pnts, cnt);
+		flush();
+	}
+	void CGECurveFilter::setPointsG(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsG(pnts, cnt);
+		flush();
+	}
+	void CGECurveFilter::setPointsB(const CurvePoint* pnts, size_t cnt)
+	{
+		CGECurveInterface::setPointsB(pnts, cnt);
+		flush();
+	}
+
+	void CGECurveFilter::loadCurves(const float* curveR, size_t nR, const float* curveG, size_t nG,	const float* curveB, size_t nB)
+	{
+		CGECurveInterface::loadCurves(curveR, nR, curveG, nG, curveB, nB);
+		flush();
+	}
+
+	// Uploads the current curve into the shader's uniform array.
+	void CGECurveFilter::flush()
+	{
+		_assignCurveArrays(m_program, g_paramFastAdjustArrayName, m_curve);
+	}
+
+	//////////////////////////////////////////////////////////////////////////
+
+	// Curve filter that ACCUMULATES curves: each push* call composes a new
+	// curve onto the existing one (via mergeCurve) instead of replacing it.
+	// Note: callers must invoke flush() after pushing to upload the result.
+	bool CGEMoreCurveFilter::init()
+	{
+		resetCurve(m_curve, CGE_CURVE_PRECISION);
+		return initShadersFromString(g_vshDefaultWithoutTexCoord, g_fshFastAdjust);
+	}
+
+	// Pushes independent R/G/B point sets; each channel is composed separately.
+	void CGEMoreCurveFilter::pushPoints(const CurvePoint* r, size_t nr, const CurvePoint* g, size_t ng, const CurvePoint* b, size_t nb)
+	{
+		pushPointsR(r, nr);
+		pushPointsG(g, ng);
+		pushPointsB(b, nb);
+	}
+
+	// Pushes one point set applied identically to all three channels.
+	void CGEMoreCurveFilter::pushPointsRGB(const CurvePoint* rgb, size_t nrgb)
+	{
+		std::vector<float> vecRGB;
+		if(rgb != nullptr && nrgb >= 2)
+		{
+			genCurve(vecRGB, rgb, nrgb);
+			mergeCurve(m_curve, vecRGB, m_curve, 0);
+			mergeCurve(m_curve, vecRGB, m_curve, 1);
+			mergeCurve(m_curve, vecRGB, m_curve, 2);
+		}
+	}
+
+	void CGEMoreCurveFilter::pushPointsR(const CurvePoint* r, size_t nr)
+	{
+		std::vector<float> vecR;
+		if(r != nullptr && nr >= 2)
+		{
+			genCurve(vecR, r, nr);
+			mergeCurve(m_curve, vecR, m_curve, 0);
+		}
+	}
+
+	void CGEMoreCurveFilter::pushPointsG(const CurvePoint* g, size_t ng)
+	{
+		std::vector<float> vecG;
+		if(g != nullptr && ng >= 2)
+		{
+			genCurve(vecG, g, ng);
+			mergeCurve(m_curve, vecG, m_curve, 1);
+		}
+	}
+
+	void CGEMoreCurveFilter::pushPointsB(const CurvePoint* b, size_t nb)
+	{
+		std::vector<float> vecB;
+		if(b != nullptr && nb >= 2)
+		{
+			genCurve(vecB, b, nb);
+			mergeCurve(m_curve, vecB, m_curve, 2);
+		}
+	}
+
+	// Pushes precomputed curve tables (one per channel, any may be null).
+	void CGEMoreCurveFilter::pushCurves(const float* curveR, size_t nR, const float* curveG, size_t nG, const float* curveB, size_t nB)
+	{
+
+		if(curveR != nullptr && nR > 0)
+		{
+			std::vector<float> vecR(curveR, curveR + nR);
+			mergeCurve(m_curve, vecR, m_curve, 0);
+		}
+		if(curveG != nullptr && nG > 0)
+		{
+			std::vector<float> vecG(curveG, curveG + nG);
+			mergeCurve(m_curve, vecG, m_curve, 1);
+		}
+		if(curveB != nullptr && nB > 0)
+		{
+			std::vector<float> vecB(curveB, curveB + nB);
+			mergeCurve(m_curve, vecB, m_curve, 2);
+		}
+	}
+
+	// Uploads the accumulated curve into the shader's uniform array.
+	void CGEMoreCurveFilter::flush()
+	{
+		_assignCurveArrays(m_program, g_paramFastAdjustArrayName, m_curve);
+	}
+
+	//////////////////////////////////////////////////////////////////////////
+
+	// Texture-backed variant of CGEMoreCurveFilter (accumulating curves,
+	// uploaded via a lookup texture instead of a uniform array).
+	bool CGEMoreCurveTexFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, g_fshCurveMapNoIntensity))
+		{
+			initSampler();
+			return true;
+		}
+		return false;
+	}
+
+	// Re-uploads the accumulated curve into the lookup texture.
+	void CGEMoreCurveTexFilter::flush()
+	{
+		_assignCurveSampler(m_curveTexture, m_curve);
+	}
+
+	// Creates the curve texture and registers it as a sampler2D uniform on
+	// texture unit 0. Reuses the existing parameter set when present.
+	void CGEMoreCurveTexFilter::initSampler()
+	{
+		flush();
+		UniformParameters* param;
+		if(m_uniformParam == nullptr) param = new UniformParameters;
+		else param = m_uniformParam;
+		param->pushSampler2D(g_paramCurveMapTextureName, &m_curveTexture, 0);
+		setAdditionalUniformParameter(param);
+	}
+
+}

Tiedoston diff-näkymää rajattu, sillä se on liian suuri
+ 1396 - 0
media/cge_library/src/main/jni/cge/filters/cgeDataParsingEngine.cpp


+ 31 - 0
media/cge_library/src/main/jni/cge/filters/cgeDynamicFilters.cpp

@@ -0,0 +1,31 @@
+/*
+ * cgeDynamicFilters.cpp
+ *
+ *  Created on: 2015-11-18
+ *      Author: Wang Yang
+ */
+
+#include "cgeDynamicFilters.h"
+
+// Factory helper: heap-allocates the filter, runs init(), and returns
+// nullptr on failure (deleting the half-constructed instance).
+#define COMMON_FUNC(type) \
+type* proc = new type();\
+if(!proc->init())\
+{\
+	delete proc;\
+	proc = NULL;\
+}\
+return proc;\
+
+namespace CGE
+{
+
+	// Returns a ready-to-use wave filter, or nullptr if init() failed.
+	CGEDynamicWaveFilter* createDynamicWaveFilter()
+	{
+		COMMON_FUNC(CGEDynamicWaveFilter);
+	}
+    
+    // Returns a ready-to-use motion-flow filter, or nullptr if init() failed.
+    CGEMotionFlowFilter* createMotionFlowFilter()
+    {
+        COMMON_FUNC(CGEMotionFlowFilter);
+    }
+}

+ 110 - 0
media/cge_library/src/main/jni/cge/filters/cgeDynamicWaveFilter.cpp

@@ -0,0 +1,110 @@
+/*
+ * cgeDynamicWaveFilter.cpp
+ *
+ *  Created on: 2015-11-12
+ *      Author: Wang Yang
+ */
+
+#include "cgeDynamicWaveFilter.h"
+
+// Fragment shader: displaces each texel by a sine wave along both axes.
+// "motion" is the animation phase, "angle" the spatial frequency, and
+// "strength" the displacement amplitude in texture coordinates.
+static CGEConstString s_fshWave = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform float motion;
+uniform float angle;
+uniform float strength;
+void main()
+{
+   vec2 coord;
+   coord.x = textureCoordinate.x + strength * sin(motion + textureCoordinate.x * angle);
+   coord.y = textureCoordinate.y + strength * sin(motion + textureCoordinate.y * angle);
+   gl_FragColor = texture2D(inputImageTexture, coord);
+}
+);
+
+namespace CGE
+{
+	CGEConstString CGEDynamicWaveFilter::paramMotion = "motion";
+	CGEConstString CGEDynamicWaveFilter::paramAngle = "angle";
+    CGEConstString CGEDynamicWaveFilter::paramStrength = "strength";
+
+	// Compiles the wave shader, caches uniform locations and sets defaults
+	// (angle 20, strength 0.01, motion 0, auto-animation off).
+	bool CGEDynamicWaveFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshWave))
+		{
+			m_program.bind();
+			m_motionLoc = m_program.uniformLocation(paramMotion);
+			m_angleLoc = m_program.uniformLocation(paramAngle);
+            m_strengthLoc = m_program.uniformLocation(paramStrength);
+			setWaveAngle(20.0f);
+            setStrength(0.01f);
+			m_motion = 0.0f;
+			m_autoMotion = false;
+			return true;
+		}
+
+		return false;
+	}
+
+	// Draws srcTexture through the wave shader into the handler's target FBO.
+	// When auto-motion is enabled, advances the phase each frame and wraps it
+	// to avoid unbounded growth (and float precision loss).
+	void CGEDynamicWaveFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+	{
+        handler->setAsTarget();
+        m_program.bind();
+
+        glEnableVertexAttribArray(0);
+        glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
+        glActiveTexture(GL_TEXTURE0);
+        glBindTexture(GL_TEXTURE_2D, srcTexture);
+
+        if(m_autoMotion)
+        {
+            m_motion += m_motionSpeed;
+            glUniform1f(m_motionLoc, m_motion);
+            if(m_motion > 3.14159f * m_angle)
+            {
+                m_motion -= 3.14159f * m_angle;
+            }
+        }
+
+        if(m_uniformParam != nullptr)
+            m_uniformParam->assignUniforms(handler, m_program.programID());
+
+        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+        cgeCheckGLError("glDrawArrays");
+	}
+
+	// Intensity maps to the auto-motion speed for this filter.
+	void CGEDynamicWaveFilter::setIntensity(float value)
+	{
+		setAutoMotionSpeed(value);
+	}
+
+	void CGEDynamicWaveFilter::setWaveMotion(float motion)
+	{
+		m_motion = motion;
+		m_program.bind();
+		glUniform1f(m_motionLoc, motion);
+	}
+
+	void CGEDynamicWaveFilter::setWaveAngle(float angle)
+	{
+		m_angle = angle;
+		m_program.bind();
+		glUniform1f(m_angleLoc, angle);
+	}
+
+    void CGEDynamicWaveFilter::setStrength(float strength)
+    {
+        m_strength = strength;
+        m_program.bind();
+        glUniform1f(m_strengthLoc, strength);
+    }
+
+	// A positive speed enables per-frame animation; zero or negative disables it.
+	void CGEDynamicWaveFilter::setAutoMotionSpeed(float speed)
+	{
+		m_motionSpeed = speed;
+		m_autoMotion = (speed > 0.0f);
+	}
+
+}

+ 110 - 0
media/cge_library/src/main/jni/cge/filters/cgeEdgeFilter.cpp

@@ -0,0 +1,110 @@
+/*
+ * cgeEdgeFilter.cpp
+ *
+ *  Created on: 2013-12-29
+ *      Author: Wang Yang
+ */
+
+#include "cgeEdgeFilter.h"
+#include "cgeMat.h"
+
+// Fragment shader: simple directional edge detect — difference between a
+// texel and its neighbor at offset (stride * norm), mixed with the source
+// by "intensity".
+const static char* const s_fshEdge = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+uniform vec2 samplerSteps;
+uniform float stride;
+uniform float intensity;
+uniform vec2 norm;
+
+void main()
+{
+	vec4 src = texture2D(inputImageTexture, textureCoordinate);
+	vec3 tmpColor = texture2D(inputImageTexture, textureCoordinate + samplerSteps * stride * norm).rgb;
+	tmpColor = abs(src.rgb - tmpColor) * 2.0;
+	gl_FragColor = vec4(mix(src.rgb, tmpColor, intensity), src.a);
+}
+);
+
+// Vertex shader for the Sobel variant: precomputes the 8 neighbor coordinates
+// so the fragment shader avoids per-pixel dependent texture-coordinate math.
+CGEConstString s_vshEdgeSobel = CGE_SHADER_STRING
+(
+attribute vec2 vPosition;
+varying vec2 textureCoordinate;
+varying vec2 coords[8];
+
+uniform vec2 samplerSteps;
+uniform float stride;
+
+void main()
+{
+	gl_Position = vec4(vPosition, 0.0, 1.0);
+	textureCoordinate = (vPosition.xy + 1.0) / 2.0;
+
+	coords[0] = textureCoordinate - samplerSteps * stride;
+	coords[1] = textureCoordinate + vec2(0.0, -samplerSteps.y) * stride;
+	coords[2] = textureCoordinate + vec2(samplerSteps.x, -samplerSteps.y) * stride;
+
+	coords[3] = textureCoordinate - vec2(samplerSteps.x, 0.0) * stride;
+	coords[4] = textureCoordinate + vec2(samplerSteps.x, 0.0) * stride;
+
+	coords[5] = textureCoordinate + vec2(-samplerSteps.x, samplerSteps.y) * stride;
+	coords[6] = textureCoordinate + vec2(0.0, samplerSteps.y) * stride;
+	coords[7] = textureCoordinate + vec2(samplerSteps.x, samplerSteps.y) * stride;
+
+}
+);
+
+// Fragment shader: classic 3x3 Sobel operator (horizontal + vertical kernels),
+// gradient magnitude mixed with the source by "intensity".
+const static char* const s_fshEdgeSobel = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+uniform vec2 samplerSteps;
+uniform float stride;
+uniform float intensity;
+
+varying vec2 coords[8];
+
+void main()
+{
+	vec3 colors[8];
+
+	for(int i = 0; i < 8; ++i)
+	{
+		colors[i] = texture2D(inputImageTexture, coords[i]).rgb;
+	}
+
+	vec4 src = texture2D(inputImageTexture, textureCoordinate);
+
+	vec3 h = -colors[0] - 2.0 * colors[1] - colors[2] + colors[5] + 2.0 * colors[6] + colors[7];
+	vec3 v = -colors[0] + colors[2] - 2.0 * colors[3] + 2.0 * colors[4] - colors[5] + colors[7];
+
+	gl_FragColor = vec4(mix(src.rgb, sqrt(h * h + v * v), intensity), 1.0);
+}
+);
+
+namespace CGE
+{
+
+	// Directional edge filter; setIntensity/setStride/setAngle are presumably
+	// inherited from the base class (not defined in this file).
+	bool CGEEdgeFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshEdge))
+		{
+			setIntensity(1.0f);
+			setStride(2.0f);
+			setAngle(M_PI * 0.75f);
+			return true;
+		}
+		return false;
+	}
+
+	// Sobel edge filter using the precomputed-coordinate vertex shader above.
+	bool CGEEdgeSobelFilter::init()
+	{
+		if(initShadersFromString(s_vshEdgeSobel, s_fshEdgeSobel))
+		{
+			setIntensity(1.0f);
+			setStride(2.0f);
+			return true;
+		}
+		return false;
+	}
+}

+ 64 - 0
media/cge_library/src/main/jni/cge/filters/cgeEmbossFilter.cpp

@@ -0,0 +1,64 @@
+/*
+ * cgeEmbossFilter.cpp
+ *
+ *  Created on: 2013-12-27
+ *      Author: Wang Yang
+ */
+
+#include "cgeEmbossFilter.h"
+#include "cgeMat.h"
+
+// Fragment shader: emboss — grayscale of the signed difference between a
+// texel and its neighbor at offset (stride * norm), biased by 0.5, mixed
+// with the source by "intensity".
+const static char* const s_fshEmboss = CGE_SHADER_STRING_PRECISION_M
+(
+uniform sampler2D inputImageTexture;
+varying vec2 textureCoordinate;
+uniform vec2 samplerSteps;
+uniform float stride;
+uniform float intensity;
+uniform vec2 norm;
+
+void main() {
+  vec4 src = texture2D(inputImageTexture, textureCoordinate);
+  vec3 tmp = texture2D(inputImageTexture, textureCoordinate + samplerSteps * stride * norm).rgb - src.rgb + 0.5;
+  float f = (tmp.r + tmp.g + tmp.b) / 3.0;
+  gl_FragColor = vec4(mix(src.rgb, vec3(f, f, f), intensity), src.a);
+}
+);
+
+namespace CGE
+{
+	CGEConstString CGEEmbossFilter::paramIntensity = "intensity";
+	CGEConstString CGEEmbossFilter::paramStride = "stride";
+	CGEConstString CGEEmbossFilter::paramNorm = "norm";
+
+	// Defaults: full intensity, 2px sampling stride, emboss direction at 135°.
+	bool CGEEmbossFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshEmboss))
+		{
+			setIntensity(1.0f);
+			setStride(2.0f);
+			setAngle(M_PI * 0.75f);
+			return true;
+		}
+		return false;
+	}
+
+	void CGEEmbossFilter::setIntensity(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramIntensity, value);
+	}
+
+	void CGEEmbossFilter::setStride(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramStride, value);
+	}
+
+	// Converts the angle to a unit direction vector for the "norm" uniform.
+	void CGEEmbossFilter::setAngle(float value)
+	{
+		Vec2f v = Mat2::makeRotation(value) * Vec2f(1.0f, 0.0f);
+		m_program.bind();
+		m_program.sendUniformf(paramNorm, v[0], v[1]);
+	}
+}

+ 37 - 0
media/cge_library/src/main/jni/cge/filters/cgeExposureAdjust.cpp

@@ -0,0 +1,37 @@
+/*
+ * cgeExposureAdjust.cpp
+ *
+ *  Created on: 2015-1-29
+ *      Author: Wang Yang
+ */
+
+#include "cgeExposureAdjust.h"
+
+// Fragment shader: exposure adjustment in stops — rgb * 2^exposure,
+// alpha unchanged.
+static CGEConstString s_fshExposure = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform float exposure;
+
+void main()
+{
+	vec4 color = texture2D(inputImageTexture, textureCoordinate);
+	gl_FragColor = vec4(color.rgb * exp2(exposure), color.a);
+});
+
+namespace CGE
+{
+	CGEConstString CGEExposureFilter::paramName = "exposure";
+
+	bool CGEExposureFilter::init()
+	{
+		return initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshExposure);
+	}
+
+	// Intensity is the exposure value in stops (may be negative).
+	void CGEExposureFilter::setIntensity(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramName, value);
+	}
+}

+ 176 - 0
media/cge_library/src/main/jni/cge/filters/cgeFilterBasic.cpp

@@ -0,0 +1,176 @@
+/*
+* cgeFilterBasic.cpp
+*
+*  Created on: 2013-12-25
+*      Author: Wang Yang
+*        Mail: admin@wysaid.org
+*/
+
+#include "cgeFilterBasic.h"
+
+// Factory helper: heap-allocates the filter, runs init(), and returns
+// nullptr on failure (deleting the half-constructed instance).
+#define COMMON_FUNC(type) \
+type* proc = new type();\
+if(!proc->init())\
+{\
+	delete proc;\
+	proc = NULL;\
+}\
+return proc;\
+
+// Factory functions for the basic filter set. Each returns a ready-to-use
+// filter instance, or nullptr when shader initialization fails; the caller
+// owns the returned pointer.
+namespace CGE
+{
+	CGEBrightnessFilter* createBrightnessFilter()
+	{
+		COMMON_FUNC(CGEBrightnessFilter);
+	}
+
+	CGEBrightnessFastFilter* createBrightnessFastFilter()
+	{
+		COMMON_FUNC(CGEBrightnessFastFilter);
+	}
+
+	CGEContrastFilter* createContrastFilter()
+	{
+		COMMON_FUNC(CGEContrastFilter);
+	}
+
+	CGESharpenBlurFilter* createSharpenBlurFilter()
+	{
+		COMMON_FUNC(CGESharpenBlurFilter);
+	}
+
+	CGESharpenBlurFastFilter* createSharpenBlurFastFilter()
+	{
+		COMMON_FUNC(CGESharpenBlurFastFilter);
+	}
+
+	CGESharpenBlurSimpleFilter* createSharpenBlurSimpleFilter()
+	{
+		COMMON_FUNC(CGESharpenBlurSimpleFilter);
+	}
+
+	CGESharpenBlurSimpleBetterFilter* createSharpenBlurSimpleBetterFilter()
+	{
+		COMMON_FUNC(CGESharpenBlurSimpleBetterFilter);
+	}
+
+	CGESaturationHSLFilter* createSaturationHSLFilter()
+	{
+		COMMON_FUNC(CGESaturationHSLFilter);
+	}
+
+	CGESaturationFilter* createSaturationFilter()
+	{
+		COMMON_FUNC(CGESaturationFilter);
+	}
+	
+	CGEShadowHighlightFilter* createShadowHighlightFilter()
+	{
+		COMMON_FUNC(CGEShadowHighlightFilter);
+	}
+
+	CGEShadowHighlightFastFilter* createShadowHighlightFastFilter()
+	{
+		COMMON_FUNC(CGEShadowHighlightFastFilter);
+	}
+
+	CGEWhiteBalanceFilter* createWhiteBalanceFilter()
+	{
+		COMMON_FUNC(CGEWhiteBalanceFilter);
+	}
+
+	CGEWhiteBalanceFastFilter* createWhiteBalanceFastFilter()
+	{
+		COMMON_FUNC(CGEWhiteBalanceFastFilter);
+	}
+
+	CGEMonochromeFilter* createMonochromeFilter()
+	{
+		COMMON_FUNC(CGEMonochromeFilter);
+	}
+
+	CGECurveTexFilter* createCurveTexFilter()
+	{
+		COMMON_FUNC(CGECurveTexFilter);
+	}
+
+	CGECurveFilter* createCurveFilter()
+	{
+		COMMON_FUNC(CGECurveFilter);
+	}
+
+	CGEMoreCurveFilter* createMoreCurveFilter()
+	{
+		COMMON_FUNC(CGEMoreCurveFilter);
+	}
+
+	CGEMoreCurveTexFilter* createMoreCurveTexFilter()
+	{
+		COMMON_FUNC(CGEMoreCurveTexFilter);
+	}
+
+	CGEColorLevelFilter* createColorLevelFilter()
+	{
+		COMMON_FUNC(CGEColorLevelFilter);
+	}
+    
+	CGEVignetteFilter* createVignetteFilter()
+	{
+		COMMON_FUNC(CGEVignetteFilter);
+	}
+	CGEVignetteExtFilter* createVignetteExtFilter()
+	{
+		COMMON_FUNC(CGEVignetteExtFilter);
+	}
+    
+	CGETiltshiftVectorFilter* createTiltshiftVectorFilter()
+	{
+        
+        COMMON_FUNC(CGETiltshiftVectorFilter);
+	}
+    
+	CGETiltshiftEllipseFilter* createTiltshiftEllipseFilter()
+	{
+        COMMON_FUNC(CGETiltshiftEllipseFilter);
+	}
+    
+    CGETiltshiftVectorWithFixedBlurRadiusFilter* createFixedTiltshiftVectorFilter()
+    {
+        COMMON_FUNC(CGETiltshiftVectorWithFixedBlurRadiusFilter);
+    }
+    
+	CGETiltshiftEllipseWithFixedBlurRadiusFilter* createFixedTiltshiftEllipseFilter()
+    {
+        COMMON_FUNC(CGETiltshiftEllipseWithFixedBlurRadiusFilter);
+    }
+
+	CGESharpenBlurFastWithFixedBlurRadiusFilter* createSharpenBlurFastWithFixedBlurRadiusFilter()
+	{
+		 COMMON_FUNC(CGESharpenBlurFastWithFixedBlurRadiusFilter);
+	}
+
+	CGESelectiveColorFilter *createSelectiveColorFilter()
+	{
+		COMMON_FUNC(CGESelectiveColorFilter);
+	}
+
+	CGEExposureFilter* createExposureFilter()
+	{
+		COMMON_FUNC(CGEExposureFilter);
+	}
+
+	CGEHueAdjustFilter* createHueAdjustFilter()
+	{
+		COMMON_FUNC(CGEHueAdjustFilter);
+	}
+
+	CGEColorBalanceFilter* createColorBalanceFilter()
+	{
+		COMMON_FUNC(CGEColorBalanceFilter);
+	}
+
+	CGELookupFilter* createLookupFilter()
+	{
+		COMMON_FUNC(CGELookupFilter);
+	}
+}

+ 77 - 0
media/cge_library/src/main/jni/cge/filters/cgeHalftoneFilter.cpp

@@ -0,0 +1,77 @@
+/*
+ * cgeHalftoneFilter.cpp
+ *
+ *  Created on: 2015-1-29
+ *      Author: Wang Yang
+ */
+
+#include "cgeHalftoneFilter.h"
+
+// Fragment shader: halftone — snaps each texel to the center of its dot cell,
+// then draws a per-channel dot whose radius scales with darkness (1 - color).
+// aspectRatio corrects the distance metric so dots stay circular.
+static CGEConstString s_fshHalftone = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform vec2 dotPercent;
+uniform float aspectRatio;
+
+//const vec3 weight = vec3(0.2125, 0.7154, 0.0721);
+
+void main()
+{
+	vec2 samplePos = textureCoordinate - mod(textureCoordinate, dotPercent) + 0.5 * dotPercent;
+	vec2 coordToUse = vec2(textureCoordinate.x, (textureCoordinate.y - 0.5) * aspectRatio + 0.5);
+	vec2 adjustedPos = vec2(samplePos.x, (samplePos.y - 0.5) * aspectRatio + 0.5);
+
+	float dis = distance(coordToUse, adjustedPos);
+
+	vec4 color = texture2D(inputImageTexture, samplePos);
+	vec3 dotScaling = 1.0 - color.rgb;//dot(color.rgb, weight);
+	vec3 presenceDot = 1.0 - step(dis, dotPercent.x * dotScaling * 0.5);
+
+	gl_FragColor = vec4(presenceDot, color.a);
+});
+
+namespace CGE
+{
+	CGEConstString CGEHalftoneFilter::paramAspectRatio = "aspectRatio";
+	CGEConstString CGEHalftoneFilter::paramDotPercent = "dotPercent";
+
+	bool CGEHalftoneFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshHalftone))
+		{
+			setDotSize(1.0f);
+			return true;
+		}
+		return false;
+	}
+
+	// Dot size is stored here and converted to texture-space units at render
+	// time (it depends on the output FBO size).
+	void CGEHalftoneFilter::setDotSize(float value)
+	{
+		m_dotSize = value;
+	}
+
+	void CGEHalftoneFilter::setIntensity(float value)
+	{
+		setDotSize(value);
+	}
+
+	// Computes the per-frame uniforms (aspect ratio and dot size as a fraction
+	// of output width) before drawing, since the target size may change.
+	void CGEHalftoneFilter::render2Texture(CGE::CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+	{
+		handler->setAsTarget();
+		m_program.bind();
+		glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
+		glEnableVertexAttribArray(0);
+		glActiveTexture(GL_TEXTURE0);
+		glBindTexture(GL_TEXTURE_2D, srcTexture);
+		
+		CGESizei sz = handler->getOutputFBOSize();
+		float aspectRatio = sz.width / (float)sz.height;
+		float dotPercent = m_dotSize / sz.width;
+		m_program.sendUniformf(paramAspectRatio, aspectRatio);
+		m_program.sendUniformf(paramDotPercent, dotPercent, dotPercent / aspectRatio);
+		glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+		cgeCheckGLError("glDrawArrays");
+	}
+}

+ 65 - 0
media/cge_library/src/main/jni/cge/filters/cgeHazeFilter.cpp

@@ -0,0 +1,65 @@
+/*
+ * cgeHazeFilter.cpp
+ *
+ *  Created on: 2015-2-1
+ *      Author: Wang Yang
+ */
+
+#include "cgeHazeFilter.h"
+
+// Fragment shader: haze removal — subtracts a distance-weighted haze color
+// and renormalizes. The haze amount varies linearly with y (slope) plus a
+// constant base distance (dis).
+static CGEConstString s_fshHaze = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+
+uniform sampler2D inputImageTexture;
+
+uniform float dis;
+uniform float slope;
+uniform vec3 hazeColor;
+
+void main()
+{
+	float d = textureCoordinate.y * slope + dis;
+
+	vec4 c = texture2D(inputImageTexture, textureCoordinate);
+
+	c.rgb = (c.rgb - d * hazeColor.rgb) / (1.0 -d);
+
+	gl_FragColor = c;
+});
+
+namespace CGE
+{
+	CGEConstString CGEHazeFilter::paramDistance = "dis";
+	CGEConstString CGEHazeFilter::paramSlope = "slope";
+	CGEConstString CGEHazeFilter::paramHazeColor = "hazeColor";
+
+	// NOTE(review): init() never calls setSlope(), so "slope" relies on the
+	// GL default uniform value of 0.0 — confirm that is intentional.
+	bool CGEHazeFilter::init()
+	{
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshHaze))
+		{
+			setDistance(0.2f);
+			setHazeColor(1.0f, 1.0f, 1.0f);
+			return true;
+		}
+		return false;
+	}
+
+	void CGEHazeFilter::setDistance(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramDistance, value);
+	}
+
+	void CGEHazeFilter::setSlope(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramSlope, value);
+	}
+
+	void CGEHazeFilter::setHazeColor(float r, float g, float b)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramHazeColor, r, g, b);
+	}
+}

+ 64 - 0
media/cge_library/src/main/jni/cge/filters/cgeHueAdjust.cpp

@@ -0,0 +1,64 @@
+/*
+ * cgeHueAdjust.cpp
+ *
+ *  Created on: 2015-1-29
+ *      Author: Wang Yang
+ */
+
+#include "cgeHueAdjust.h"
+
+// Fragment shader: hue rotation in YIQ space — converts RGB to YIQ, rotates
+// the chroma (I,Q) vector by hueAdjust radians, converts back.
+static CGEConstString s_fshHue = CGE_SHADER_STRING_PRECISION_H
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+
+uniform float hueAdjust;
+
+vec3 rgb2yiq(vec3 src)
+{
+	return src * mat3(0.299, 0.587, 0.114,
+		0.595716, -0.274453, -0.321263,
+		0.211456, -0.522591, 0.31135);
+}
+
+vec3 yiq2rgb(vec3 src)
+{
+	return src * mat3(1.0, 0.9563, 0.6210,
+		1.0, -0.2721, -0.6474,
+		1.0, -1.1070, 1.7046);
+}
+
+void main()
+{
+	vec4 color = texture2D(inputImageTexture, textureCoordinate);
+	vec3 yiq = rgb2yiq(color.rgb);
+	float hue = atan(yiq.z, yiq.y);
+	float chroma = length(yiq.yz);
+
+	hue -= hueAdjust;
+	yiq.yz = vec2(cos(hue), sin(hue)) * chroma;
+
+	gl_FragColor = vec4(yiq2rgb(yiq), color.a);	
+});
+
+namespace CGE
+{
+	CGEConstString CGEHueAdjustFilter::paramName = "hueAdjust";
+
+	bool CGEHueAdjustFilter::init()
+	{
+		return initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshHue);
+	}
+
+	// Hue rotation angle in radians.
+	void CGEHueAdjustFilter::setHue(float value)
+	{
+		m_program.bind();
+		m_program.sendUniformf(paramName, value);
+	}
+
+	void CGEHueAdjustFilter::setIntensity(float intensity)
+	{
+		setHue(intensity);
+	}
+
+}

+ 200 - 0
media/cge_library/src/main/jni/cge/filters/cgeLerpblurFilter.cpp

@@ -0,0 +1,200 @@
+/*
+ * cgeLerpblurFilter.cpp
+ *
+ *  Created on: 2015-8-3
+ *      Author: Wang Yang
+ */
+
+#include "cgeLerpblurFilter.h"
+#include <cmath>
+
+static CGEConstString s_fshScale = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+void main()
+{
+	gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
+}
+);
+
+namespace CGE
+{
+	CGELerpblurFilter::~CGELerpblurFilter()
+	{
+		_clearMipmaps();
+	}
+
+	// Compiles the passthrough shader and resets the mipmap cache state.
+	bool CGELerpblurFilter::init()
+	{
+		// Fix: zero the entire texture-cache array. The old code used
+		// sizeof(MAX_LERP_BLUR_INTENSITY) — the size of an int constant
+		// (typically 4 bytes) — leaving most of m_texCache uninitialized,
+		// so later texID/size reads saw garbage.
+		memset(m_texCache, 0, sizeof(m_texCache));
+		m_intensity = 0;
+		if(initShadersFromString(g_vshDefaultWithoutTexCoord, s_fshScale))
+		{
+			m_mipmapBase = 1.0f;
+			m_isBaseChanged = true;
+			return true;
+		}
+		return false;
+	}
+
+	// Sets the number of downscale/upscale passes, clamped to the maximum.
+	void CGELerpblurFilter::setBlurLevel(int value)
+	{
+		m_intensity = value;
+		if(m_intensity > MAX_LERP_BLUR_INTENSITY)
+			m_intensity = MAX_LERP_BLUR_INTENSITY;
+	}
+
+	// Maps a [0,1] intensity: the first half selects the blur level, the
+	// second half keeps the maximum level and grows the mipmap scale base
+	// instead (stronger blur via smaller intermediate textures).
+	void CGELerpblurFilter::setIntensity(float value)
+	{
+		if(value <= 0.5f)
+		{
+			setBlurLevel(int(value * (2 * MAX_LERP_BLUR_INTENSITY)));
+			if(m_mipmapBase != 1.0f)
+				setMipmapBase(1.0f);
+		}
+		else
+		{
+			setBlurLevel(MAX_LERP_BLUR_INTENSITY);
+			setMipmapBase((value - 0.5f) * 4.0f + 1.0f);
+		}
+	}
+
+	// (Re)creates the chain of progressively smaller RGBA textures used as
+	// manual "mipmap" levels for the lerp blur, sized for the given target.
+	void CGELerpblurFilter::_genMipmaps(int width, int height)
+	{
+		_clearMipmaps();
+		GLuint texIDs[MAX_LERP_BLUR_INTENSITY];
+
+#ifdef IOS_SDK
+        
+        static GLenum textureType = GL_FALSE;
+        
+        if(textureType == GL_FALSE)
+        {
+            textureType = GL_UNSIGNED_BYTE;
+            
+            //Apple A9 (iphone 6s+) GL_UNSIGNED_BYTE textures have low precision; GL_HALF_FLOAT_OES is required.
+            //For better compatibility, use GL_HALF_FLOAT_OES on every device that supports the EXT_color_buffer_half_float extension.
+#ifdef GL_HALF_FLOAT_OES
+            
+            if(cgeCheckGLExtension("EXT_color_buffer_half_float"))
+            {
+                textureType = GL_HALF_FLOAT_OES;
+                CGE_LOG_INFO("###Lerp blur: EXT_color_buffer_half_float used!!\n");
+            }
+#endif
+        }
+      
+#else
+        
+        static const GLenum textureType = GL_UNSIGNED_BYTE;
+        
+#endif
+        
+		glGenTextures(MAX_LERP_BLUR_INTENSITY, texIDs);
+		for(int i = 0; i != MAX_LERP_BLUR_INTENSITY; ++i)
+		{
+			// Each level is smaller than the last; never below 1x1.
+			CGESizei sz(_calcLevel(width, i + 2), _calcLevel(height, i + 2));
+			if(sz.width < 1) 
+				sz.width = 1;
+			if(sz.height < 1)
+				sz.height = 1;
+			glBindTexture(GL_TEXTURE_2D, texIDs[i]);
+			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz.width, sz.height, 0, GL_RGBA, textureType, 0);
+			glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+			glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+			glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+			glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+			m_texCache[i].texID = texIDs[i];
+			m_texCache[i].size = sz;
+		}
+
+	}
+
+	// Deletes all cached mipmap textures and resets the cache bookkeeping.
+	void CGELerpblurFilter::_clearMipmaps()
+	{
+		GLuint texIDs[MAX_LERP_BLUR_INTENSITY];
+		for(int i = 0; i != MAX_LERP_BLUR_INTENSITY; ++i)
+			texIDs[i] = m_texCache[i].texID;
+		glDeleteTextures(MAX_LERP_BLUR_INTENSITY, texIDs);
+		// Fix: zero the entire cache array. The old code used
+		// sizeof(MAX_LERP_BLUR_INTENSITY) — the size of an int constant
+		// (typically 4 bytes) — so deleted texture IDs stayed in the cache
+		// and could be rebound later.
+		memset(m_texCache, 0, sizeof(m_texCache));
+		m_cacheTargetSize.set(0, 0);
+	}
+
+	// Lerp blur: repeatedly downsamples the source through the texture chain
+	// with linear filtering, then upsamples back, and finally draws the result
+	// into the handler's target. Level 0 blur is a no-op (buffer swap only).
+	void CGELerpblurFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+	{
+		if(m_intensity <= 0)
+		{
+			handler->swapBufferFBO();
+			return;
+		}
+
+		//TODO: Useless code to avoid some strange error on some devices: mx4&mx5(powerVR g6200)
+		handler->setAsTarget();
+
+		m_program.bind();
+
+		glEnableVertexAttribArray(0);
+		glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);		
+		glActiveTexture(GL_TEXTURE0);
+		
+		const CGESizei& sz = handler->getOutputFBOSize();
+
+		// Rebuild the texture chain when the target size or the mipmap base
+		// changed since the last frame.
+		if(m_texCache[0].texID == 0 || m_cacheTargetSize.width != sz.width || m_cacheTargetSize.height != sz.height || m_isBaseChanged)
+		{
+			_genMipmaps(sz.width, sz.height);
+			m_cacheTargetSize = sz;
+			m_isBaseChanged = false;
+			CGE_LOG_INFO("CGELerpblurFilter::render2Texture - Base Changing!\n");
+		}
+
+		// Source -> level 0.
+		m_framebuffer.bindTexture2D(m_texCache[0].texID);
+		glBindTexture(GL_TEXTURE_2D, srcTexture);
+		glViewport(0, 0, m_texCache[0].size.width, m_texCache[0].size.height);
+		glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+		glFlush();
+
+		//down scale
+		for(int i = 1; i < m_intensity; ++i)
+		{
+			TextureCache& texCache = m_texCache[i];
+			m_framebuffer.bindTexture2D(texCache.texID);
+			glViewport(0, 0, texCache.size.width, texCache.size.height);
+
+			glBindTexture(GL_TEXTURE_2D, m_texCache[i - 1].texID);			
+			glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+			glFlush();
+		}
+
+		//up scale
+		for(int i = m_intensity - 1; i > 0; --i)
+		{
+			TextureCache& texCache = m_texCache[i - 1];
+			m_framebuffer.bindTexture2D(texCache.texID);
+			glViewport(0, 0, texCache.size.width, texCache.size.height);
+
+			glBindTexture(GL_TEXTURE_2D, m_texCache[i].texID);			
+			glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+			glFlush();
+		}
+
+		// Level 0 -> final target.
+		handler->setAsTarget();
+		glBindTexture(GL_TEXTURE_2D, m_texCache[0].texID);
+		glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+
+	}
+
+	// Sets the downscale factor base (clamped to >= 0.6) and marks the texture
+	// chain for regeneration on the next render.
+	void CGELerpblurFilter::setMipmapBase(float value)
+	{
+		m_mipmapBase = value;
+		if(m_mipmapBase < 0.6f)
+			m_mipmapBase = 0.6f;
+		m_isBaseChanged = true;
+	}
+
+	// Computes the edge length of a given chain level. Linear in "level"
+	// (the exponential variant is kept commented out).
+	int CGELerpblurFilter::_calcLevel(int len, int level)
+	{
+		//return len / powf(m_mipmapBase, level);
+		return len / (level * m_mipmapBase);
+	}

+ 66 - 0
media/cge_library/src/main/jni/cge/filters/cgeLookupFilter.cpp

@@ -0,0 +1,66 @@
+/*
+* cgeLookupFilter.cpp
+*
+*  Created on: 2016-7-4
+*      Author: Wang Yang
+* Description: Full-image LUT (lookup-table) filter (全图LUT滤镜)
+*/
+
+#include "cgeLookupFilter.h"
+
+// Fragment shader: maps each pixel through a 512x512 lookup table laid out as
+// an 8x8 grid of 64x64 tiles (one tile per quantized blue level; stepDis =
+// 1/8 tile stride, perPixel = 1/512 texel size, halfPixel centers the sample
+// on a texel). blue (scaled by 63) selects the tile; red/green index within
+// it. Alpha is passed through unchanged.
+// NOTE(review): nearest-tile lookup only — there is no interpolation between
+// the two adjacent blue slices, so strong blue gradients may band slightly.
+static CGEConstString s_fsh = CGE_SHADER_STRING_PRECISION_M
+(
+varying vec2 textureCoordinate;
+uniform sampler2D inputImageTexture;
+uniform sampler2D lookupTexture;
+
+const float stepDis = 1.0 / 8.0;
+const float perPixel = 1.0 / 512.0;
+const float halfPixel = 0.5 / 512.0;
+
+void main()
+{
+	vec4 color = texture2D(inputImageTexture, textureCoordinate);
+
+	float blue = color.b * 63.0;
+	vec2 coord1;
+	coord1.y = floor(blue / 8.0);
+	coord1.x = floor(blue) - (coord1.y * 8.0);
+	
+	coord1 = coord1 * stepDis + halfPixel + (stepDis - perPixel) * color.xy;
+	gl_FragColor.rgb = texture2D(lookupTexture, coord1).rgb;
+	gl_FragColor.a = color.a;
+}
+);
+
+namespace CGE
+{
+	// Starts with no lookup texture; one must be assigned before rendering.
+	CGELookupFilter::CGELookupFilter() : m_lookupTexture(0)
+	{
+	}
+
+	// Releases the owned lookup texture (glDeleteTextures ignores the id 0).
+	CGELookupFilter::~CGELookupFilter()
+	{
+		glDeleteTextures(1, &m_lookupTexture);
+	}
+
+	// Compiles the LUT shader pair and points the "lookupTexture" sampler at
+	// texture unit 1. Returns false when shader setup fails.
+	bool CGELookupFilter::init()
+	{
+		if(!initShadersFromString(g_vshDefaultWithoutTexCoord, s_fsh))
+			return false;
+
+		m_program.bind();
+		m_program.sendUniformi("lookupTexture", 1);
+		return true;
+	}
+
+	// Binds the lookup table on texture unit 1, then delegates the actual
+	// draw of srcTexture to the base filter's render path.
+	void CGELookupFilter::render2Texture(CGEImageHandlerInterface* handler, GLuint srcTexture, GLuint vertexBufferID)
+	{
+		glActiveTexture(GL_TEXTURE1);
+		glBindTexture(GL_TEXTURE_2D, m_lookupTexture);
+		CGEImageFilterInterface::render2Texture(handler, srcTexture, vertexBufferID);
+	}
+
+}

+ 33 - 0
media/cge_library/src/main/jni/cge/filters/cgeMaxValueFilter.cpp

@@ -0,0 +1,33 @@
+/*
+* cgeMaxValueFilter.cpp
+*
+*  Created on: 2015-3-20
+*      Author: Wang Yang
+* Description: Max-value filtering (最大值滤波)
+*/
+
+#include "cgeMaxValueFilter.h"
+
+static CGEConstString s_maxValueFunc = CGE_SHADER_STRING
+(
+float lum(vec4 value)
+{
+	return dot(value.rgb, vec3(0.299, 0.587, 0.114));
+}
+vec4 getValue(vec4 newValue, vec4 originValue)
+{
+	return mix(newValue, originValue, step(lum(newValue), lum(originValue)));
+});
+
+namespace CGE
+{
+	// Both 3x3 max-value filter variants compare neighborhood pixels with the
+	// same shared GLSL snippet defined above (s_maxValueFunc).
+	const char* CGEMaxValueFilter3x3::getShaderCompFunc()
+	{
+		return s_maxValueFunc;
+	}
+
+	const char* CGEMaxValueFilter3x3Plus::getShaderCompFunc()
+	{
+		return s_maxValueFunc;
+	}
+}

+ 0 - 0
media/cge_library/src/main/jni/cge/filters/cgeMidValueFilter.cpp


Kaikkia tiedostoja ei voida näyttää, sillä liian monta tiedostoa muuttui tässä diffissä