Browse Source

push first

jixionghui 4 years ago
commit
7537f38968
100 changed files with 4964 additions and 0 deletions
  1. 16 0
      .gitgnore
  2. 1 0
      OpenGLlibrary/.gitignore
  3. 130 0
      OpenGLlibrary/OpenGLlibrary.iml
  4. 26 0
      OpenGLlibrary/build.gradle
  5. 25 0
      OpenGLlibrary/proguard-rules.pro
  6. 28 0
      OpenGLlibrary/src/main/AndroidManifest.xml
  7. 6 0
      OpenGLlibrary/src/main/assets/shader/base.frag
  8. 12 0
      OpenGLlibrary/src/main/assets/shader/base.vert
  9. 6 0
      OpenGLlibrary/src/main/assets/shader/base_fragment.sh
  10. 10 0
      OpenGLlibrary/src/main/assets/shader/base_vertex.sh
  11. 62 0
      OpenGLlibrary/src/main/assets/shader/beauty/beauty.frag
  12. 35 0
      OpenGLlibrary/src/main/assets/shader/beauty/beauty.vert
  13. 147 0
      OpenGLlibrary/src/main/assets/shader/choose/choose.frag
  14. 42 0
      OpenGLlibrary/src/main/assets/shader/choose/choose.vert
  15. 9 0
      OpenGLlibrary/src/main/assets/shader/color/gray_fragment.frag
  16. 8 0
      OpenGLlibrary/src/main/assets/shader/mh/brightness.frag
  17. 8 0
      OpenGLlibrary/src/main/assets/shader/mh/contrast.frag
  18. 11 0
      OpenGLlibrary/src/main/assets/shader/mh/saturation.frag
  19. 15 0
      OpenGLlibrary/src/main/assets/shader/mh/vignette.frag
  20. 7 0
      OpenGLlibrary/src/main/assets/shader/oes.frag
  21. 12 0
      OpenGLlibrary/src/main/assets/shader/oes.vert
  22. 7 0
      OpenGLlibrary/src/main/assets/shader/oes/default_fragment.sh
  23. 10 0
      OpenGLlibrary/src/main/assets/shader/oes/default_vertex.sh
  24. 7 0
      OpenGLlibrary/src/main/assets/shader/oes_base_fragment.sh
  25. 10 0
      OpenGLlibrary/src/main/assets/shader/oes_base_vertex.sh
  26. 10 0
      OpenGLlibrary/src/main/assets/shader/pkm_mul.frag
  27. 9 0
      OpenGLlibrary/src/main/assets/shader/pkm_mul.vert
  28. 28 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/FilterInfo.java
  29. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/FilterInfo.java.bak
  30. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/SizeInfo.java
  31. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/SizeInfo.java.bak
  32. 50 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/TrackUtils.java
  33. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/TrackUtils.java.bak
  34. 206 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/VideoDecode.java
  35. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/VideoDecode.java.bak
  36. 424 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/CameraRecorder.java
  37. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/CameraRecorder.java.bak
  38. 196 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/EGLHelper.java
  39. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/EGLHelper.java.bak
  40. 41 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BaseFilter.java
  41. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BaseFilter.java.bak
  42. 75 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BeautyFilter.java
  43. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BeautyFilter.java.bak
  44. 120 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/ChooseFilter.java
  45. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/ChooseFilter.java.bak
  46. 43 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DistortionFilter.java
  47. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DistortionFilter.java.bak
  48. 13 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DrawFilter.java
  49. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DrawFilter.java.bak
  50. 216 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Filter.java
  51. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Filter.java.bak
  52. 113 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/GroupFilter.java
  53. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/GroupFilter.java.bak
  54. 34 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Mp4EditFilter.java
  55. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Mp4EditFilter.java.bak
  56. 48 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/OesFilter.java
  57. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/OesFilter.java.bak
  58. 87 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/FrameBuffer.java
  59. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/FrameBuffer.java.bak
  60. 415 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MP4Edior.java
  61. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MP4Edior.java.bak
  62. 59 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/AdjustFilter.java
  63. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/AdjustFilter.java.bak
  64. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/BrightnessFilter.java
  65. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/BrightnessFilter.java.bak
  66. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/ContrastFilter.java
  67. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/ContrastFilter.java.bak
  68. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/SaturationFilter.java
  69. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/SaturationFilter.java.bak
  70. 57 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/VignetteFilter.java
  71. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/VignetteFilter.java.bak
  72. 582 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Mp4Processor.java
  73. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Mp4Processor.java.bak
  74. 13 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Renderer.java
  75. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Renderer.java.bak
  76. 180 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/TransUtil.java
  77. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/TransUtil.java.bak
  78. 88 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Transformation.java
  79. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Transformation.java.bak
  80. 80 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/WrapRenderer.java
  81. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/WrapRenderer.java.bak
  82. 150 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraParamUtil.java
  83. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraParamUtil.java.bak
  84. 47 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraUtils.java
  85. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraUtils.java.bak
  86. 87 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/FormatUtils.java
  87. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/FormatUtils.java.bak
  88. 133 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GetPathFromUri4kitkat.java
  89. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GetPathFromUri4kitkat.java.bak
  90. 106 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GpuUtils.java
  91. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GpuUtils.java.bak
  92. 195 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/MatrixUtils.java
  93. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/MatrixUtils.java.bak
  94. 36 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/UriUtils.java
  95. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/UriUtils.java.bak
  96. 228 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/View/CameraRecordView.java
  97. BIN
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/View/CameraRecordView.java.bak
  98. 5 0
      OpenGLlibrary/src/main/res/drawable/btn_shutter_background.xml
  99. BIN
      OpenGLlibrary/src/main/res/drawable/change_camera.png
  100. 0 0
      OpenGLlibrary/src/main/res/drawable/editchoose_backgroud.xml

+ 16 - 0
.gitgnore

@@ -0,0 +1,16 @@
+*.iml
+/.gradle
+/local.properties
+/.idea/caches
+/.idea/libraries
+/.idea/modules.xml
+/.idea/workspace.xml
+/.idea/navEditor.xml
+/.idea/assetWizardSettings.xml
+.DS_Store
+/build
+/captures
+.externalNativeBuild
+.cxx
+/.idea/
+.gradle

+ 1 - 0
OpenGLlibrary/.gitignore

@@ -0,0 +1 @@
+/build

+ 130 - 0
OpenGLlibrary/OpenGLlibrary.iml

@@ -0,0 +1,130 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module external.linked.project.id=":OpenGLlibrary" external.linked.project.path="$MODULE_DIR$" external.root.project.path="$MODULE_DIR$/.." external.system.id="GRADLE" type="JAVA_MODULE" version="4">
+  <component name="FacetManager">
+    <facet type="android-gradle" name="Android-Gradle">
+      <configuration>
+        <option name="GRADLE_PROJECT_PATH" value=":OpenGLlibrary" />
+      </configuration>
+    </facet>
+    <facet type="android" name="Android">
+      <configuration>
+        <option name="SELECTED_BUILD_VARIANT" value="debug" />
+        <option name="ASSEMBLE_TASK_NAME" value="assembleDebug" />
+        <option name="COMPILE_JAVA_TASK_NAME" value="compileDebugSources" />
+        <afterSyncTasks>
+          <task>generateDebugSources</task>
+        </afterSyncTasks>
+        <option name="ALLOW_USER_CONFIGURATION" value="false" />
+        <option name="MANIFEST_FILE_RELATIVE_PATH" value="/src/main/AndroidManifest.xml" />
+        <option name="RES_FOLDER_RELATIVE_PATH" value="/src/main/res" />
+        <option name="RES_FOLDERS_RELATIVE_PATH" value="file://$MODULE_DIR$/src/main/res;file://$MODULE_DIR$/build/generated/res/rs/debug;file://$MODULE_DIR$/build/generated/res/resValues/debug" />
+        <option name="TEST_RES_FOLDERS_RELATIVE_PATH" value="" />
+        <option name="ASSETS_FOLDER_RELATIVE_PATH" value="/src/main/assets" />
+        <option name="PROJECT_TYPE" value="1" />
+      </configuration>
+    </facet>
+  </component>
+  <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_7">
+    <output url="file://$MODULE_DIR$/build/intermediates/classes/debug" />
+    <output-test url="file://$MODULE_DIR$/build/intermediates/classes/test/debug" />
+    <exclude-output />
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/apt/debug" isTestSource="false" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/aidl/debug" isTestSource="false" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/debug" isTestSource="false" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/debug" isTestSource="false" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/debug" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/debug" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/apt/androidTest/debug" isTestSource="true" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/aidl/androidTest/debug" isTestSource="true" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/buildConfig/androidTest/debug" isTestSource="true" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/rs/androidTest/debug" isTestSource="true" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/res/rs/androidTest/debug" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/res/resValues/androidTest/debug" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/build/generated/source/apt/test/debug" isTestSource="true" generated="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/res" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/resources" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/assets" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/aidl" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/java" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/rs" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/debug/shaders" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/res" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/resources" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/assets" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/aidl" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/java" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/rs" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTestDebug/shaders" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/res" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/resources" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/assets" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/aidl" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/java" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/rs" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/testDebug/shaders" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/res" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/assets" type="java-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/aidl" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/rs" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/main/shaders" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/res" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/resources" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/assets" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/aidl" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/java" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/rs" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/androidTest/shaders" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/res" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/assets" type="java-test-resource" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/aidl" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/rs" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/shaders" isTestSource="true" />
+      <excludeFolder url="file://$MODULE_DIR$/build/generated/not_namespaced_r_class_sources" />
+      <excludeFolder url="file://$MODULE_DIR$/build/generated/source/r" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/annotations" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/attr" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/blame" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/check-manifest" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/classes" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/extractedTypedefs" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/incremental" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/intermediate-jars" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/javaPrecompile" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/jniLibs" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/library_and_local_jars_jni" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/manifests" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/packaged-aidl" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/packaged-classes" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/packagedAssets" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/packaged_res" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/public_res" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/res" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/rs" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/shaders" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/symbols" />
+      <excludeFolder url="file://$MODULE_DIR$/build/intermediates/transforms" />
+      <excludeFolder url="file://$MODULE_DIR$/build/outputs" />
+      <excludeFolder url="file://$MODULE_DIR$/build/tmp" />
+    </content>
+    <orderEntry type="jdk" jdkName="Android API 28 Platform" jdkType="Android SDK" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="library" name="Gradle: com.android.support.constraint:constraint-layout-solver:1.0.2@jar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-media-compat:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-annotations:25.3.1@jar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:recyclerview-v7:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support.constraint:constraint-layout:1.0.2@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-vector-drawable:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-core-utils:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:appcompat-v7:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-v4:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-compat:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:animated-vector-drawable:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-core-ui:25.3.1@aar" level="project" />
+    <orderEntry type="library" name="Gradle: com.android.support:support-fragment:25.3.1@aar" level="project" />
+  </component>
+</module>

+ 26 - 0
OpenGLlibrary/build.gradle

@@ -0,0 +1,26 @@
+apply plugin: 'com.android.library'
+
+android {
+    compileSdkVersion compile_version
+    defaultConfig {
+        minSdkVersion min_version
+        targetSdkVersion target_version
+        versionCode 1
+        versionName "1.0"
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
+    }
+}
+
+dependencies {
+    api fileTree(include: ['*.jar'], dir: 'libs')
+    api 'com.android.support:appcompat-v7:25.3.1'
+    api 'com.android.support:recyclerview-v7:25.3.1'
+    api 'com.android.support.constraint:constraint-layout:1.0.2'
+//    api 'com.android.support:support-v4:25.3.1'
+//    api 'com.android.support:support-v13:25.3.1'
+}

+ 25 - 0
OpenGLlibrary/proguard-rules.pro

@@ -0,0 +1,25 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in C:\Users\Administrator\AppData\Local\Android\Sdk/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile

+ 28 - 0
OpenGLlibrary/src/main/AndroidManifest.xml

@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          package="com.joe.camera2recorddemo">
+
+    <uses-feature
+        android:glEsVersion="0x00020000"
+        android:required="true"/>
+
+    <uses-permission android:name="android.permission.CAMERA"/>
+    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.WAKE_LOCK"/>
+
+    <!--<application-->
+        <!--android:allowBackup="true"-->
+        <!--android:icon="@mipmap/ic_launcher"-->
+        <!--android:label="@string/app_name"-->
+        <!--android:roundIcon="@mipmap/ic_launcher_round"-->
+        <!--android:supportsRtl="true"-->
+        <!--android:theme="@style/AppTheme">-->
+        <!--<activity-->
+            <!--android:name=".Activity.MP4Activity"-->
+            <!--android:theme="@style/Theme.AppCompat.NoActionBar">-->
+        <!--</activity>-->
+    <!--</application>-->
+
+</manifest>

+ 6 - 0
OpenGLlibrary/src/main/assets/shader/base.frag

@@ -0,0 +1,6 @@
+precision mediump float;
+varying vec2 vTextureCo;
+uniform sampler2D uTexture;
+void main() {
+    gl_FragColor = texture2D( uTexture, vTextureCo);
+}

+ 12 - 0
OpenGLlibrary/src/main/assets/shader/base.vert

@@ -0,0 +1,12 @@
+attribute vec4 aVertexCo;
+attribute vec2 aTextureCo;
+
+uniform mat4 uVertexMatrix;
+uniform mat4 uTextureMatrix;
+
+varying vec2 vTextureCo;
+
+void main(){
+    gl_Position = uVertexMatrix*aVertexCo;
+    vTextureCo = aTextureCo.xy;
+}

+ 6 - 0
OpenGLlibrary/src/main/assets/shader/base_fragment.sh

@@ -0,0 +1,6 @@
+precision mediump float;
+varying vec2 textureCoordinate;
+uniform sampler2D vTexture;
+void main() {
+    gl_FragColor = texture2D( vTexture, textureCoordinate );
+}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/base_vertex.sh

@@ -0,0 +1,10 @@
+attribute vec4 vPosition;
+attribute vec2 vCoord;
+uniform mat4 vMatrix;
+
+varying vec2 textureCoordinate;
+
+void main(){
+    gl_Position = vMatrix*vPosition;
+    textureCoordinate = vCoord;
+}

+ 62 - 0
OpenGLlibrary/src/main/assets/shader/beauty/beauty.frag

@@ -0,0 +1,62 @@
+precision highp float;
+precision highp int;
+uniform sampler2D uTexture;
+uniform int uIternum;
+uniform float uACoef; //参数
+uniform float uMixCoef; //混合系数
+varying highp vec2 vTextureCo;
+varying highp vec2 vBlurCoord1s[14];
+const float distanceNormalizationFactor = 4.0;
+const mat3 saturateMatrix = mat3(1.1102,-0.0598,-0.061,-0.0774,1.0826,-0.1186,-0.0228,-0.0228,1.1772);
+
+void main() {
+
+    vec3 centralColor;
+    float central;
+    float gaussianWeightTotal;
+    float sum;
+    float sampleColor;
+    float distanceFromCentralColor;
+    float gaussianWeight;
+
+    central = texture2D( uTexture, vTextureCo ).g;
+    gaussianWeightTotal = 0.2;
+    sum = central * 0.2;
+
+    for (int i = 0; i < 6; i++) {
+        sampleColor = texture2D( uTexture, vBlurCoord1s[i] ).g;
+        distanceFromCentralColor = min( abs( central - sampleColor ) * distanceNormalizationFactor, 1.0 );
+        gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
+        gaussianWeightTotal += gaussianWeight;
+        sum += sampleColor * gaussianWeight;
+    }
+    for (int i = 6; i < 14; i++) {
+        sampleColor = texture2D( uTexture, vBlurCoord1s[i] ).g;
+        distanceFromCentralColor = min( abs( central - sampleColor ) * distanceNormalizationFactor, 1.0 );
+        gaussianWeight = 0.1 * (1.0 - distanceFromCentralColor);
+        gaussianWeightTotal += gaussianWeight;
+        sum += sampleColor * gaussianWeight;
+    }
+
+    sum = sum / gaussianWeightTotal;
+    centralColor = texture2D( uTexture, vTextureCo ).rgb;
+    sampleColor = centralColor.g - sum + 0.5;
+    for (int i = 0; i < uIternum; ++i) {
+        if (sampleColor <= 0.5) {
+            sampleColor = sampleColor * sampleColor * 2.0;
+        }
+        else {
+            sampleColor = 1.0 - ((1.0 - sampleColor)*(1.0 - sampleColor) * 2.0);
+        }
+    }
+
+    float aa = 1.0 + pow( centralColor.g, 0.3 )*uACoef;
+    vec3 smoothColor = centralColor*aa - vec3( sampleColor )*(aa - 1.0);
+    smoothColor = clamp( smoothColor, vec3( 0.0 ), vec3( 1.0 ) );
+    smoothColor = mix( centralColor, smoothColor, pow( centralColor.g, 0.33 ) );
+    smoothColor = mix( centralColor, smoothColor, pow( centralColor.g, uMixCoef ) );
+    gl_FragColor = vec4( pow( smoothColor, vec3( 0.96 ) ), 1.0 );
+    vec3 satcolor = gl_FragColor.rgb * saturateMatrix;
+    gl_FragColor.rgb = mix( gl_FragColor.rgb, satcolor, 0.23 );
+
+}

+ 35 - 0
OpenGLlibrary/src/main/assets/shader/beauty/beauty.vert

@@ -0,0 +1,35 @@
+attribute vec4 aVertexCo;
+attribute vec2 aTextureCo;
+varying vec2 vTextureCo;
+varying vec2 vBlurCoord1s[14];
+uniform float uWidth;
+uniform float uHeight;
+uniform mat4 uVertexMatrix;
+uniform mat4 uTextureMatrix;
+void main()
+{
+    gl_Position = uVertexMatrix*aVertexCo;
+    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;
+
+    highp float mul_x = 2.0 / uWidth;
+    highp float mul_y = 2.0 / uHeight;
+
+    vBlurCoord1s[0] = vTextureCo + vec2( 0.0 * mul_x, -10.0 * mul_y );
+    vBlurCoord1s[1] = vTextureCo + vec2( 8.0 * mul_x, -5.0 * mul_y );
+    vBlurCoord1s[2] = vTextureCo + vec2( 8.0 * mul_x, 5.0 * mul_y );
+    vBlurCoord1s[3] = aTextureCo + vec2( 0.0 * mul_x, 10.0 * mul_y );
+    vBlurCoord1s[4] = aTextureCo + vec2( -8.0 * mul_x, 5.0 * mul_y );
+    vBlurCoord1s[5] = aTextureCo + vec2( -8.0 * mul_x, -5.0 * mul_y );
+
+    mul_x = 1.2 / uWidth;
+    mul_y = 1.2 / uHeight;
+
+    vBlurCoord1s[6] = aTextureCo + vec2( 0.0 * mul_x, -6.0 * mul_y );
+    vBlurCoord1s[7] = aTextureCo + vec2( -4.0 * mul_x, -4.0 * mul_y );
+    vBlurCoord1s[8] = aTextureCo + vec2( -6.0 * mul_x, 0.0 * mul_y );
+    vBlurCoord1s[9] = aTextureCo + vec2( -4.0 * mul_x, 4.0 * mul_y );
+    vBlurCoord1s[10] = aTextureCo + vec2( 0.0 * mul_x, 6.0 * mul_y );
+    vBlurCoord1s[11] = aTextureCo + vec2( 4.0 * mul_x, 4.0 * mul_y );
+    vBlurCoord1s[12] = aTextureCo + vec2( 6.0 * mul_x, 0.0 * mul_y );
+    vBlurCoord1s[13] = aTextureCo + vec2( 4.0 * mul_x, -4.0 * mul_y );
+}

+ 147 - 0
OpenGLlibrary/src/main/assets/shader/choose/choose.frag

@@ -0,0 +1,147 @@
+precision highp float;
+
+varying vec2 vTextureCo;
+varying vec2 leftTextureCoordinate;
+varying vec2 rightTextureCoordinate;
+
+varying vec2 topTextureCoordinate;
+varying vec2 topLeftTextureCoordinate;
+varying vec2 topRightTextureCoordinate;
+
+varying vec2 bottomTextureCoordinate;
+varying vec2 bottomLeftTextureCoordinate;
+varying vec2 bottomRightTextureCoordinate;
+
+uniform sampler2D uTexture;
+
+uniform int vChangeType;
+uniform highp float intensity;
+uniform float uWidth;
+uniform float uHeight;
+
+const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);//灰度滤镜
+const highp vec3 COOL = vec3(0.0, 0.0, 0.1);//冷色调
+const highp vec3 WARM = vec3(0.1, 0.1, 0.0);//暖色调
+const vec2 texSize = vec2(1920,1080);//浮雕参数
+const lowp float intensityone = 1.0;
+const lowp mat4 colorMatrix = mat4(0.3588, 0.7044, 0.1368, 0.0,0.2990, 0.5870, 0.1140, 0.0,0.2392, 0.4696, 0.0912, 0.0,0, 0, 0, 1.0);
+const highp float threshold = 0.2;
+const highp float quantizationLevels = 10.0;
+const mediump mat3 convolutionMatrix = mat3(-1.0, 0.0, 1.0,-2.0, 0.0, 2.0,-1.0, 0.0, 1.0);
+
+const float stepcv=1.;
// 3x3 Sobel kernels. GLSL mat3 constructors are column-major; the manual
// element-by-element sums in the type-9 branch visit every cell, so they
// stay consistent with this layout.
const mat3 GX=mat3(-1.,0., +1., -2., 0., +2., -1., 0., +1.);
const mat3 GY=mat3(-1., -2., -1., 0., 0., 0., +1., +2., +1.);

// Red channel of the texel (shiftX, shiftY) pixels away from this fragment.
// uWidth/uHeight convert the pixel offsets into normalized texture coords.
// (The unused `center` parameter of the original helper has been removed.)
float colorR(float shiftX,float shiftY){
    return texture2D(uTexture,vec2(vTextureCo.x+shiftX/uWidth,vTextureCo.y+shiftY/uHeight)).r;
}

// Multi-effect fragment shader: vChangeType selects one of the filters below.
void main()
{
    vec4 textureColor = texture2D(uTexture, vTextureCo);
    if(vChangeType == 0){
        // 0: pass-through.
        gl_FragColor = textureColor;
    }
    else if(vChangeType == 1){
        // 1: cool tone - add the COOL rgb offset (same texel as textureColor,
        // so the redundant second fetch of the original is dropped).
        gl_FragColor = textureColor + vec4(COOL, 0.0);
    }
    else if(vChangeType == 2){
        // 2: warm tone - add the WARM rgb offset.
        gl_FragColor = textureColor + vec4(WARM, 0.0);
    }
    else if(vChangeType == 3){
        // 3: grayscale via the luminance weight vector W.
        gl_FragColor = vec4(vec3(dot(textureColor.rgb, W)), 1.0);
    }
    else if(vChangeType == 4){
        // 4: emboss - luminance of the difference against the upper-left
        // neighbour, offset around mid-grey.
        vec2 upLeftUV = vec2(vTextureCo.x - 1.0/texSize.x, vTextureCo.y - 1.0/texSize.y);
        vec4 delColor = textureColor - texture2D(uTexture, upLeftUV);
        float h = 0.3*delColor.x + 0.59*delColor.y + 0.11*delColor.z;
        vec4 bkColor = vec4(0.5, 0.5, 0.5, 1.0);
        gl_FragColor = vec4(h, h, h, 0.0) + bkColor;
    }
    else if(vChangeType == 5){
        // 5: colour inversion, alpha preserved.
        gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
    }
    else if(vChangeType == 6){
        // 6: colour-matrix filter blended with the original by intensityone.
        lowp vec4 outputColor = textureColor * colorMatrix;
        gl_FragColor = (intensityone * outputColor) + ((1.0 - intensityone) * textureColor);
    }
    else if(vChangeType == 7){
        // 7: toon shading - Sobel magnitude on the red channel marks edges
        // black; everything else is posterized to quantizationLevels steps.
        float bottomLeftIntensity = texture2D(uTexture, bottomLeftTextureCoordinate).r;
        float topRightIntensity = texture2D(uTexture, topRightTextureCoordinate).r;
        float topLeftIntensity = texture2D(uTexture, topLeftTextureCoordinate).r;
        float bottomRightIntensity = texture2D(uTexture, bottomRightTextureCoordinate).r;
        float leftIntensity = texture2D(uTexture, leftTextureCoordinate).r;
        float rightIntensity = texture2D(uTexture, rightTextureCoordinate).r;
        float bottomIntensity = texture2D(uTexture, bottomTextureCoordinate).r;
        float topIntensity = texture2D(uTexture, topTextureCoordinate).r;
        float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
        float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;

        float mag = length(vec2(h, v));
        vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
        float thresholdTest = 1.0 - step(threshold, mag);
        gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
    }
    else if(vChangeType == 8){
        // 8: generic 3x3 convolution with convolutionMatrix ([row][col]).
        mediump vec4 bottomColor = texture2D(uTexture, bottomTextureCoordinate);
        mediump vec4 bottomLeftColor = texture2D(uTexture, bottomLeftTextureCoordinate);
        mediump vec4 bottomRightColor = texture2D(uTexture, bottomRightTextureCoordinate);
        mediump vec4 leftColor = texture2D(uTexture, leftTextureCoordinate);
        mediump vec4 rightColor = texture2D(uTexture, rightTextureCoordinate);
        mediump vec4 topColor = texture2D(uTexture, topTextureCoordinate);
        mediump vec4 topRightColor = texture2D(uTexture, topRightTextureCoordinate);
        mediump vec4 topLeftColor = texture2D(uTexture, topLeftTextureCoordinate);

        mediump vec4 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
        resultColor += leftColor * convolutionMatrix[1][0] + textureColor * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
        resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
        gl_FragColor = resultColor;
    }
    else if(vChangeType == 9){
        // 9: Sobel edge detection via colorR with a stepcv-pixel radius.
        // Each of the nine texels is fetched exactly once (the original
        // fetched most of them twice: once into unused locals and again
        // inside the mat3 constructor).
        float leftTop      = colorR(-stepcv, -stepcv);
        float centerTop    = colorR(0., -stepcv);
        float rightTop     = colorR(stepcv, -stepcv);
        float leftCenter   = colorR(-stepcv, 0.);
        float center       = colorR(0., 0.);
        float rightCenter  = colorR(stepcv, 0.);
        float leftBottom   = colorR(-stepcv, stepcv);
        float centerBottom = colorR(0., stepcv);
        float rightBottom  = colorR(stepcv, stepcv);
        // Same constructor order as the original, so d's layout is unchanged.
        mat3 d = mat3(leftTop, centerTop, rightTop,
                      leftCenter, center, rightCenter,
                      leftBottom, centerBottom, rightBottom);
        float x = d[0][0]*GX[0][0]+d[1][0]*GX[1][0]+d[2][0]*GX[2][0]+
                   d[0][1]*GX[0][1]+d[1][1]*GX[1][1]+d[2][1]*GX[2][1]+
                   d[0][2]*GX[0][2]+d[1][2]*GX[1][2]+d[2][2]*GX[2][2];
        float y = d[0][0]*GY[0][0]+d[1][0]*GY[1][0]+d[2][0]*GY[2][0]+
                   d[0][1]*GY[0][1]+d[1][1]*GY[1][1]+d[2][1]*GY[2][1]+
                   d[0][2]*GY[0][2]+d[1][2]*GY[1][2]+d[2][2]*GY[2][2];
        gl_FragColor = vec4(vec3(length(vec2(x, y))), 1.);
    }
    else if(vChangeType == 10){
        // 10: inverted Sobel sketch - bright background, dark edges.
        float bottomLeftIntensity = texture2D(uTexture, bottomLeftTextureCoordinate).r;
        float topRightIntensity = texture2D(uTexture, topRightTextureCoordinate).r;
        float topLeftIntensity = texture2D(uTexture, topLeftTextureCoordinate).r;
        float bottomRightIntensity = texture2D(uTexture, bottomRightTextureCoordinate).r;
        float leftIntensity = texture2D(uTexture, leftTextureCoordinate).r;
        float rightIntensity = texture2D(uTexture, rightTextureCoordinate).r;
        float bottomIntensity = texture2D(uTexture, bottomTextureCoordinate).r;
        float topIntensity = texture2D(uTexture, topTextureCoordinate).r;
        float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
        float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;

        float mag = 1.0 - length(vec2(h, v));
        gl_FragColor = vec4(vec3(mag), 1.0);
    }
    else{
        // Unknown type: pass-through.
        gl_FragColor = textureColor;
    }

}

+ 42 - 0
OpenGLlibrary/src/main/assets/shader/choose/choose.vert

@@ -0,0 +1,42 @@
attribute vec4 aVertexCo;
attribute vec4 aTextureCo;

uniform mat4 uVertexMatrix;
// NOTE(review): uTextureMatrix is declared but never used below - vTextureCo
// is passed through untransformed, unlike oes.vert which applies its texture
// matrix. Confirm this is intentional.
uniform mat4 uTextureMatrix;

// Size of one texel in normalized texture coordinates.
uniform highp float texelWidth;
uniform highp float texelHeight;

// Centre coordinate plus the eight 1-texel neighbours, precomputed per vertex
// so the fragment shader's edge/convolution filters can sample a 3x3
// neighbourhood without per-fragment offset arithmetic.
varying vec2 vTextureCo;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;

varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;

varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;

void main()
{
     gl_Position = uVertexMatrix*aVertexCo;
     vTextureCo = aTextureCo.xy;

     // Offsets of one texel horizontally, vertically, and diagonally.
     vec2 widthStep = vec2(texelWidth, 0.0);
     vec2 heightStep = vec2(0.0, texelHeight);
     vec2 widthHeightStep = vec2(texelWidth, texelHeight);
     vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);

     leftTextureCoordinate = aTextureCo.xy - widthStep;
     rightTextureCoordinate = aTextureCo.xy + widthStep;

     topTextureCoordinate = aTextureCo.xy - heightStep;
     topLeftTextureCoordinate = aTextureCo.xy - widthHeightStep;
     topRightTextureCoordinate = aTextureCo.xy + widthNegativeHeightStep;

     bottomTextureCoordinate = aTextureCo.xy + heightStep;
     bottomLeftTextureCoordinate = aTextureCo.xy - widthNegativeHeightStep;
     bottomRightTextureCoordinate = aTextureCo.xy + widthHeightStep;
}

+ 9 - 0
OpenGLlibrary/src/main/assets/shader/color/gray_fragment.frag

@@ -0,0 +1,9 @@
precision mediump float;
varying vec2 textureCoordinate;
uniform sampler2D vTexture;
// "Grayscale" by broadcasting the green channel into r, g and b.
void main() {
    vec4 color=texture2D( vTexture, textureCoordinate);
    // NOTE(review): uses only the green channel, not a weighted luminance
    // (green dominates perceived brightness, so this is a cheap approximation)
    // - confirm a true luma dot product isn't wanted here.
    float rgb=color.g;
    vec4 c=vec4(rgb,rgb,rgb,color.a);
    gl_FragColor = c;
}

+ 8 - 0
OpenGLlibrary/src/main/assets/shader/mh/brightness.frag

@@ -0,0 +1,8 @@
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp float brightness;
// Additive brightness filter: shift every colour channel by the same
// offset, leaving alpha untouched.
void main() {
    lowp vec4 srcColor = texture2D(uTexture, vTextureCo);
    lowp vec3 shifted = srcColor.rgb + vec3(brightness);
    gl_FragColor = vec4(shifted, srcColor.w);
}

+ 8 - 0
OpenGLlibrary/src/main/assets/shader/mh/contrast.frag

@@ -0,0 +1,8 @@
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp float stepcv;
// Contrast filter: scale each channel's distance from mid-grey (0.5) by
// stepcv (1.0 = unchanged), leaving alpha untouched.
void main() {
    lowp vec4 srcColor = texture2D(uTexture, vTextureCo);
    lowp vec3 centered = srcColor.rgb - vec3(0.5);
    gl_FragColor = vec4(centered * stepcv + vec3(0.5), srcColor.w);
}

+ 11 - 0
OpenGLlibrary/src/main/assets/shader/mh/saturation.frag

@@ -0,0 +1,11 @@
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp float saturation;
// Luminance weights used to build the grey reference colour.
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
// Saturation filter: blend between the grey version (saturation = 0) and
// the original colour (saturation = 1); values above 1 over-saturate.
void main() {
    lowp vec4 srcColor = texture2D(uTexture, vTextureCo);
    lowp vec3 greyReference = vec3(dot(srcColor.rgb, luminanceWeighting));
    gl_FragColor = vec4(mix(greyReference, srcColor.rgb, saturation), srcColor.w);
}

+ 15 - 0
OpenGLlibrary/src/main/assets/shader/mh/vignette.frag

@@ -0,0 +1,15 @@
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;

// NOTE(review): vignetteCenter and vignetteColor are declared but never used
// below - the centre is hard-coded to (0.5, 0.5) and the vignette always
// fades to black. Confirm whether the Java side sets these uniforms and
// whether they were meant to be honoured.
uniform lowp vec2 vignetteCenter;
uniform lowp vec3 vignetteColor;
uniform highp float vignetteStart;
uniform highp float vignetteEnd;

// Vignette: darken fragments as their distance from the image centre grows
// past vignetteStart, reaching full black at vignetteEnd.
void main() {
    lowp vec3 rgb = texture2D(uTexture, vTextureCo).rgb;
    lowp float d = distance(vTextureCo, vec2(0.5,0.5));
    rgb *= (1.0 - smoothstep(vignetteStart, vignetteEnd, d));
    gl_FragColor = vec4(vec3(rgb),1.0);
}

+ 7 - 0
OpenGLlibrary/src/main/assets/shader/oes.frag

@@ -0,0 +1,7 @@
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCo;
uniform samplerExternalOES uTexture;
// Straight copy of an external-OES texture (camera/video stream) to the
// framebuffer; no colour processing.
void main() {
    gl_FragColor = texture2D( uTexture, vTextureCo);
}

+ 12 - 0
OpenGLlibrary/src/main/assets/shader/oes.vert

@@ -0,0 +1,12 @@
attribute vec4 aVertexCo;
attribute vec2 aTextureCo;

uniform mat4 uVertexMatrix;
uniform mat4 uTextureMatrix;

varying vec2 vTextureCo;

// Vertex pass for external-OES textures: position transformed by
// uVertexMatrix, texture coordinate by uTextureMatrix (presumably the
// matrix from SurfaceTexture.getTransformMatrix() - confirm caller).
void main(){
    gl_Position = uVertexMatrix*aVertexCo;
    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;
}

+ 7 - 0
OpenGLlibrary/src/main/assets/shader/oes/default_fragment.sh

@@ -0,0 +1,7 @@
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 textureCoordinate;
uniform samplerExternalOES vTexture;
// Straight copy of an external-OES texture to the framebuffer.
// NOTE(review): functionally identical to oes.frag / oes_base_fragment.sh,
// just with different attribute names - consider consolidating.
void main() {
    gl_FragColor = texture2D( vTexture, textureCoordinate );
}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/oes/default_vertex.sh

@@ -0,0 +1,10 @@
attribute vec4 vPosition;
attribute vec2 vCoordinate;
uniform mat4 vMatrix;

varying vec2 aCoordinate;

// Basic vertex pass: transform the position by vMatrix and forward the
// texture coordinate unchanged (no texture-coordinate matrix here, unlike
// oes_base_vertex.sh).
void main(){
    gl_Position=vMatrix*vPosition;
    aCoordinate=vCoordinate;
}

+ 7 - 0
OpenGLlibrary/src/main/assets/shader/oes_base_fragment.sh

@@ -0,0 +1,7 @@
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 textureCoordinate;
uniform samplerExternalOES vTexture;
// Straight copy of an external-OES texture to the framebuffer.
// NOTE(review): byte-for-byte duplicate of oes/default_fragment.sh.
void main() {
    gl_FragColor = texture2D( vTexture, textureCoordinate );
}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/oes_base_vertex.sh

@@ -0,0 +1,10 @@
attribute vec4 vPosition;
attribute vec2 vCoord;
uniform mat4 vMatrix;
uniform mat4 vCoordMatrix;
varying vec2 textureCoordinate;

// Vertex pass with a separate texture-coordinate matrix (vCoordMatrix),
// presumably the SurfaceTexture transform - confirm against the Java caller.
void main(){
    gl_Position = vMatrix*vPosition;
    textureCoordinate = (vCoordMatrix*vec4(vCoord,0,1)).xy;
}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/pkm_mul.frag

@@ -0,0 +1,10 @@
precision mediump float;
varying vec2 aCoord;
uniform sampler2D vTexture;
uniform sampler2D vTextureAlpha;

// Recombine a split texture pair: colour from vTexture, alpha taken from
// the red channel of the companion alpha texture.
void main() {
    vec4 rgba = texture2D(vTexture, aCoord);
    float alpha = texture2D(vTextureAlpha, aCoord).r;
    gl_FragColor = vec4(rgba.rgb, alpha);
}

+ 9 - 0
OpenGLlibrary/src/main/assets/shader/pkm_mul.vert

@@ -0,0 +1,9 @@
attribute vec4 vPosition;
attribute vec2 vCoord;
varying vec2 aCoord;
uniform mat4 vMatrix;

// Minimal vertex pass: forward the texture coordinate, transform the
// position by vMatrix.
void main(){
    aCoord = vCoord;
    gl_Position = vMatrix*vPosition;
}

+ 28 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/FilterInfo.java

@@ -0,0 +1,28 @@
package com.joe.camera2recorddemo.Entity;

/**
 * Immutable description of a selectable filter: a display name, the filter
 * type id it maps to, and a resource id associated with it.
 */
public class FilterInfo {

    private final String name;
    private final int type;
    private final int rid;

    /**
     * @param type filter type id
     * @param name display name
     * @param rid  associated resource id
     */
    public FilterInfo(int type, String name, int rid) {
        this.name = name;
        this.type = type;
        this.rid = rid;
    }

    /** Display name of the filter. */
    public String getName() {
        return name;
    }

    /** Filter type id. */
    public int getType() {
        return type;
    }

    /** Associated resource id. */
    public int getRid() {
        return rid;
    }
}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/FilterInfo.java.bak


+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/SizeInfo.java

@@ -0,0 +1,35 @@
package com.joe.camera2recorddemo.Entity;

/**
 * Mutable width/height pair used to pass pixel dimensions around.
 */
public class SizeInfo {

    private int mWidth;
    private int mHeight;

    public SizeInfo(int width, int height) {
        mWidth = width;
        mHeight = height;
    }

    /** Current width in pixels. */
    public int getWidth() {
        return mWidth;
    }

    /** Current height in pixels. */
    public int getHeight() {
        return mHeight;
    }

    /** Replace both dimensions at once. */
    public void setSize(int width, int height) {
        mWidth = width;
        mHeight = height;
    }

    @Override
    public String toString() {
        return "SizeInfo{mWidth=" + mWidth + ", mHeight=" + mHeight + "}";
    }
}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/SizeInfo.java.bak


+ 50 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/TrackUtils.java

@@ -0,0 +1,50 @@
+package com.joe.camera2recorddemo.MediaCodecUtil;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Log;
+
+/**
+ * Created by Yj on 2017/3/29.
+ */
+
+public class TrackUtils {
+
+	private static final String TAG = "TrackUtils";
+
+	/**
+	 * 查找视频轨道
+	 * @param extractor
+	 * @return
+	 */
+	public static int selectVideoTrack(MediaExtractor extractor) {
+		int numTracks = extractor.getTrackCount();
+		for (int i = 0; i < numTracks; i++) {
+			MediaFormat format = extractor.getTrackFormat(i);
+			String mime = format.getString(MediaFormat.KEY_MIME);
+			if (mime.startsWith("video/")) {
+				Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
+				return i;
+			}
+		}
+		return -1;
+	}
+
+	/**
+	 * 查找音频轨道
+	 * @param extractor
+	 * @return
+	 */
+	public static int selectAudioTrack(MediaExtractor extractor) {
+		int numTracks = extractor.getTrackCount();
+		for (int i = 0; i < numTracks; i++) {
+			MediaFormat format = extractor.getTrackFormat(i);
+			String mime = format.getString(MediaFormat.KEY_MIME);
+			if (mime.startsWith("audio/")) {
+				Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
+				return i;
+			}
+		}
+		return -1;
+	}
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/TrackUtils.java.bak


+ 206 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/VideoDecode.java

@@ -0,0 +1,206 @@
package com.joe.camera2recorddemo.MediaCodecUtil;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;


/**
 * Decodes a video file with MediaCodec, rendering every frame onto the
 * Surface supplied via {@link #setSurface(Surface)}, throttled to roughly
 * real-time by {@link #sleepRender}. Typical call order:
 * setSurface() -> decodePrepare() -> excuate(); stop() ends the loop.
 *
 * NOTE(review): decoder.getInputBuffer() below requires API 21 and
 * MediaFormat.KEY_ROTATION requires API 23 - confirm the project's minSdk.
 */
public class VideoDecode {
	private static final String TAG = "VideoToFrames";
	// Timeout in microseconds for dequeueing codec input/output buffers.
	private static final long DEFAULT_TIMEOUT_US = 10000;

    private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;


	// Dimensions of the current video, published by decodeFramesToImage().
	public int ImageWidth = 0;
	public int ImageHeight = 0;

	MediaExtractor extractor = null;
	MediaCodec decoder = null;
	MediaFormat mediaFormat;

	private boolean isLoop = false;// whether to restart playback after end of stream
	private boolean isStop = false;// whether playback has been stopped externally
	private String videoFilePath;

	/**
	 * Initialize extractor and decoder for the given file: select the first
	 * video track, request a flexible YUV420 colour format when supported,
	 * and configure the codec to render into mSurface (call
	 * {@link #setSurface(Surface)} beforehand).
	 * @param videoFilePath absolute path of the video file to decode
	 */
	public void decodePrepare(String videoFilePath) {
		this.videoFilePath = videoFilePath;
		extractor = null;
		decoder = null;
		try {
			File videoFile = new File(videoFilePath);
			extractor = new MediaExtractor();
			extractor.setDataSource(videoFile.toString());
			int trackIndex = TrackUtils.selectVideoTrack(extractor);
			if (trackIndex < 0) {
				throw new RuntimeException("No video track found in " + videoFilePath);
			}
			extractor.selectTrack(trackIndex);
			mediaFormat = extractor.getTrackFormat(trackIndex);
			String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
			decoder = MediaCodec.createDecoderByType(mime);
			if (isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))) {
				mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
				Log.i(TAG, "set decode color format to type " + decodeColorFormat);
			} else {
				Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
			}

			// Clear rotation metadata so output is not auto-rotated (prevents stretching).
			mediaFormat.setInteger(MediaFormat.KEY_ROTATION,0);
			// NOTE(review): frame-rate / I-frame-interval / bit-rate are encoder
			// keys; their effect on a decoder's input format is dubious - confirm.
			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
			decoder.configure(mediaFormat,mSurface, null, 0);
			decoder.start();
		} catch (IOException ioe) {
			// NOTE(review): message says "encoder" but this is the decoder path.
			throw new RuntimeException("failed init encoder", ioe);
		}
	}

	/** Release the codec and the extractor; tolerates repeated calls. */
	public void close() {
		try {
			if (decoder != null) {
				decoder.stop();
				decoder.release();
			}

			if (extractor != null) {
				extractor.release();
				extractor = null;
			}
		}catch (IllegalStateException e){
			e.printStackTrace();
		}
	}

	/**
	 * Entry point: run the decode loop to completion (blocking), then close.
	 * If looping is enabled and stop() was not called, re-prepares and
	 * recurses to play the file again. (The name is a typo of "execute",
	 * kept as-is for source compatibility with existing callers.)
	 */
	public void excuate() {
		try {
			decodeFramesToImage(decoder, extractor, mediaFormat);
		} finally {
			close();
			if(isLoop && !isStop){
				decodePrepare(videoFilePath);
				excuate();
			}
		}

	}

	/**
	 * Enable or disable looped playback.
	 * @param isLoop true to restart from the beginning after end of stream
	 */
	public void setLoop(boolean isLoop){
		this.isLoop = isLoop;
	}

	/**
	 * Check whether the codec advertises the requested colour format.
	 * @param colorFormat colour format constant to look for
	 * @param caps capabilities of the selected codec
	 * @return true if the format appears in the codec's colorFormats list
	 */
	private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps) {
		for (int c : caps.colorFormats) {
			if (c == colorFormat) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Main decode loop: feed extractor samples into the codec and release
	 * output buffers to the configured Surface until end of stream or
	 * stop(). Rendering is paced against wall-clock time by sleepRender().
	 * @param decoder configured and started decoder
	 * @param extractor extractor positioned on the selected video track
	 * @param mediaFormat track format (only width/height are read here)
	 */
	public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
		MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
		boolean sawInputEOS = false;
		boolean sawOutputEOS = false;

		final int width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
		final int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);

		ImageWidth = width;
		ImageHeight = height;

		long startMs = System.currentTimeMillis();
		while (!sawOutputEOS && !isStop) {
			if (!sawInputEOS) {
				int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
				if (inputBufferId >= 0) {
					ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
					int sampleSize = extractor.readSampleData(inputBuffer, 0); // read one sample into the input buffer; returns its size
					if (sampleSize < 0) {
						// No more samples: queue an empty buffer flagged end-of-stream.
						decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
						sawInputEOS = true;
					} else {
						long presentationTimeUs = extractor.getSampleTime();
						Log.v(TAG, "presentationTimeUs:"+presentationTimeUs);
						decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
						extractor.advance();  // move to the next sample in the file
					}
				}
			}
			int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
			if (outputBufferId >= 0) {
				if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
					sawOutputEOS = true;
				}
				boolean doRender = (info.size != 0);
				if (doRender) {
					sleepRender(info, startMs);// pace output to the frame's presentation time
					decoder.releaseOutputBuffer(outputBufferId, true);// true = render to the configured Surface
				}
			}
		}
	}

	/** Request the decode loop to stop at its next iteration. */
	public void stop(){
		isStop = true;
	}

	/** Clear the stop flag so a subsequent excuate() can run. */
	public void start(){
		isStop = false;
	}

	//====================== output Surface setup ==============================

	private Surface mSurface;

	/**
	 * Set the Surface that frames are rendered to. Must be called before
	 * decodePrepare(), which hands it to MediaCodec.configure().
	 */
	public void setSurface(Surface surface){
		this.mSurface = surface;
	}

	/**
	 * Pace playback: sleep in 10 ms slices until wall-clock time catches up
	 * with the buffer's presentation timestamp, so frames are shown at
	 * roughly the original frame rate.
	 */
	private void sleepRender(MediaCodec.BufferInfo audioBufferInfo, long startMs) {
		while (audioBufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
			try {
				Thread.sleep(10);
			} catch (InterruptedException e) {
				e.printStackTrace();
				break;
			}
		}
	}
}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/VideoDecode.java.bak


+ 424 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/CameraRecorder.java

@@ -0,0 +1,424 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.media.MediaRecorder;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+import android.view.Surface;
+
+import com.joe.camera2recorddemo.Entity.SizeInfo;
+import com.joe.camera2recorddemo.OpenGL.Filter.BaseFilter;
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Semaphore;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class CameraRecorder {
+
+    private final static long BASE_TIME = System.currentTimeMillis();
+    private final int TIME_OUT = 1000;
+    private final Object VIDEO_LOCK = new Object();
+    private final Object REC_LOCK = new Object();
+    private MediaCodec mVideoEncoder;
+    private MediaCodec mAudioEncoder;
+    private AudioRecord mAudioRecord;
+    private MediaMuxer mMuxer;
+    // 音频参数
+    private int mRecordBufferSize = 0;
+    private int mRecordSampleRate = 48000;// 音频采样率
+    private int mRecordChannelConfig = AudioFormat.CHANNEL_IN_STEREO;// 音频录制通道,默认为立体声
+    private int mRecordAudioFormat = AudioFormat.ENCODING_PCM_16BIT; // 音频录制格式,默认为PCM16Bit
+    private SurfaceTexture mInputTexture;
+    private Surface mOutputSurface;
+    private Surface mEncodeSurface;
+    private EGLHelper mShowEGLHelper;
+    private Configuration mConfig;
+    private String mOutputPath;
+    private MediaCodec.BufferInfo mAudioEncodeBufferInfo;
+    private MediaCodec.BufferInfo mVideoEncodeBufferInfo;
+    private int mAudioTrack = -1;
+    private int mVideoTrack = -1;
+    private boolean mGLThreadFlag = false;
+    private Thread mGLThread;
+    private WrapRenderer mRenderer;
+    private Semaphore mSem;
+    private boolean isMuxStarted = false;
+    private int mInputTextureId;
+    private EGLSurface mEGLEncodeSurface = null;
+    private int mPreviewWidth = 0;
+    private int mPreviewHeight = 0;
+    private int mOutputWidth = 0;
+    private int mOutputHeight = 0;
+    private boolean isRecordStarted = false;
+    private boolean isRecordVideoStarted = false;
+    private boolean isRecordAudioStarted = false;
+    private boolean isTryStopAudio = false;
+    private Thread mAudioThread;
+    private Runnable mGLRunnable = new Runnable() {
+        @Override
+        public void run() {
+            if (mOutputSurface == null) {
+                Log.e("C2D", "CameraRecorder GLThread exit : outputSurface==null");
+                return;
+            }
+            if (mPreviewWidth <= 0 || mPreviewHeight <= 0) {
+                Log.e("C2D", "CameraRecorder GLThread exit : Preview Size==0");
+                return;
+            }
+            mShowEGLHelper.setSurface(mOutputSurface);
+            boolean ret = mShowEGLHelper.createGLES(mPreviewWidth, mPreviewHeight);
+            if (!ret) {
+                Log.e("C2D", "CameraRecorder GLThread exit : createGLES failed");
+                return;
+            }
+            if (mRenderer == null) {
+                mRenderer = new WrapRenderer(null);
+            }
+            mRenderer.setFlag(WrapRenderer.TYPE_CAMERA);
+            mRenderer.create();
+            int[] t = new int[1];
+            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, t, 0);
+            mRenderer.sizeChanged(mPreviewWidth, mPreviewHeight);
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, t[0]);
+
+            Filter mShowFilter = new BaseFilter();
+            Filter mRecFilter = new BaseFilter();
+            MatrixUtils.flip(mShowFilter.getVertexMatrix(), false, true);
+            mShowFilter.create();
+            mShowFilter.sizeChanged(mPreviewWidth, mPreviewHeight);
+
+            MatrixUtils.getMatrix(mRecFilter.getVertexMatrix(), MatrixUtils.TYPE_CENTERCROP,
+                    mPreviewWidth, mPreviewHeight,
+                    mOutputWidth, mOutputHeight);
+            MatrixUtils.flip(mRecFilter.getVertexMatrix(), false, true);
+            mRecFilter.create();
+            mRecFilter.sizeChanged(mOutputWidth, mOutputHeight);
+
+            FrameBuffer mEncodeFrameBuffer = new FrameBuffer();
+            while (mGLThreadFlag) {
+                try {
+                    mSem.acquire();
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+                if (mGLThreadFlag) {
+                    long time = (System.currentTimeMillis() - BASE_TIME) * 1000;
+                    mInputTexture.updateTexImage();
+                    mInputTexture.getTransformMatrix(mRenderer.getTextureMatrix());
+                    synchronized (VIDEO_LOCK) {
+                        if (isRecordVideoStarted) {
+                            if (mEGLEncodeSurface == null) {
+                                mEGLEncodeSurface = mShowEGLHelper.createEGLWindowSurface(mEncodeSurface);
+                            }
+                            mShowEGLHelper.makeCurrent(mEGLEncodeSurface);
+                            mEncodeFrameBuffer.bindFrameBuffer(mPreviewWidth, mPreviewHeight);
+                            mRenderer.draw(mInputTextureId);
+                            mEncodeFrameBuffer.unBindFrameBuffer();
+                            GLES20.glViewport(0, 0, mConfig.getVideoFormat().getInteger(MediaFormat.KEY_WIDTH),
+                                    mConfig.getVideoFormat().getInteger(MediaFormat.KEY_HEIGHT));
+                            mRecFilter.draw(mEncodeFrameBuffer.getCacheTextureId());
+                            mShowEGLHelper.setPresentationTime(mEGLEncodeSurface, time * 1000);
+                            videoEncodeStep(false);
+                            mShowEGLHelper.swapBuffers(mEGLEncodeSurface);
+
+                            mShowEGLHelper.makeCurrent();
+                            GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
+                            mShowFilter.draw(mEncodeFrameBuffer.getCacheTextureId());
+                            mShowEGLHelper.setPresentationTime(0);
+                            mShowEGLHelper.swapBuffers();
+                        } else {
+                            GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
+                            mRenderer.draw(mInputTextureId);
+                            mShowEGLHelper.swapBuffers();
+                        }
+                    }
+                }
+            }
+            mShowEGLHelper.destroyGLES();
+        }
+    };
+
+    public CameraRecorder() {
+        mShowEGLHelper = new EGLHelper();
+        // mEncodeEGLHelper=new EGLHelper();
+        mSem = new Semaphore(0);
+        mAudioEncodeBufferInfo = new MediaCodec.BufferInfo();
+        mVideoEncodeBufferInfo = new MediaCodec.BufferInfo();
+    }
+
+    public void setOutputPath(String path) {
+        this.mOutputPath = path;
+    }
+
+    public void setOutputSize(SizeInfo size) {
+        this.mConfig = new Configuration(size.getWidth(), size.getHeight());
+        this.mOutputWidth = size.getWidth();
+        this.mOutputHeight = size.getHeight();
+    }
+
+    public void setPreviewSize(int width, int height) {
+        this.mPreviewWidth = width;
+        this.mPreviewHeight = height;
+    }
+
+    public SurfaceTexture createInputSurfaceTexture() {
+        mInputTextureId = mShowEGLHelper.createTextureID();
+        mInputTexture = new SurfaceTexture(mInputTextureId);
+        new Handler(Looper.getMainLooper()).post(new Runnable() {
+            @Override
+            public void run() {
+                mInputTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+                    @Override
+                    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+                        mSem.release();
+                    }
+                });
+            }
+        });
+        return mInputTexture;
+    }
+
+    public void setConfiguration(Configuration config) {
+        this.mConfig = config;
+    }
+
+    public void setOutputSurface(Surface surface) {
+        this.mOutputSurface = surface;
+    }
+
+    public void setRenderer(Renderer renderer) {
+        mRenderer = new WrapRenderer(renderer);
+    }
+
+    public void startPreview() {
+        synchronized (REC_LOCK) {
+            Log.d("C2D", "CameraRecorder startPreview");
+            mSem.drainPermits();
+            mGLThreadFlag = true;
+            mGLThread = new Thread(mGLRunnable);
+            mGLThread.start();
+        }
+    }
+
+    public void stopPreview() throws InterruptedException {
+        synchronized (REC_LOCK) {
+            mGLThreadFlag = false;
+            mSem.release();
+            if (mGLThread != null && mGLThread.isAlive()) {
+                mGLThread.join();
+                mGLThread = null;
+            }
+            Log.d("C2D", "CameraRecorder stopPreview");
+        }
+    }
+
+    public void startRecord() throws IOException {
+        synchronized (REC_LOCK) {
+            isRecordStarted = true;
+            MediaFormat audioFormat = mConfig.getAudioFormat();
+            mAudioEncoder = MediaCodec.createEncoderByType(audioFormat.getString(MediaFormat.KEY_MIME));
+            mAudioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            MediaFormat videoFormat = mConfig.getVideoFormat();
+            mVideoEncoder = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
+            //此处不能用mOutputSurface,会configure失败
+            mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            mEncodeSurface = mVideoEncoder.createInputSurface();
+
+            mAudioEncoder.start();
+            mVideoEncoder.start();
+            mMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+            mRecordBufferSize = AudioRecord.getMinBufferSize(mRecordSampleRate,
+                    mRecordChannelConfig, mRecordAudioFormat) * 2;
+            //        buffer=new byte[bufferSize];
+            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mRecordSampleRate, mRecordChannelConfig,
+                    mRecordAudioFormat, mRecordBufferSize);
+
+            mAudioThread = new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    mAudioRecord.startRecording();
+                    while (!audioEncodeStep(isTryStopAudio)) ;
+                    mAudioRecord.stop();
+                }
+            });
+            mAudioThread.start();
+            isRecordAudioStarted = true;
+        }
+    }
+
    /**
     * Stops recording, drains both encoders, finalizes the muxer and releases
     * all codec/muxer resources. No-op when recording was never started.
     *
     * @throws InterruptedException if interrupted while waiting for the audio thread
     */
    public void stopRecord() throws InterruptedException {
        synchronized (REC_LOCK) {
            if (isRecordStarted) {
                // Ask the audio loop to queue its end-of-stream buffer on its next pass.
                isTryStopAudio = true;
                if (isRecordAudioStarted) {
                    // Wait until the audio thread has drained the encoder and stopped the AudioRecord.
                    mAudioThread.join();
                    isRecordAudioStarted = false;
                }
                synchronized (VIDEO_LOCK) {
                    if (isRecordVideoStarted) {
                        mEGLEncodeSurface = null;
                        // Push EOS through the surface-input video encoder and drain its remaining output.
                        videoEncodeStep(true);
                    }
                    isRecordVideoStarted = false;
                }
                mAudioEncoder.stop();
                mAudioEncoder.release();
                mVideoEncoder.stop();
                mVideoEncoder.release();
                try {
                    if (isMuxStarted) {
                        isMuxStarted = false;
                        mMuxer.stop();
                        mMuxer.release();
                    }
                } catch (IllegalStateException e) {
                    // Muxer.stop() throws when no valid samples were written; delete the
                    // unusable output file rather than leaving a corrupt mp4 behind.
                    e.printStackTrace();
                    File file = new File(mOutputPath);
                    if (file.exists() && file.delete()) {
                        Log.d("C2D", "delete error file :" + mOutputPath);
                    }
                }

                // Drop references so a subsequent startRecord() builds fresh components.
                mAudioEncoder = null;
                mVideoEncoder = null;
                mMuxer = null;

                mAudioTrack = -1;
                mVideoTrack = -1;

                isRecordStarted = false;
            }
        }
    }
+
+    private boolean videoEncodeStep(boolean isEnd) {
+        if (isEnd) {
+            mVideoEncoder.signalEndOfInputStream();
+        }
+        while (true) {
+            int outputIndex = mVideoEncoder.dequeueOutputBuffer(mVideoEncodeBufferInfo, TIME_OUT);
+            if (outputIndex >= 0) {
+                if (isMuxStarted && mVideoEncodeBufferInfo.size > 0 && mVideoEncodeBufferInfo.presentationTimeUs > 0) {
+                    mMuxer.writeSampleData(mVideoTrack, getOutputBuffer(mVideoEncoder, outputIndex), mVideoEncodeBufferInfo);
+                }
+                mVideoEncoder.releaseOutputBuffer(outputIndex, false);
+                if (mVideoEncodeBufferInfo.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
+                    Log.d("C2D", "CameraRecorder get video encode end of stream");
+                    return true;
+                }
+            } else if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+                break;
+            } else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+                Log.e("C2D", "get video output format changed ->" + mVideoEncoder.getOutputFormat().toString());
+                mVideoTrack = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
+                mMuxer.start();
+                isMuxStarted = true;
+            }
+        }
+        return false;
+    }
+
+    private boolean audioEncodeStep(boolean isEnd) {
+        if (isRecordAudioStarted) {
+            int inputIndex = mAudioEncoder.dequeueInputBuffer(TIME_OUT);
+            if (inputIndex >= 0) {
+                ByteBuffer buffer = getInputBuffer(mAudioEncoder, inputIndex);
+                buffer.clear();
+                long time = (System.currentTimeMillis() - BASE_TIME) * 1000;
+                int length = mAudioRecord.read(buffer, mRecordBufferSize);
+                if (length >= 0) {
+                    mAudioEncoder.queueInputBuffer(inputIndex, 0, length, time,
+                            isEnd ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+                }
+            }
+            while (true) {
+                int outputIndex = mAudioEncoder.dequeueOutputBuffer(mAudioEncodeBufferInfo, TIME_OUT);
+                if (outputIndex >= 0) {
+                    //todo 第一帧音频时间戳为0的问题
+                    if (isMuxStarted && mAudioEncodeBufferInfo.size > 0 && mAudioEncodeBufferInfo.presentationTimeUs > 0) {
+                        mMuxer.writeSampleData(mAudioTrack, getOutputBuffer(mAudioEncoder, outputIndex), mAudioEncodeBufferInfo);
+                    }
+                    mAudioEncoder.releaseOutputBuffer(outputIndex, false);
+                    if (mAudioEncodeBufferInfo.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
+                        Log.d("C2D", "CameraRecorder get audio encode end of stream");
+                        isTryStopAudio = false;
+                        isRecordAudioStarted = false;
+                        return true;
+                    }
+                } else if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+                    break;
+                } else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+                    Log.e("C2D", "get audio output format changed ->" + mAudioEncoder.getOutputFormat().toString());
+                    synchronized (VIDEO_LOCK) {
+                        mAudioTrack = mMuxer.addTrack(mAudioEncoder.getOutputFormat());
+                        isRecordVideoStarted = true;
+                    }
+                }
+            }
+        }
+        return false;
+    }
+
+    private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            return codec.getInputBuffer(index);
+        } else {
+            return codec.getInputBuffers()[index];
+        }
+    }
+
+    private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            return codec.getOutputBuffer(index);
+        } else {
+            return codec.getOutputBuffers()[index];
+        }
+    }
+
+    public static class Configuration {
+
+        private MediaFormat mAudioFormat;
+        private MediaFormat mVideoFormat;
+
+        public Configuration(MediaFormat audio, MediaFormat video) {
+            this.mAudioFormat = audio;
+            this.mVideoFormat = video;
+        }
+
+        public Configuration(int width, int height) {
+            mAudioFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", 48000, 2);
+            mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
+            mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+
+            mVideoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
+            mVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
+            mVideoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+            mVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
+            mVideoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        }
+
+        public MediaFormat getAudioFormat() {
+            return mAudioFormat;
+        }
+
+        public MediaFormat getVideoFormat() {
+            return mVideoFormat;
+        }
+
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/CameraRecorder.java.bak


+ 196 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/EGLHelper.java

@@ -0,0 +1,196 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.util.Log;
+
+import javax.microedition.khronos.opengles.GL10;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class EGLHelper {
+
+    private EGLSurface mEGLSurface;
+    private EGLContext mEGLContext;
+    private EGLDisplay mEGLDisplay;
+    private EGLConfig mEGLConfig;
+
+    private EGLContext mShareEGLContext= EGL14.EGL_NO_CONTEXT;
+
+    private boolean isDebug=true;
+
+    private int mEglSurfaceType= EGL14.EGL_WINDOW_BIT;
+
+    private Object mSurface;
+    private Object mCopySurface;
+
+    /**
+     * @param type one of {@link EGL14#EGL_WINDOW_BIT}、{@link EGL14#EGL_PBUFFER_BIT}、{@link EGL14#EGL_PIXMAP_BIT}
+     */
+    public void setEGLSurfaceType(int type){
+        this.mEglSurfaceType=type;
+    }
+
+    public void setSurface(Object surface){
+        this.mSurface=surface;
+    }
+
+    public void setCopySurface(Object surface){
+        this.mCopySurface=surface;
+    }
+
+    /**
+     * create the environment for OpenGLES
+     * @param eglWidth width
+     * @param eglHeight height
+     */
+    public boolean createGLES(int eglWidth, int eglHeight){
+        int[] attributes = new int[] {
+                EGL14.EGL_SURFACE_TYPE, mEglSurfaceType,      //渲染类型
+                EGL14.EGL_RED_SIZE, 8,  //指定RGB中的R大小(bits)
+                EGL14.EGL_GREEN_SIZE, 8, //指定G大小
+                EGL14.EGL_BLUE_SIZE, 8,  //指定B大小
+                EGL14.EGL_ALPHA_SIZE, 8, //指定Alpha大小,以上四项实际上指定了像素格式
+                EGL14.EGL_DEPTH_SIZE, 16, //指定深度缓存(Z Buffer)大小
+                EGL14.EGL_RENDERABLE_TYPE, 4, //指定渲染api类别, 如上一小节描述,这里或者是硬编码的4(EGL14.EGL_OPENGL_ES2_BIT)
+                EGL14.EGL_NONE };  //总是以EGL14.EGL_NONE结尾
+
+        int glAttrs[] = {
+                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,  //0x3098是EGL14.EGL_CONTEXT_CLIENT_VERSION,但是4.2以前没有EGL14
+                EGL14.EGL_NONE
+        };
+
+        int bufferAttrs[]={
+                EGL14.EGL_WIDTH,eglWidth,
+                EGL14.EGL_HEIGHT,eglHeight,
+                EGL14.EGL_NONE
+        };
+
+        //第二步 获取Display
+        //获取默认显示设备,一般为设备主屏幕
+        mEGLDisplay= EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+
+        //获取版本号,[0]为版本号,[1]为子版本号
+        int[] versions=new int[2];
+        EGL14.eglInitialize(mEGLDisplay,versions,0,versions,1);
+        log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_VENDOR));
+        log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_VERSION));
+        log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_EXTENSIONS));
+
+        //第三部 选择config
+        //获取EGL可用配置
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] configNum = new int[1];
+        EGL14.eglChooseConfig(mEGLDisplay, attributes,0, configs,0, 1, configNum,0);
+        if(configs[0]==null){
+            log("eglChooseConfig Error:"+ EGL14.eglGetError());
+            return false;
+        }
+        mEGLConfig = configs[0];
+
+        //创建EGLContext
+        mEGLContext= EGL14.eglCreateContext(mEGLDisplay,mEGLConfig,mShareEGLContext, glAttrs,0);
+        if(mEGLContext== EGL14.EGL_NO_CONTEXT){
+            return false;
+        }
+        //获取创建后台绘制的Surface
+        switch (mEglSurfaceType){
+            case EGL14.EGL_WINDOW_BIT:
+                mEGLSurface= EGL14.eglCreateWindowSurface(mEGLDisplay,mEGLConfig,mSurface,new int[]{EGL14.EGL_NONE},0);
+                break;
+            case EGL14.EGL_PIXMAP_BIT:
+                break;
+            case EGL14.EGL_PBUFFER_BIT:
+                mEGLSurface= EGL14.eglCreatePbufferSurface(mEGLDisplay,mEGLConfig,bufferAttrs,0);
+                break;
+        }
+        if(mEGLSurface== EGL14.EGL_NO_SURFACE){
+            log("eglCreateSurface Error:"+ EGL14.eglGetError());
+
+            return false;
+        }
+
+        if(!EGL14.eglMakeCurrent(mEGLDisplay,mEGLSurface,mEGLSurface,mEGLContext)){
+            log("eglMakeCurrent Error:"+ EGL14.eglQueryString(mEGLDisplay, EGL14.eglGetError()));
+            return false;
+        }
+        log("gl environment create success");
+        return true;
+    }
+
+    public EGLSurface createEGLWindowSurface(Object object){
+        return EGL14.eglCreateWindowSurface(mEGLDisplay,mEGLConfig,object,new int[]{EGL14.EGL_NONE},0);
+    }
+
+    public void setShareEGLContext(EGLContext context){
+        this.mShareEGLContext=context;
+    }
+
+    public EGLContext getEGLContext(){
+        return mEGLContext;
+    }
+
+    public boolean makeCurrent(){
+        return EGL14.eglMakeCurrent(mEGLDisplay,mEGLSurface,mEGLSurface,mEGLContext);
+    }
+
+    public boolean makeCurrent(EGLSurface surface){
+        return EGL14.eglMakeCurrent(mEGLDisplay,surface,surface,mEGLContext);
+    }
+
+    public boolean destroyGLES(){
+        EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
+        EGL14.eglDestroySurface(mEGLDisplay,mEGLSurface);
+        EGL14.eglDestroyContext(mEGLDisplay,mEGLContext);
+        EGL14.eglTerminate(mEGLDisplay);
+        log("gl destroy gles");
+        return true;
+    }
+
+    public void setPresentationTime(long time){
+        EGLExt.eglPresentationTimeANDROID(mEGLDisplay,mEGLSurface,time);
+    }
+
+    public void setPresentationTime(EGLSurface surface,long time){
+        EGLExt.eglPresentationTimeANDROID(mEGLDisplay,surface,time);
+    }
+
+    public boolean swapBuffers(){
+        return EGL14.eglSwapBuffers(mEGLDisplay,mEGLSurface);
+    }
+
+    public boolean swapBuffers(EGLSurface surface){
+        return EGL14.eglSwapBuffers(mEGLDisplay,surface);
+    }
+
+
+    //创建视频数据流的OES TEXTURE
+    public int createTextureID() {
+        int[] texture = new int[1];
+        GLES20.glGenTextures(1, texture, 0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
+        return texture[0];
+    }
+
+    private void log(String log){
+        if(isDebug){
+            Log.e("EGLHelper",log);
+        }
+    }
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/EGLHelper.java.bak


+ 41 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BaseFilter.java

@@ -0,0 +1,41 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+public class BaseFilter extends Filter {
+
+    public BaseFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/base.frag");
+    }
+
+    public BaseFilter(String vert,String frag){
+        super(null,vert,frag);
+    }
+
+    public BaseFilter(){
+        super(null,"attribute vec4 aVertexCo;\n" +
+                "attribute vec2 aTextureCo;\n" +
+                "\n" +
+                "uniform mat4 uVertexMatrix;\n" +
+                "uniform mat4 uTextureMatrix;\n" +
+                "\n" +
+                "varying vec2 vTextureCo;\n" +
+                "\n" +
+                "void main(){\n" +
+                "    gl_Position = uVertexMatrix*aVertexCo;\n" +
+                "    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
+                "}",
+                "precision mediump float;\n" +
+                "varying vec2 vTextureCo;\n" +
+                "uniform sampler2D uTexture;\n" +
+                "void main() {\n" +
+                "    gl_FragColor = texture2D( uTexture, vTextureCo);\n" +
+                "}");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+    }
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BaseFilter.java.bak


+ 75 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BeautyFilter.java

@@ -0,0 +1,75 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+/**
+ * Created by Yj on 2017/9/18.
+ */
+
+public class BeautyFilter extends Filter {
+
+    private int mGLaaCoef;
+    private int mGLmixCoef;
+    private int mGLiternum;
+
+
+    private float aaCoef;
+    private float mixCoef;
+    private int iternum;
+
+    public BeautyFilter(Resources resource) {
+        super(resource,"shader/beauty/beauty.vert", "shader/beauty/beauty.frag");
+        shaderNeedTextureSize(true);
+        setBeautyLevel(0);
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        mGLaaCoef=GLES20.glGetUniformLocation(mGLProgram,"uACoef");
+        mGLmixCoef=GLES20.glGetUniformLocation(mGLProgram,"uMixCoef");
+        mGLiternum=GLES20.glGetUniformLocation(mGLProgram,"uIternum");
+    }
+
+    public Filter setBeautyLevel(int level){
+        switch (level){
+            case 1:
+                a(1,0.19f,0.54f);
+                break;
+            case 2:
+                a(2,0.29f,0.54f);
+                break;
+            case 3:
+                a(3,0.17f,0.39f);
+                break;
+            case 4:
+                a(3,0.25f,0.54f);
+                break;
+            case 5:
+                a(4,0.13f,0.54f);
+                break;
+            case 6:
+                a(4,0.19f,0.69f);
+                break;
+            default:
+                a(0,0f,0f);
+                break;
+        }
+        return this;
+    }
+
+    private void a(int a,float b,float c){
+        this.iternum=a;
+        this.aaCoef=b;
+        this.mixCoef=c;
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(mGLaaCoef,aaCoef);
+        GLES20.glUniform1f(mGLmixCoef,mixCoef);
+        GLES20.glUniform1i(mGLiternum,iternum);
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BeautyFilter.java.bak


+ 120 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/ChooseFilter.java

@@ -0,0 +1,120 @@
+/*
+ *
+ * NoFilter.java
+ * 
+ * Created by Wuwang on 2016/10/17
+ */
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+/**
+ * 滤镜集合
+ */
+public class ChooseFilter extends Filter {
+
+	//滤镜选择代码
+	private int hChangeType;
+	private int hFilterCode = 0;
+
+	//复杂
+	private int width;
+	private int height;
+	private boolean needGLWH = false;
+	private int mGLWidth;
+	private int mGLHeight;
+	private boolean needTexelWH = false;
+	private float mTexelWidth;
+	private float mTexelHeight;
+	private int mUniformTexelWidthLocation;
+	private int mUniformTexelHeightLocation;
+
+	public ChooseFilter(Resources resource) {
+		super(resource, "shader/choose/choose.vert", "shader/choose/choose.frag");
+	}
+
+	@Override
+	protected void onCreate() {
+		super.onCreate();
+		hChangeType = GLES20.glGetUniformLocation(mGLProgram, "vChangeType");
+		mGLWidth = GLES20.glGetUniformLocation(mGLProgram, "uWidth");
+		mGLHeight = GLES20.glGetUniformLocation(mGLProgram, "uHeight");
+		mUniformTexelWidthLocation = GLES20.glGetUniformLocation(mGLProgram, "texelWidth");
+		mUniformTexelHeightLocation = GLES20.glGetUniformLocation(mGLProgram, "texelHeight");
+	}
+
+	@Override
+	protected void onSizeChanged(int width, int height) {
+		super.onSizeChanged(width, height);
+		this.width = width;
+		this.height = height;
+		setTextlSize(5.0f);
+	}
+
+	@Override
+	protected void onSetExpandData() {
+		super.onSetExpandData();
+		GLES20.glUniform1i(hChangeType, hFilterCode);
+		if (needGLWH) {
+			GLES20.glUniform1f(mGLWidth, width);
+			GLES20.glUniform1f(mGLHeight, height);
+		}
+		if (needTexelWH) {
+			GLES20.glUniform1f(mUniformTexelWidthLocation, mTexelWidth);
+			GLES20.glUniform1f(mUniformTexelHeightLocation, mTexelHeight);
+		}
+	}
+
+	/**
+	 * 设置滤镜类型
+	 *
+	 * @param code
+	 */
+	public void setChangeType(int code) {
+		this.hFilterCode = code;
+		switch (code) {
+			case FilterType.TOON:
+				needTexelWH = true;
+				setTextlSize(4.2f);
+				break;
+			case FilterType.CONVOLUTION:
+				needTexelWH = true;
+				setTextlSize(1.3f);
+				break;
+			case FilterType.SOBEL:
+				needGLWH = true;
+				break;
+			case FilterType.SKETCH:
+				needTexelWH = true;
+				setTextlSize(3.0f);
+				break;
+			default:
+				needTexelWH = false;
+				needGLWH = false;
+				break;
+		}
+	}
+
+	private void setTextlSize(float size) {
+		mTexelWidth = size / width;
+		mTexelHeight = size / height;
+	}
+
+	/**
+	 * 滤镜类型
+	 */
+	public static class FilterType {
+		public static final int NORMAL = 0;
+		public static final int COOL = 1;
+		public static final int WARM = 2;
+		public static final int GRAY = 3;
+		public static final int CAMEO = 4;
+		public static final int INVERT = 5;
+		public static final int SEPIA = 6;
+		public static final int TOON = 7;
+		public static final int CONVOLUTION = 8;
+		public static final int SOBEL = 9;
+		public static final int SKETCH = 10;
+	}
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/ChooseFilter.java.bak


+ 43 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DistortionFilter.java

@@ -0,0 +1,43 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+import com.joe.camera2recorddemo.OpenGL.TransUtil;
+import com.joe.camera2recorddemo.OpenGL.Transformation;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+/**
+ * 旋转,翻转,裁剪类滤镜
+ * Created by Yj on 2017/10/31.
+ */
+
+public class DistortionFilter extends Filter {
+
+	//旋转,翻转,裁剪变换类
+	private Transformation mTransformation;
+	private float[] mTextureCo;
+
+	public DistortionFilter(Resources resource) {
+		super(resource, "shader/base.vert","shader/base.frag");
+		initTransformation();
+	}
+
+	/**
+	 * 初始化变化类
+	 */
+	private void initTransformation() {
+		mTextureCo = MatrixUtils.getOriginalTextureCo();
+		if (mTransformation == null) {
+			mTransformation = new Transformation();
+		}
+	}
+
+	/**
+	 * 设置变化类
+	 * @param transformation
+	 */
+	public void setTransformation(Transformation transformation){
+		mTransformation = transformation;
+		setTextureCo(TransUtil.getTransformationCo(mTextureCo, mTransformation));
+	}
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DistortionFilter.java.bak


+ 13 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DrawFilter.java

@@ -0,0 +1,13 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
/**
 * Created by Administrator on 2017/11/16.
 */

public class DrawFilter extends Filter {
	// NOTE(review): empty strings are passed as the shader asset paths; they are
	// non-null, so Filter.create() will run onCreate() and attempt to load shaders
	// from "". Presumably a subclass or caller supplies real shaders — confirm.
	public DrawFilter(Resources resource) {
		super(resource, "", "");
	}
}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DrawFilter.java.bak


+ 216 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Filter.java

@@ -0,0 +1,216 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+
+import com.joe.camera2recorddemo.OpenGL.FrameBuffer;
+import com.joe.camera2recorddemo.OpenGL.Renderer;
+import com.joe.camera2recorddemo.Utils.GpuUtils;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
/**
 * Base class for all GL filters: owns the shader program, the vertex/texture
 * coordinate buffers and matrices, and an off-screen FrameBuffer used by
 * {@link #drawToTexture(int)}. Subclasses customize via the protected hooks
 * (onCreate / onSizeChanged / onSetExpandData / onBindTexture).
 * All GL calls must run on the thread owning the GL context.
 */
public abstract class Filter implements Renderer {

    // 4x4 transforms applied to vertex and texture coordinates in the shader.
    private float[] mVertexMatrix= MatrixUtils.getOriginalMatrix();
    private float[] mTextureMatrix=MatrixUtils.getOriginalMatrix();

    // Last vertex coordinates passed to setVertexCo (null until first set).
    private float[] mVertexCo;

    protected FloatBuffer mVertexBuffer;
    protected FloatBuffer mTextureBuffer;

    // Current surface size, stored by sizeChanged().
    protected int mWidth;
    protected int mHeight;

    // Resources for loading shader assets; null when shader source is inline GLSL.
    protected Resources mRes;
    private String mVertex;
    private String mFragment;

    // GL program and attribute/uniform handles resolved in onCreate().
    protected int mGLProgram;
    protected int mGLVertexCo;
    protected int mGLTextureCo;
    protected int mGLVertexMatrix;
    protected int mGLTextureMatrix;
    protected int mGLTexture;

    // Optional uWidth/uHeight uniforms, only resolved when shaderNeedTextureSize(true).
    private int mGLWidth;
    private int mGLHeight;
    private boolean isUseSize=false;

    // Off-screen FBO backing drawToTexture().
    private FrameBuffer mFrameTemp;

    /**
     * @param resource Resources for asset-based shaders, or null when vertex/fragment
     *                 contain raw GLSL source
     * @param vertex   asset path or GLSL source of the vertex shader
     * @param fragment asset path or GLSL source of the fragment shader
     */
    protected Filter(Resources resource,String vertex,String fragment){
        this.mRes=resource;
        this.mVertex=vertex;
        this.mFragment=fragment;
        mFrameTemp=new FrameBuffer();
        initBuffer();
    }

    // Allocates direct buffers for 4 vertices x 2 floats (32 bytes each) and
    // fills them with the default full-screen quad coordinates.
    protected void initBuffer(){
        ByteBuffer vertex=ByteBuffer.allocateDirect(32);
        vertex.order(ByteOrder.nativeOrder());
        mVertexBuffer=vertex.asFloatBuffer();
        mVertexBuffer.put(MatrixUtils.getOriginalVertexCo());
        mVertexBuffer.position(0);
        ByteBuffer texture=ByteBuffer.allocateDirect(32);
        texture.order(ByteOrder.nativeOrder());
        mTextureBuffer=texture.asFloatBuffer();
        mTextureBuffer.put(MatrixUtils.getOriginalTextureCo());
        mTextureBuffer.position(0);
    }

    /** Replaces the vertex coordinates (expects 8 floats: 4 vertices x 2). */
    public void setVertexCo(float[] vertexCo){
        mVertexCo = vertexCo;
        mVertexBuffer.clear();
        mVertexBuffer.put(vertexCo);
        mVertexBuffer.position(0);
    }

    /** Replaces the texture coordinates (expects 8 floats: 4 vertices x 2). */
    public void setTextureCo(float[] textureCo){
        mTextureBuffer.clear();
        mTextureBuffer.put(textureCo);
        mTextureBuffer.position(0);
    }

    public void setVertexBuffer(FloatBuffer vertexBuffer){
        this.mVertexBuffer=vertexBuffer;
    }

    public void setTextureBuffer(FloatBuffer textureBuffer){
        this.mTextureBuffer=textureBuffer;
    }

    public void setVertexMatrix(float[] matrix){
        this.mVertexMatrix=matrix;
    }

    public void setTextureMatrix(float[] matrix){
        this.mTextureMatrix=matrix;
    }

    public float[] getVertexMatrix(){
        return mVertexMatrix;
    }

    public float[] getTextureMatrix(){
        return mTextureMatrix;
    }

    public float[] getVertexCo() { return mVertexCo;}

    // Subclass hook: request uWidth/uHeight uniforms. Must be called before create().
    protected void shaderNeedTextureSize(boolean need){
        this.isUseSize=need;
    }

    // Compiles/links the program and resolves attribute/uniform handles.
    protected void onCreate(){
        if(mRes!=null){
            mGLProgram= GpuUtils.createGLProgramByAssetsFile(mRes,mVertex,mFragment);
        }else{
            mGLProgram= GpuUtils.createGLProgram(mVertex,mFragment);
        }
        mGLVertexCo=GLES20.glGetAttribLocation(mGLProgram,"aVertexCo");
        mGLTextureCo=GLES20.glGetAttribLocation(mGLProgram,"aTextureCo");
        mGLVertexMatrix=GLES20.glGetUniformLocation(mGLProgram,"uVertexMatrix");
        mGLTextureMatrix=GLES20.glGetUniformLocation(mGLProgram,"uTextureMatrix");
        mGLTexture=GLES20.glGetUniformLocation(mGLProgram,"uTexture");

        if(isUseSize){
            mGLWidth=GLES20.glGetUniformLocation(mGLProgram,"uWidth");
            mGLHeight=GLES20.glGetUniformLocation(mGLProgram,"uHeight");
        }
    }

    /** Subclass hook invoked from sizeChanged(); default does nothing. */
    protected void onSizeChanged(int width,int height){

    }

    @Override
    public final void create() {
        if(mVertex!=null&&mFragment!=null){
            onCreate();
        }
    }

    @Override
    public void sizeChanged(int width, int height) {
        // NOTE(review): onSizeChanged runs BEFORE mWidth/mHeight are updated, so
        // overrides must use the width/height parameters, not the fields.
        onSizeChanged(width, height);
        this.mWidth=width;
        this.mHeight=height;

        // Drop the cached FBO; it is lazily re-created at the new size on next use.
        mFrameTemp.destroyFrameBuffer();
    }

    /** Renders the given texture to the currently bound target. */
    @Override
    public void draw(int texture) {
        onClear();
        onUseProgram();
        onSetExpandData();
        onBindTexture(texture);
        onDraw();
    }

    /**
     * Renders into the internal FBO and returns the resulting texture id.
     * The vertex matrix is flipped vertically around onSetExpandData and flipped
     * back afterwards — presumably to compensate for FBO texture orientation.
     */
    public int drawToTexture(int texture){
        mFrameTemp.bindFrameBuffer(mWidth,mHeight);
        onClear();
        onUseProgram();
        MatrixUtils.flip(mVertexMatrix,false,true);
        onSetExpandData();
        MatrixUtils.flip(mVertexMatrix,false,true);
        onBindTexture(texture);
        onDraw();
        mFrameTemp.unBindFrameBuffer();
        return mFrameTemp.getCacheTextureId();
    }

    @Override
    public void destroy() {
        mFrameTemp.destroyFrameBuffer();
        GLES20.glDeleteProgram(mGLProgram);
    }

    protected void onUseProgram(){
        GLES20.glUseProgram(mGLProgram);
    }

    // Uploads the two attribute arrays and draws the full-screen quad.
    protected void onDraw(){
        GLES20.glEnableVertexAttribArray(mGLVertexCo);
        GLES20.glVertexAttribPointer(mGLVertexCo,2, GLES20.GL_FLOAT, false, 0,mVertexBuffer);
        GLES20.glEnableVertexAttribArray(mGLTextureCo);
        GLES20.glVertexAttribPointer(mGLTextureCo, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4);
        GLES20.glDisableVertexAttribArray(mGLVertexCo);
        GLES20.glDisableVertexAttribArray(mGLTextureCo);
    }

    // Clears the color/depth buffers to opaque white.
    protected void onClear(){
        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    }

    /**
     * Uploads the matrices (and optional size uniforms) to the shader.
     * Subclasses override to push additional uniforms.
     */
    protected void onSetExpandData(){
        GLES20.glUniformMatrix4fv(mGLVertexMatrix,1,false,mVertexMatrix,0);
        GLES20.glUniformMatrix4fv(mGLTextureMatrix,1,false,mTextureMatrix,0);
        if(isUseSize){
            GLES20.glUniform1f(mGLWidth,mWidth);
            GLES20.glUniform1f(mGLHeight,mHeight);
        }
    }

    /**
     * Binds the input texture to unit 0 as GL_TEXTURE_2D.
     * Subclasses override for other texture targets (e.g. external OES).
     */
    protected void onBindTexture(int textureId){
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textureId);
        GLES20.glUniform1i(mGLTexture,0);
    }

}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Filter.java.bak


+ 113 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/GroupFilter.java

@@ -0,0 +1,113 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+import java.util.Iterator;
+import java.util.Vector;
+
+/**
+ * Created by Yj on 2017/9/24 0024.
+ */
+
+public class GroupFilter extends BaseFilter {
+
+    private Vector<Filter> mGroup;
+    private Vector<Filter> mTempGroup;
+
+    public GroupFilter(Resources resource) {
+        super(resource);
+    }
+
+    public GroupFilter(){
+        super();
+    }
+
+    @Override
+    protected void initBuffer() {
+        super.initBuffer();
+        mGroup=new Vector<>();
+        mTempGroup=new Vector<>();
+    }
+
+    public synchronized void addFilter(Filter filter){
+        mGroup.add(filter);
+        mTempGroup.add(filter);
+    }
+
+    public synchronized void addFilter(int index,Filter filter){
+        mGroup.add(index, filter);
+        mTempGroup.add(filter);
+    }
+
+    public synchronized Filter removeFilter(int index){
+        return mGroup.remove(index);
+    }
+
+    public boolean removeFilter(Filter filter){
+        return mGroup.remove(filter);
+    }
+
+    public synchronized Filter element(int index){
+        return mGroup.elementAt(index);
+    }
+
+    public synchronized Iterator<Filter> iterator(){
+        return mGroup.iterator();
+    }
+
+    public synchronized boolean isEmpty(){
+        return mGroup.isEmpty();
+    }
+
+    @Override
+    protected synchronized void onCreate() {
+        super.onCreate();
+        for (Filter filter : mGroup) {
+            filter.create();
+        }
+        mTempGroup.clear();
+    }
+
+    private void tempFilterInit(int width,int height){
+        for (Filter filter : mTempGroup) {
+            filter.create();
+            filter.sizeChanged(width, height);
+        }
+        mTempGroup.removeAllElements();
+    }
+
+    @Override
+    protected synchronized void onSizeChanged(int width, int height) {
+        super.onSizeChanged(width, height);
+        for (Filter filter : mGroup) {
+            filter.sizeChanged(width, height);
+        }
+    }
+
+    @Override
+    public void draw(int texture) {
+        if(mTempGroup.size()>0){
+            tempFilterInit(mWidth, mHeight);
+        }
+        int tempTextureId=texture;
+        for (int i=0;i<mGroup.size();i++){
+            Filter filter=mGroup.get(i);
+            tempTextureId=filter.drawToTexture(tempTextureId);
+        }
+        super.draw(tempTextureId);
+    }
+
+    @Override
+    public int drawToTexture(int texture) {
+        if(mTempGroup.size()>0){
+            tempFilterInit(mWidth, mHeight);
+        }
+        int tempTextureId=texture;
+        for (int i=0;i<mGroup.size();i++){
+            Filter filter=mGroup.get(i);
+            tempTextureId=filter.drawToTexture(tempTextureId);
+        }
+        return super.drawToTexture(tempTextureId);
+    }
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/GroupFilter.java.bak


+ 34 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Mp4EditFilter.java

@@ -0,0 +1,34 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+/**
+ * 综合滤镜
+ * Created by Yj on 2017/11/1.
+ */
+
+public class Mp4EditFilter extends GroupFilter {
+	private ChooseFilter chooseFilter;
+	private DistortionFilter distortionFilter;
+
+	public Mp4EditFilter(Resources resource) {
+		super(resource);
+	}
+
+	@Override
+	protected void initBuffer() {
+		super.initBuffer();
+		chooseFilter = new ChooseFilter(mRes);
+		distortionFilter = new DistortionFilter(mRes);
+		addFilter(chooseFilter);
+		addFilter(distortionFilter);
+	}
+
+	public ChooseFilter getChooseFilter() {
+		return chooseFilter;
+	}
+
+	public DistortionFilter getDistortionFilter() {
+		return distortionFilter;
+	}
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Mp4EditFilter.java.bak


+ 48 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/OesFilter.java

@@ -0,0 +1,48 @@
+/*
+ * Created by Wuwang on 2017/9/11
+ * Copyright © 2017年 深圳哎吖科技. All rights reserved.
+ */
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.annotation.TargetApi;
+import android.content.res.Resources;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
+public class OesFilter extends Filter {
+
+    public OesFilter(Resources resource) {
+        super(resource,"shader/oes.vert", "shader/oes.frag");
+    }
+
+    public OesFilter(){
+        super(null,"attribute vec4 aVertexCo;\n" +
+                "attribute vec2 aTextureCo;\n" +
+                "\n" +
+                "uniform mat4 uVertexMatrix;\n" +
+                "uniform mat4 uTextureMatrix;\n" +
+                "\n" +
+                "varying vec2 vTextureCo;\n" +
+                "\n" +
+                "void main(){\n" +
+                "    gl_Position = uVertexMatrix*aVertexCo;\n" +
+                "    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
+                "}",
+                "#extension GL_OES_EGL_image_external : require\n" +
+                "precision mediump float;\n" +
+                "varying vec2 vTextureCo;\n" +
+                "uniform samplerExternalOES uTexture;\n" +
+                "void main() {\n" +
+                "    gl_FragColor = texture2D( uTexture, vTextureCo);\n" +
+                "}");
+    }
+
+    @Override
+    protected void onBindTexture(int textureId) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,textureId);
+        GLES20.glUniform1i(mGLTexture,0);
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/OesFilter.java.bak


+ 87 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/FrameBuffer.java

@@ -0,0 +1,87 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.opengl.GLES20;
+
+/**
+ * Created by Yj on 2017/9/13.
+ */
+
+public class FrameBuffer {
+
+    private int[] mFrameTemp;
+
+    public int bindFrameBuffer(int width,int height){
+        if(mFrameTemp==null){
+            mFrameTemp=new int[3];
+            GLES20.glGenFramebuffers(1,mFrameTemp,0);
+            GLES20.glGenTextures(1,mFrameTemp,1);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,mFrameTemp[1]);
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,GLES20.GL_RGBA, width, height,
+                    0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+            //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
+            //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
+            //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE);
+            //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);
+
+            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
+            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+                    GLES20.GL_TEXTURE_2D, mFrameTemp[1], 0);
+        }else{
+            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameTemp[0]);
+        }
+        return GLES20.glGetError();
+    }
+
+    public void createFrameBuffer(int width,int height,int tex_type,int tex_format,
+                                  int min_params,int max_params,int wrap_s,int wrap_t){
+        mFrameTemp=new int[3];
+        GLES20.glGenFramebuffers(1,mFrameTemp,0);
+        GLES20.glGenTextures(1,mFrameTemp,1);
+        GLES20.glBindTexture(tex_type,mFrameTemp[1]);
+        GLES20.glTexImage2D(tex_type, 0,tex_format, width, height,
+                0, tex_format, GLES20.GL_UNSIGNED_BYTE, null);
+        //设置缩小过滤为使用纹理中坐标最接近的一个像素的颜色作为需要绘制的像素颜色
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_MIN_FILTER,min_params);
+        //设置放大过滤为使用纹理中坐标最接近的若干个颜色,通过加权平均算法得到需要绘制的像素颜色
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_MAG_FILTER,max_params);
+        //设置环绕方向S,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_WRAP_S,wrap_s);
+        //设置环绕方向T,截取纹理坐标到[1/2n,1-1/2n]。将导致永远不会与border融合
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_WRAP_T,wrap_t);
+
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+                tex_type, mFrameTemp[1], 0);
+    }
+
+    public int bindFrameBuffer(){
+        if(mFrameTemp==null)return -1;
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
+        return GLES20.glGetError();
+    }
+
+    public void unBindFrameBuffer(){
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[2]);
+    }
+
+    public int getCacheTextureId(){
+        return mFrameTemp!=null?mFrameTemp[1]:-1;
+    }
+
+    public void destroyFrameBuffer(){
+        if(mFrameTemp!=null){
+            GLES20.glDeleteFramebuffers(1,mFrameTemp,0);
+            GLES20.glDeleteTextures(1,mFrameTemp,1);
+            mFrameTemp=null;
+        }
+    }
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/FrameBuffer.java.bak


+ 415 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MP4Edior.java

@@ -0,0 +1,415 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+
+import com.joe.camera2recorddemo.MediaCodecUtil.TrackUtils;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Semaphore;
+
/**
 * MP4 editor/previewer: decodes an MP4 with MediaCodec into a SurfaceTexture
 * and renders every decoded frame through an OpenGL renderer onto a
 * caller-supplied output Surface.
 * NOTE(review): the class name looks like a typo for "MP4Editor"; it is kept
 * unchanged because renaming would break existing callers.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class MP4Edior {

	//============================  OpenGL =========================

	private SurfaceTexture mInputTexture;   // receives decoded frames from the decoder Surface
	private Surface mOutputSurface;         // target the processed frames are rendered onto
	//    private EGLHelper mEncodeEGLHelper;
	private EGLHelper mShowEGLHelper;       // EGL context/surface helper for the GL thread

	private boolean mGLThreadFlag = false;  // keeps the GL loop running while true
	private Thread mGLThread;
	private WrapRenderer mRenderer;
	private Semaphore mSem;                 // released once per decoded frame
	private int mInputTextureId;            // texture backing mInputTexture

	private int mPreviewWidth = -1;
	private int mPreviewHeight = -1;
	private int mInputWidth = -1;
	private int mInputHeight = -1;

	private final Object VIDEO_LOCK = new Object();
	private final Object REC_LOCK = new Object();

	//===========================  MediaCodec ========================

	private static final String TAG = "VideoToFrames";
	private static final long DEFAULT_TIMEOUT_US = 10000;

	private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;

	MediaExtractor extractor = null;
	MediaCodec decoder = null;
	MediaFormat mediaFormat;

	private boolean isLoop = false;//whether to loop playback
	private boolean isStop = false;//whether playback is stopped
	private String videoFilePath;

	private Size mSize;//size of the input video

	public MP4Edior() {
		mShowEGLHelper = new EGLHelper();
		mSem = new Semaphore(0);
	}

	/**
	 * Creates the SurfaceTexture/Surface pair the decoder renders into.
	 * The frame-available listener is attached on the main looper and releases
	 * the semaphore so the GL thread wakes up once per decoded frame.
	 *
	 * @return a Surface suitable for MediaCodec#configure
	 */
	public Surface createInputSurfaceTexture() {
		mInputTextureId = mShowEGLHelper.createTextureID();
		mInputTexture = new SurfaceTexture(mInputTextureId);
		new Handler(Looper.getMainLooper()).post(new Runnable() {
			@Override
			public void run() {
				mInputTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
					@Override
					public void onFrameAvailable(SurfaceTexture surfaceTexture) {
						mSem.release();
					}
				});
			}
		});
		return new Surface(mInputTexture);
	}

	/** Sets the Surface (and its size) that processed frames are drawn onto. */
	public void setOutputSurface(Surface surface, int width, int height) {
		this.mOutputSurface = surface;
		this.mPreviewWidth = width;
		this.mPreviewHeight = height;
	}

	/** Sets the user renderer that filters each frame. */
	public void setRenderer(Renderer renderer) {
		mRenderer = new WrapRenderer(renderer);
	}

	/**
	 * Start preview: spawns the GL thread that renders decoded frames.
	 */
	public void startPreview() {
		synchronized (REC_LOCK) {
			mSem.drainPermits();
			mGLThreadFlag = true;
			mGLThread = new Thread(mGLRunnable);
			mGLThread.start();
		}
	}

	/**
	 * Stop preview: signals the GL thread to exit and joins it.
	 *
	 * @throws InterruptedException
	 */
	public void stopPreview() throws InterruptedException {
		synchronized (REC_LOCK) {
			mGLThreadFlag = false;
			mSem.release();
			if (mGLThread != null && mGLThread.isAlive()) {
				mGLThread.join();
				mGLThread = null;
			}
			Log.d("C2D", "CameraRecorder stopPreview");
		}
	}

	// GL loop: sets up EGL on the output surface, then for every decoded frame
	// updates the input texture and draws it through the renderer.
	private Runnable mGLRunnable = new Runnable() {
		@Override
		public void run() {
			if (mOutputSurface == null) {
				Log.e("C2D", "CameraRecorder GLThread exit : outputSurface==null");
				return;
			}
			if (mPreviewWidth <= 0 || mPreviewHeight <= 0) {
				Log.e("C2D", "CameraRecorder GLThread exit : Preview Size==0");
				return;
			}
			mShowEGLHelper.setSurface(mOutputSurface);
			boolean ret = mShowEGLHelper.createGLES(mPreviewWidth, mPreviewHeight);
			if (!ret) {
				Log.e("C2D", "CameraRecorder GLThread exit : createGLES failed");
				return;
			}
			if (mRenderer == null) {
				mRenderer = new WrapRenderer(null);
			}
			mRenderer.setFlag(WrapRenderer.TYPE_SURFACE);
			mRenderer.create();
			// Preserve the current framebuffer binding around sizeChanged(),
			// which may bind its own FBOs.
			int[] t = new int[1];
			GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, t, 0);
			mRenderer.sizeChanged(mPreviewWidth, mPreviewHeight);
			GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, t[0]);
			while (mGLThreadFlag) {
				try {
					// Block until the decoder produced a frame (or stopPreview releases us).
					mSem.acquire();
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
				if (mGLThreadFlag) {
					mInputTexture.updateTexImage();
					mInputTexture.getTransformMatrix(mRenderer.getTextureMatrix());
					synchronized (VIDEO_LOCK) {
						GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
						mRenderer.draw(mInputTextureId);
						mShowEGLHelper.swapBuffers();
					}
				}
			}
			mShowEGLHelper.destroyGLES();
		}
	};

	/**
	 * =====================================    MediaCodec   ===============================
	 */

	/**
	 * Initialize the decoder for the given file: selects the video track,
	 * reads rotation/size metadata and configures the codec to render into the
	 * Surface returned by createInputSurfaceTexture().
	 *
	 * @param videoFilePath path of the MP4 file to decode
	 */
	public void decodePrepare(String videoFilePath) {
		this.videoFilePath = videoFilePath;
		extractor = null;
		decoder = null;
		try {
			File videoFile = new File(videoFilePath);
			extractor = new MediaExtractor();
			extractor.setDataSource(videoFile.toString());
			int trackIndex = TrackUtils.selectVideoTrack(extractor);
			if (trackIndex < 0) {
				throw new RuntimeException("No video track found in " + videoFilePath);
			}
			extractor.selectTrack(trackIndex);
			mediaFormat = extractor.getTrackFormat(trackIndex);
			String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
			decoder = MediaCodec.createDecoderByType(mime);
			if (isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))) {
				mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
				Log.i(TAG, "set decode color format to type " + decodeColorFormat);
			} else {
				Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
			}

			// read width/height, swapping them when the video is rotated 90/270
			int rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
			if (rotation == 90 || rotation == 270) {
				mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
				mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
			} else {
				mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
				mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
			}
			mSize = new Size(mInputWidth, mInputHeight);

			// configure desired decode parameters (frame rate / I-frame interval / bit rate)
			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
			decoder.configure(mediaFormat, createInputSurfaceTexture(), null, 0);
			decoder.start();
		} catch (IOException ioe) {
			throw new RuntimeException("failed init encoder", ioe);
		}
	}

	/** Releases the decoder and extractor; IllegalStateException is logged, not thrown. */
	public void close() {
		try {
			if (decoder != null) {
				decoder.stop();
				decoder.release();
			}

			if (extractor != null) {
				extractor.release();
				extractor = null;
			}
		} catch (IllegalStateException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Called externally to run decoding. Blocks until the stream ends, then
	 * restarts from the beginning when looping is enabled and not stopped.
	 * NOTE(review): method name is a typo for "execute" but is kept for API
	 * compatibility.
	 */
	public void excuate() {
		try {
			decodeFramesToImage(decoder, extractor, mediaFormat);
		} finally {
			close();
			if (isLoop && !isStop) {
				decodePrepare(videoFilePath);
				excuate();
			}
		}

	}

	/**
	 * Set whether playback loops.
	 *
	 * @param isLoop
	 */
	public void setLoop(boolean isLoop) {
		this.isLoop = isLoop;
	}

	/**
	 * Check whether the codec supports the given color format.
	 *
	 * @param colorFormat
	 * @param caps
	 * @return
	 */
	private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps) {
		for (int c : caps.colorFormats) {
			if (c == colorFormat) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Decode loop: feeds extractor samples into the decoder and releases
	 * output buffers to the Surface, paced to the frame timestamps.
	 * NOTE(review): MediaCodec#getInputBuffer(int) requires API 21 although the
	 * class is annotated @TargetApi(JELLY_BEAN_MR2) — confirm minSdk.
	 *
	 * @param decoder
	 * @param extractor
	 * @param mediaFormat
	 */
	public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
		MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
		boolean sawInputEOS = false;
		boolean sawOutputEOS = false;

		long startMs = System.currentTimeMillis();
		while (!sawOutputEOS && !isStop) {
			if (!sawInputEOS) {
				int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
				if (inputBufferId >= 0) {
					ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
					int sampleSize = extractor.readSampleData(inputBuffer, 0); //read one sample (sampleSize bytes) into the input buffer
					if (sampleSize < 0) {
						decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
						sawInputEOS = true;
					} else {
						long presentationTimeUs = extractor.getSampleTime();
						Log.v(TAG, "presentationTimeUs:" + presentationTimeUs);
						decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
						extractor.advance();  //move to the next sample in the file
					}
				}
			}
			int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
			if (outputBufferId >= 0) {
				if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
					sawOutputEOS = true;
				}
				boolean doRender = (info.size != 0);
				if (doRender) {
					sleepRender(info, startMs);//throttle so frames are shown at their timestamps
					decoder.releaseOutputBuffer(outputBufferId, true);
				}
			}
		}
	}

	/**
	 * Stop decoding/playback.
	 */
	public void stop() {
		isStop = true;
	}

	/**
	 * Allow decoding/playback (clears the stop flag).
	 */
	public void start() {
		isStop = false;
	}

	/**
	 * Get the input video size.
	 *
	 * @return video size
	 */
	public Size getSize() {
		return mSize;
	}

	/**
	 * Sleeps until the frame's presentation time so playback runs at the
	 * original frame rate instead of as fast as the codec can decode.
	 * (The parameter name says "audio" but callers pass video buffer info.)
	 */
	private void sleepRender(MediaCodec.BufferInfo audioBufferInfo, long startMs) {
		while (audioBufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
			try {
				Thread.sleep(10);
			} catch (InterruptedException e) {
				e.printStackTrace();
				break;
			}
		}
	}

	/**
	 * Applies a Transformation (scale mode, rotation, crop, flip) to the
	 * renderer's vertex/texture matrices. Requires setRenderer() to have been
	 * called first (mRenderer is dereferenced unconditionally).
	 *
	 * @param transformation
	 */
	public void setTransformation(Transformation transformation) {
		float[] vms = mRenderer.getmFilter().getVertexMatrix();
		if (transformation.getScaleType() == MatrixUtils.TYPE_CENTERINSIDE) {
			// NOTE(review): the non-rotated branch passes input height before
			// width while the rotated branch passes width before height — this
			// looks inverted; confirm against MatrixUtils.getMatrix's signature.
			if (transformation.getRotation() == 90 || transformation.getRotation() == 270) {
				MatrixUtils.getMatrix(vms, MatrixUtils.TYPE_CENTERINSIDE, transformation.getInputSize().getWidth(), transformation.getInputSize().getHeight()
						, transformation.getOutputSize().getHeight(), transformation.getOutputSize().getWidth());
			} else {
				MatrixUtils.getMatrix(vms, MatrixUtils.TYPE_CENTERINSIDE, transformation.getInputSize().getHeight(), transformation.getInputSize().getWidth()
						, transformation.getOutputSize().getHeight(), transformation.getOutputSize().getWidth());
			}
		}

		//apply rotation
		if (transformation.getRotation() != 0) {
			MatrixUtils.rotation(vms, transformation.getRotation());
		}

		//apply cropping of the texture coordinates
		if (transformation.getCropRect() != null) {
			float[] vtCo = new float[8];
			MatrixUtils.crop(vtCo,transformation.getCropRect().x,transformation.getCropRect().y
					,transformation.getCropRect().width,transformation.getCropRect().height);
			mRenderer.getmFilter().setTextureCo(vtCo);
		}

		//apply flipping
		if (transformation.getFlip() != Transformation.FLIP_NONE) {
			switch (transformation.getFlip()) {
				case Transformation.FLIP_HORIZONTAL:
					MatrixUtils.flip(vms, true, false);
					break;
				case Transformation.FLIP_VERTICAL:
					MatrixUtils.flip(vms, false, true);
					break;
				case Transformation.FLIP_HORIZONTAL_VERTICAL:
					MatrixUtils.flip(vms, true, true);
					break;
				default:
					break;
			}
		}

		//upload the resulting vertex matrix
		mRenderer.getmFilter().setVertexMatrix(vms);
	}
}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MP4Edior.java.bak


+ 59 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/AdjustFilter.java

@@ -0,0 +1,59 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.ChooseFilter;
+import com.joe.camera2recorddemo.OpenGL.Filter.DistortionFilter;
+import com.joe.camera2recorddemo.OpenGL.Filter.GroupFilter;
+
+/**
+ * 调整滤镜
+ * Created by Yj on 2018/6/14.
+ */
+
+public class AdjustFilter extends GroupFilter {
+	private ContrastFilter contrastFilter;
+	private BrightnessFilter brightnessFilter;
+	private SaturationFilter saturationFilter;
+	private VignetteFilter vignetteFilter;
+	private DistortionFilter distortionFilter;
+
+	public AdjustFilter(Resources resource) {
+		super(resource);
+	}
+
+	@Override
+	protected void initBuffer() {
+		super.initBuffer();
+		contrastFilter = new ContrastFilter(mRes);
+		brightnessFilter = new BrightnessFilter(mRes);
+		saturationFilter = new SaturationFilter(mRes);
+		distortionFilter = new DistortionFilter(mRes);
+		vignetteFilter = new VignetteFilter(mRes);
+		addFilter(contrastFilter);
+		addFilter(brightnessFilter);
+		addFilter(saturationFilter);
+		addFilter(vignetteFilter);
+		addFilter(distortionFilter);
+	}
+
+	public ContrastFilter getContrastFilter() {
+		return contrastFilter;
+	}
+
+	public BrightnessFilter getBrightnessFilter() {
+		return brightnessFilter;
+	}
+
+	public SaturationFilter getSaturationFilter() {
+		return saturationFilter;
+	}
+
+	public DistortionFilter getDistortionFilter() {
+		return distortionFilter;
+	}
+
+	public VignetteFilter getVignetteFilter() {
+		return vignetteFilter;
+	}
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/AdjustFilter.java.bak


+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/BrightnessFilter.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+/**
+ * 亮度滤镜
+ */
+public class BrightnessFilter extends Filter {
+
+    private int brightnessType;
+    private float brightnessCode = 0.0f;
+
+    public BrightnessFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/brightness.frag");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        brightnessType = GLES20.glGetUniformLocation(mGLProgram, "brightness");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(brightnessType, brightnessCode);
+    }
+
+    public void setBrightnessCode(float brightnessCode) {
+        this.brightnessCode = brightnessCode;
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/BrightnessFilter.java.bak


+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/ContrastFilter.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+/**
+ * 对比度滤镜
+ */
+public class ContrastFilter extends Filter {
+
+    private int contrastType;
+    private float contrastCode = 1.0f;
+
+    public ContrastFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/contrast.frag");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        contrastType = GLES20.glGetUniformLocation(mGLProgram, "stepcv");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(contrastType, contrastCode);
+    }
+
+    public void setContrastCode(float contrastCode) {
+        this.contrastCode = contrastCode;
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/ContrastFilter.java.bak


+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/SaturationFilter.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+/**
+ * 饱和度滤镜
+ */
+public class SaturationFilter extends Filter {
+
+    private int saturationType;
+    private float saturationCode = 1.0f;
+
+    public SaturationFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/saturation.frag");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        saturationType = GLES20.glGetUniformLocation(mGLProgram, "saturation");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(saturationType, saturationCode);
+    }
+
+    public void setSaturationCode(float saturationCode) {
+        this.saturationCode = saturationCode;
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/SaturationFilter.java.bak


+ 57 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/VignetteFilter.java

@@ -0,0 +1,57 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.graphics.PointF;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+import java.nio.FloatBuffer;
+
+/**
+ * 暗角滤镜
+ */
+public class VignetteFilter extends Filter {
+
+    private int mVignetteCenterLocation;
+    private PointF mVignetteCenter = new PointF();
+    private int mVignetteColorLocation;
+    private float[] mVignetteColor = new float[] {0.0f, 0.0f, 0.0f};
+    private int mVignetteStartLocation;
+    private float mVignetteStart = 0.75f;
+    private int mVignetteEndLocation;
+    private float mVignetteEnd = 0.75f;
+    private float[] vec2 = new float[2];
+
+    public VignetteFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/vignette.frag");
+        vec2[0] = mVignetteCenter.x;
+        vec2[1] = mVignetteCenter.y;
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        mVignetteCenterLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteCenter");
+        mVignetteColorLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteColor");
+        mVignetteStartLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteStart");
+        mVignetteEndLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteEnd");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform2fv(mVignetteCenterLocation,1,vec2,0);
+        GLES20.glUniform3fv(mVignetteColorLocation,1, FloatBuffer.wrap(mVignetteColor));
+        GLES20.glUniform1f(mVignetteStartLocation, mVignetteStart);
+        GLES20.glUniform1f(mVignetteEndLocation, mVignetteEnd);
+    }
+
+    public void setmVignetteStart(float mVignetteStart) {
+        this.mVignetteStart = mVignetteStart;
+    }
+
+    public void setmVignetteEnd(float mVignetteEnd) {
+        this.mVignetteEnd = mVignetteEnd;
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/VignetteFilter.java.bak


+ 582 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Mp4Processor.java

@@ -0,0 +1,582 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMetadataRetriever;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Semaphore;
+
+
/**
 * MP4 processing tool; for now it only processes the video (image) stream.
 * Android 4.4 devices cannot handle video/mp4v-es streams (MediaMuxer cannot stop);
 * 5.0 and above work.
 */
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class Mp4Processor {
+
+	private final int TIME_OUT = 1000;
+
+	private String mInputPath;                  //输入路径
+	private String mOutputPath;                 //输出路径
+
+	private MediaCodec mVideoDecoder;           //视频解码器
+	private MediaCodec mVideoEncoder;           //视频编码器
+	//private MediaCodec mAudioDecoder;           //音频解码器
+	//private MediaCodec mAudioEncoder;           //音频编码器
+	private MediaExtractor mExtractor;          //音视频分离器
+	private MediaMuxer mMuxer;                  //音视频混合器
+	private EGLHelper mEGLHelper;               //GL环境创建的帮助类
+	private MediaCodec.BufferInfo mVideoDecoderBufferInfo;  //用于存储当前帧的视频解码信息
+	//private MediaCodec.BufferInfo mAudioDecoderBufferInfo;  //用于存储当前帧的音频解码信息
+	private MediaCodec.BufferInfo mVideoEncoderBufferInfo;  //用于存储当前帧的视频编码信息
+	private MediaCodec.BufferInfo mAudioEncoderBufferInfo;  //用于纯粹当前帧的音频编码信息
+
+	private int mAudioEncoderTrack = -1;     //解码音轨
+	private int mVideoEncoderTrack = -1;     //解码视轨
+	private int mAudioDecoderTrack = -1;     //编码音轨
+	private int mVideoDecoderTrack = -1;     //编码视轨
+
+	//private String mAudioMime;
+	//private String mVideoMime;
+
+	private int mInputVideoWidth = 0;     //输入视频的宽度
+	private int mInputVideoHeight = 0;    //输入视频的高度
+
+	private int mOutputVideoWidth = 0;    //输出视频的宽度
+	private int mOutputVideoHeight = 0;   //输出视频的高度
+	private int mVideoTextureId;        //原始视频图像的纹理
+	private SurfaceTexture mVideoSurfaceTexture;    //用于接收原始视频的解码的图像流
+
+	private boolean isRenderToWindowSurface;        //是否渲染到用户设置的WindowBuffer上,用于测试
+	private Surface mOutputSurface;                 //视频输出的Surface
+
+	private Thread mDecodeThread;
+	private Thread mGLThread;
+	private boolean mCodecFlag = false;
+	private boolean isVideoExtractorEnd = false;
+	private boolean isAudioExtractorEnd = false;
+	private boolean isStarted = false;
+	private WrapRenderer mRenderer;
+	private boolean mGLThreadFlag = false;
+	private Semaphore mSem;
+	private Semaphore mDecodeSem;
+
+	private final Object Extractor_LOCK = new Object();
+	private final Object MUX_LOCK = new Object();
+	private final Object PROCESS_LOCK = new Object();
+
+	private OnProgressListener mProgressListener;
+
+	private boolean isUserWantToStop = false;
+	private long mVideoStopTimeStamp = 0;     //视频停止时的时间戳,用于外部主动停止处理时,音频截取
+
+	private long mTotalVideoTime = 0;     //视频的总时长
+
+	private int filterRotation = 0;//滤镜的旋转角度
+
	/** Creates the EGL helper and the per-frame codec BufferInfo holders. */
	public Mp4Processor() {
		mEGLHelper = new EGLHelper();
		mVideoDecoderBufferInfo = new MediaCodec.BufferInfo();
		//mAudioDecoderBufferInfo=new MediaCodec.BufferInfo();
		mVideoEncoderBufferInfo = new MediaCodec.BufferInfo();
		mAudioEncoderBufferInfo = new MediaCodec.BufferInfo();
	}


	/**
	 * Sets the MP4 file to be processed.
	 *
	 * @param path file path
	 */
	public void setInputPath(String path) {
		this.mInputPath = path;
	}

	/**
	 * Sets where the processed mp4 is written.
	 *
	 * @param path file path
	 */
	public void setOutputPath(String path) {
		this.mOutputPath = path;
	}

	/**
	 * Sets the rotation applied by the filter stage (degrees; 90/270 swap the
	 * output width/height — see prepare()).
	 *
	 * @param rotation
	 */
	public void setFilterRotation(int rotation) {
		this.filterRotation = rotation;
	}

	/**
	 * Renders directly to the given Surface instead of muxing; for testing.
	 *
	 * @param surface render target (null disables direct rendering)
	 */
	public void setOutputSurface(Surface surface) {
		this.mOutputSurface = surface;
		this.isRenderToWindowSurface = surface != null;
	}

	/**
	 * Sets the user processing (filter) callback.
	 *
	 * @param renderer processing interface
	 */
	public void setRenderer(Renderer renderer) {
		mRenderer = new WrapRenderer(renderer);
	}

	/** @return the texture id that receives decoded video frames. */
	public int getVideoSurfaceTextureId() {
		return mVideoTextureId;
	}

	/** @return the SurfaceTexture the video decoder renders into. */
	public SurfaceTexture getVideoSurfaceTexture() {
		return mVideoSurfaceTexture;
	}

	/**
	 * Sets the output video size; defaults to the input size when unset.
	 *
	 * @param width  output frame width
	 * @param height output frame height
	 */
	public void setOutputSize(int width, int height) {
		this.mOutputVideoWidth = width;
		this.mOutputVideoHeight = height;
	}

	/** Registers the progress/completion listener. */
	public void setOnCompleteListener(OnProgressListener listener) {
		this.mProgressListener = listener;
	}
+
+	/**
+	 * Opens the input file, selects the audio/video tracks and configures the
+	 * video decoder (plus encoder and muxer when exporting to a file instead of
+	 * rendering to a window Surface).
+	 *
+	 * @return true when preparation succeeded
+	 * @throws IOException if the source cannot be read or a codec cannot be created
+	 */
+	private boolean prepare() throws IOException {
+		//TODO read the video rotation info and handle it accordingly
+		synchronized (PROCESS_LOCK) {
+			int videoRotation = 0;
+			MediaMetadataRetriever mMetRet = new MediaMetadataRetriever();
+			mMetRet.setDataSource(mInputPath);
+			mExtractor = new MediaExtractor();
+			mExtractor.setDataSource(mInputPath);
+			int count = mExtractor.getTrackCount();
+			//inspect every track of the MP4
+			for (int i = 0; i < count; i++) {
+				MediaFormat format = mExtractor.getTrackFormat(i);
+				String mime = format.getString(MediaFormat.KEY_MIME);
+				Log.d("Mp4Processor", "extractor format-->" + mExtractor.getTrackFormat(i));
+				if (mime.startsWith("audio")) {
+					mAudioDecoderTrack = i;
+					//TODO audio is currently passed through untouched; revisit when audio processing is needed
+					/*mAudioDecoder=MediaCodec.createDecoderByType(mime);
+					mAudioDecoder.configure(format,null,null,0);
+                    if(!isRenderToWindowSurface){
+                        Log.e("wuwang", format.toString());
+                        MediaFormat audioFormat=MediaFormat.createAudioFormat(mime,
+                                format.getInteger(MediaFormat.KEY_SAMPLE_RATE),
+                                format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
+                        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
+                                format.getInteger(MediaFormat.KEY_AAC_PROFILE));
+                        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE,
+                                Integer.valueOf(mMetRet.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)));
+                        mAudioEncoder=MediaCodec.createEncoderByType(mime);
+                        mAudioEncoder.configure(audioFormat,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
+                    }*/
+				} else if (mime.startsWith("video")) {
+					//mp4v-es cannot be decoded below API 21 //TODO problems have been seen on 21+ too, cause unknown
+//                    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP&&mime.equals(MediaFormat.MIMETYPE_VIDEO_MPEG4)) {
+//                        return false;
+//                    }
+					mVideoDecoderTrack = i;
+					mTotalVideoTime = Long.valueOf(mMetRet.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION));
+					String rotation = mMetRet.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
+					if (rotation != null) {
+						videoRotation = Integer.valueOf(rotation);
+					}
+					// swap reported width/height for 90°/270° sources
+					if (videoRotation == 90 || videoRotation == 270) {
+						mInputVideoHeight = format.getInteger(MediaFormat.KEY_WIDTH);
+						mInputVideoWidth = format.getInteger(MediaFormat.KEY_HEIGHT);
+					} else {
+						mInputVideoWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+						mInputVideoHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+					}
+					Log.e("Mp4Processor", "createDecoder");
+					mVideoDecoder = MediaCodec.createDecoderByType(mime);
+					Log.e("Mp4Processor", "createDecoder end");
+					// decode onto a SurfaceTexture so frames can be filtered in GL
+					mVideoTextureId = mEGLHelper.createTextureID();
+					mVideoSurfaceTexture = new SurfaceTexture(mVideoTextureId);
+					mVideoSurfaceTexture.setOnFrameAvailableListener(mFrameAvaListener);
+					mVideoDecoder.configure(format, new Surface(mVideoSurfaceTexture), null, 0);
+					if (!isRenderToWindowSurface) {
+						if (mOutputVideoWidth == 0 || mOutputVideoHeight == 0) {
+							mOutputVideoWidth = mInputVideoWidth;
+							mOutputVideoHeight = mInputVideoHeight;
+						}
+						//swap the output size when the filter itself rotates by 90°/270°
+						if(filterRotation == 90 || filterRotation == 270){
+							int temp = mOutputVideoWidth;
+							mOutputVideoWidth = mOutputVideoHeight;
+							mOutputVideoHeight = temp;
+						}
+						Log.v("Mp4ProcessorHH","w:"+mOutputVideoWidth+";h:"+mOutputVideoHeight+";r:"+filterRotation);
+						MediaFormat videoFormat = MediaFormat.createVideoFormat(/*mime*/"video/avc", mOutputVideoWidth, mOutputVideoHeight);
+						videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+						videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, mOutputVideoHeight * mOutputVideoWidth * 5);
+						videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 24);
+						videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+//						videoFormat.setInteger(MediaFormat.KEY_ROTATION,0);
+						mVideoEncoder = MediaCodec.createEncoderByType(/*mime*/"video/avc");
+						mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+						mOutputSurface = mVideoEncoder.createInputSurface();
+						Bundle bundle = new Bundle();
+						if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+							bundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, mOutputVideoHeight * mOutputVideoWidth * 5);
+							mVideoEncoder.setParameters(bundle);
+						}
+					}
+				}
+			}
+			// fix: release the retriever's native resources (previously leaked)
+			mMetRet.release();
+			if (!isRenderToWindowSurface) {
+				//no target Surface was supplied, so export to a file; audio is left untouched for now
+				mMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+//                mMuxer.setOrientationHint(videoRotation);
+				Log.d("Mp4Processor", "video rotation:" + videoRotation);
+				//if the mp4 contains an audio track, register it with the muxer as-is
+				if (mAudioDecoderTrack >= 0) {
+					MediaFormat format = mExtractor.getTrackFormat(mAudioDecoderTrack);
+					Log.d("Mp4Processor", "audio track-->" + format.toString());
+
+					mAudioEncoderTrack = mMuxer.addTrack(format);
+				}
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Prepares the pipeline and starts the decoder/encoder plus the GL and codec
+	 * worker threads. Idempotent: only the first call after (re)creation acts.
+	 *
+	 * @return false when {@link #prepare()} failed, true otherwise
+	 * @throws IOException propagated from {@link #prepare()}
+	 */
+	public boolean start() throws IOException {
+		synchronized (PROCESS_LOCK) {
+			if (!isStarted) {
+				if (!prepare()) {
+					Log.e("Mp4Processor", "prepare failed");
+					return false;
+				}
+
+				isUserWantToStop = false;
+
+				isVideoExtractorEnd = false;
+				// fix: this line previously reset isVideoExtractorEnd a second time
+				isAudioExtractorEnd = false;
+				mGLThreadFlag = true;
+				mVideoDecoder.start();
+				//mAudioDecoder.start();
+				if (!isRenderToWindowSurface) {
+					//mAudioEncoder.start();
+					mVideoEncoder.start();
+				}
+
+				// GL thread: draws decoded frames (optionally filtered) into the encoder surface
+				mGLThread = new Thread(new Runnable() {
+					@Override
+					public void run() {
+						glRunnable();
+					}
+				});
+				mGLThread.start();
+
+				mCodecFlag = true;
+				// codec thread: drives extraction/decoding, then copies audio, then tears down
+				mDecodeThread = new Thread(new Runnable() {
+					@Override
+					public void run() {
+						//video processing
+						if (mVideoDecoderTrack >= 0) {
+							Log.d("Mp4Processor", "videoDecodeStep start");
+							while (mCodecFlag && !videoDecodeStep()) ;
+							Log.d("Mp4Processor", "videoDecodeStep end");
+							mGLThreadFlag = false;
+							try {
+								// wake the GL thread so it can observe mGLThreadFlag and exit
+								mSem.release();
+								mGLThread.join();
+							} catch (InterruptedException e) {
+								e.printStackTrace();
+							}
+						}
+						//copy the original audio track into the new file unchanged
+						if (mAudioDecoderTrack >= 0 && mVideoEncoderTrack >= 0) {
+							ByteBuffer buffer = ByteBuffer.allocate(1024 * 32);
+							while (mCodecFlag && !audioDecodeStep(buffer)) ;
+							buffer.clear();
+						}
+
+						Log.d("Mp4Processor", "codec thread_finish");
+						mCodecFlag = false;
+						avStop();
+						//TODO distinguish the case where the user cancelled
+						if (mProgressListener != null) {
+							mProgressListener.onComplete(mOutputPath);
+						}
+					}
+				});
+				mDecodeThread.start();
+				isStarted = true;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * 等待解码线程执行完毕,异步线程同步等待
+	 */
+	public void waitProcessFinish() throws InterruptedException {
+		if (mDecodeThread != null && mDecodeThread.isAlive()) {
+			mDecodeThread.join();
+		}
+	}
+
+	/**
+	 * Copies one encoded audio sample straight from the extractor into the muxer
+	 * (no decode/re-encode pass).
+	 *
+	 * @param buffer scratch buffer the sample is read into; cleared on entry
+	 * @return true when the audio track is exhausted or the sample time has
+	 *         reached the last processed video timestamp
+	 */
+	private boolean audioDecodeStep(ByteBuffer buffer) {
+		boolean isTimeEnd = false;
+		buffer.clear();
+		synchronized (Extractor_LOCK) {
+			mExtractor.selectTrack(mAudioDecoderTrack);
+			int length = mExtractor.readSampleData(buffer, 0);
+			if (length != -1) {
+				int flags = mExtractor.getSampleFlags();
+				mAudioEncoderBufferInfo.size = length;
+				mAudioEncoderBufferInfo.flags = flags;
+				mAudioEncoderBufferInfo.presentationTimeUs = mExtractor.getSampleTime();
+				mAudioEncoderBufferInfo.offset = 0;
+				// stop once audio has caught up with the last video sample written
+				isTimeEnd = mExtractor.getSampleTime() >= mVideoStopTimeStamp;
+				mMuxer.writeSampleData(mAudioEncoderTrack, buffer, mAudioEncoderBufferInfo);
+			}
+			isAudioExtractorEnd = !mExtractor.advance();
+		}
+		return isAudioExtractorEnd || isTimeEnd;
+	}
+
+	// Decodes video onto the SurfaceTexture for later GL processing. The return
+	// value indicates whether this was the last video frame.
+	/**
+	 * Feeds one encoded sample into the video decoder and drains decoded frames.
+	 * For each decoded frame it releases {@code mSem} (waking the GL thread) and
+	 * then blocks on {@code mDecodeSem} until the GL thread has consumed it.
+	 *
+	 * @return true when the extractor is exhausted or the user asked to stop
+	 */
+	private boolean videoDecodeStep() {
+		int mInputIndex = mVideoDecoder.dequeueInputBuffer(TIME_OUT);
+		if (mInputIndex >= 0) {
+			ByteBuffer buffer = getInputBuffer(mVideoDecoder, mInputIndex);
+			buffer.clear();
+			synchronized (Extractor_LOCK) {
+				mExtractor.selectTrack(mVideoDecoderTrack);
+				int ret = mExtractor.readSampleData(buffer, 0);
+				if (ret != -1) {
+					// remember the newest video timestamp; audio copying stops at this time
+					mVideoStopTimeStamp = mExtractor.getSampleTime();
+					Log.d("Mp4Processor", "mVideoStopTimeStamp:" + mVideoStopTimeStamp);
+					mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
+				}
+				isVideoExtractorEnd = !mExtractor.advance();
+			}
+		}
+		while (true) {
+			int mOutputIndex = mVideoDecoder.dequeueOutputBuffer(mVideoDecoderBufferInfo, TIME_OUT);
+			if (mOutputIndex >= 0) {
+				try {
+					Log.d("Mp4Processor", " mDecodeSem.acquire ");
+					// hand the frame to the GL thread, then wait until it has drawn it
+					mSem.release();
+					if (!isUserWantToStop) {
+						mDecodeSem.acquire();
+					}
+					Log.d("Mp4Processor", " mDecodeSem.acquire end ");
+				} catch (InterruptedException e) {
+					e.printStackTrace();
+				}
+				// render=true pushes the decoded frame onto the SurfaceTexture
+				mVideoDecoder.releaseOutputBuffer(mOutputIndex, true);
+			} else if (mOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+				//MediaFormat format=mVideoDecoder.getOutputFormat();
+			} else if (mOutputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+				break;
+			}
+		}
+		return isVideoExtractorEnd || isUserWantToStop;
+	}
+
+	/**
+	 * Drains the video encoder output into the muxer. The first
+	 * {@code INFO_OUTPUT_FORMAT_CHANGED} adds the video track, starts the muxer
+	 * and wakes any thread waiting on {@code MUX_LOCK}.
+	 *
+	 * @param isEnd when true, signals end-of-stream to the encoder first
+	 * @return always false (the result is not used by callers in this file)
+	 */
+	private boolean videoEncodeStep(boolean isEnd) {
+		if (isEnd) {
+			mVideoEncoder.signalEndOfInputStream();
+		}
+		while (true) {
+			int mOutputIndex = mVideoEncoder.dequeueOutputBuffer(mVideoEncoderBufferInfo, TIME_OUT);
+			Log.d("Mp4Processor", "videoEncodeStep-------------------mOutputIndex=" + mOutputIndex + "/" + mVideoEncoderBufferInfo.presentationTimeUs);
+			if (mOutputIndex >= 0) {
+				ByteBuffer buffer = getOutputBuffer(mVideoEncoder, mOutputIndex);
+				// skip empty buffers (e.g. codec-config carried separately)
+				if (mVideoEncoderBufferInfo.size > 0) {
+					mMuxer.writeSampleData(mVideoEncoderTrack, buffer, mVideoEncoderBufferInfo);
+				}
+				mVideoEncoder.releaseOutputBuffer(mOutputIndex, false);
+			} else if (mOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+				MediaFormat format = mVideoEncoder.getOutputFormat();
+				Log.d("Mp4Processor", "video format -->" + format.toString());
+				mVideoEncoderTrack = mMuxer.addTrack(format);
+				mMuxer.start();
+				synchronized (MUX_LOCK) {
+					MUX_LOCK.notifyAll();
+				}
+			} else if (mOutputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+				break;
+			}
+		}
+		return false;
+	}
+
+	/**
+	 * GL thread body: binds EGL to the encoder input surface, then for every
+	 * decoded frame draws it through the (optional) filter renderer and hands
+	 * the result to the encoder. Paced against the decoder via mSem/mDecodeSem.
+	 */
+	private void glRunnable() {
+		mSem = new Semaphore(0);
+		mDecodeSem = new Semaphore(0);
+		mEGLHelper.setSurface(mOutputSurface);
+		boolean ret = mEGLHelper.createGLES(mOutputVideoWidth, mOutputVideoHeight);
+		if (!ret) return;
+		if (mRenderer == null) {
+			mRenderer = new WrapRenderer(null);
+		}
+		mRenderer.setFlag(WrapRenderer.TYPE_MOVE);
+		mRenderer.create();
+		mRenderer.sizeChanged(mOutputVideoWidth, mOutputVideoHeight);
+		while (mGLThreadFlag) {
+			try {
+				Log.d("Mp4Processor", " mSem.acquire ");
+				// wait until the decoder reports a new frame (or shutdown)
+				mSem.acquire();
+				Log.d("Mp4Processor", " mSem.acquire end");
+			} catch (InterruptedException e) {
+				e.printStackTrace();
+			}
+			if (mGLThreadFlag) {
+				mVideoSurfaceTexture.updateTexImage();
+				//TODO videos carrying a rotation flag still need special handling here
+				mVideoSurfaceTexture.getTransformMatrix(mRenderer.getTextureMatrix());
+				mRenderer.draw(mVideoTextureId);
+				// stamp the EGL frame with the decoder timestamp (us -> ns)
+				mEGLHelper.setPresentationTime(mVideoDecoderBufferInfo.presentationTimeUs * 1000);
+				if (!isRenderToWindowSurface) {
+					videoEncodeStep(false);
+				}
+				mEGLHelper.swapBuffers();
+			}
+			if (mProgressListener != null) {
+				// total time (ms, from metadata) scaled to us to match presentationTimeUs
+				mProgressListener.onProgress(getTotalVideoTime() * 1000L, mVideoDecoderBufferInfo.presentationTimeUs);
+			}
+			// allow the decoder to continue with the next frame
+			mDecodeSem.release();
+		}
+		if (!isRenderToWindowSurface) {
+			// flush the remaining encoder output and signal end-of-stream
+			videoEncodeStep(true);
+		}
+		mRenderer.destroy();
+		mEGLHelper.destroyGLES();
+	}
+
+	public long getPresentationTime() {
+		return mVideoDecoderBufferInfo.presentationTimeUs * 1000;
+	}
+
+	/** Total duration of the input video as read from its metadata in prepare(). */
+	public long getTotalVideoTime() {
+		return this.mTotalVideoTime;
+	}
+
+	// SurfaceTexture callback. Frame pacing is driven by the decode loop via
+	// mSem/mDecodeSem instead, so the release() here is deliberately commented out.
+	private SurfaceTexture.OnFrameAvailableListener mFrameAvaListener = new SurfaceTexture.OnFrameAvailableListener() {
+		@Override
+		public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+			Log.e("Mp4Processor", "mSem.release ");
+//            mSem.release();
+		}
+	};
+
+	/**
+	 * Stops and releases the decoder, encoder, muxer and extractor, then resets
+	 * all track indices. No-op unless the processor was started.
+	 */
+	private void avStop() {
+		if (isStarted) {
+			if (mVideoDecoder != null) {
+				mVideoDecoder.stop();
+				mVideoDecoder.release();
+				mVideoDecoder = null;
+			}
+			if (!isRenderToWindowSurface && mVideoEncoder != null) {
+				mVideoEncoder.stop();
+				mVideoEncoder.release();
+				mVideoEncoder = null;
+			}
+			if (!isRenderToWindowSurface) {
+				// only stop the muxer when a track was actually added; stop() throws otherwise
+				if (mMuxer != null && mVideoEncoderTrack >= 0) {
+					try {
+						mMuxer.stop();
+					} catch (IllegalStateException e) {
+						e.printStackTrace();
+					}
+				}
+				if (mMuxer != null) {
+					try {
+						mMuxer.release();
+					} catch (IllegalStateException e) {
+						e.printStackTrace();
+					}
+					mMuxer = null;
+				}
+			}
+			if (mExtractor != null) {
+				mExtractor.release();
+			}
+			isStarted = false;
+			mVideoEncoderTrack = -1;
+			mVideoDecoderTrack = -1;
+			mAudioEncoderTrack = -1;
+			mAudioDecoderTrack = -1;
+		}
+	}
+
+	/**
+	 * Requests the pipeline to stop and blocks until the codec thread finishes.
+	 * Releases the decode semaphore first so the decoder thread cannot remain
+	 * blocked waiting for the GL thread.
+	 *
+	 * @return always true
+	 * @throws InterruptedException if interrupted while joining the codec thread
+	 */
+	public boolean stop() throws InterruptedException {
+		synchronized (PROCESS_LOCK) {
+			if (isStarted) {
+				if (mCodecFlag) {
+					mDecodeSem.release();
+					isUserWantToStop = true;
+					if (mDecodeThread != null && mDecodeThread.isAlive()) {
+						Log.d("Mp4Processor", "try to stop decode thread");
+						mDecodeThread.join();
+						Log.d("Mp4Processor", "decode thread stoped");
+					}
+					isUserWantToStop = false;
+				}
+			}
+		}
+		return true;
+	}
+
+	public boolean release() throws InterruptedException {
+		synchronized (PROCESS_LOCK) {
+			if (mCodecFlag) {
+				stop();
+			}
+		}
+		return true;
+	}
+
+	private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
+		if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+			return codec.getInputBuffer(index);
+		} else {
+			return codec.getInputBuffers()[index];
+		}
+	}
+
+	private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
+		if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+			return codec.getOutputBuffer(index);
+		} else {
+			return codec.getOutputBuffers()[index];
+		}
+	}
+
+	/** Progress callbacks for the processing pipeline. */
+	public interface OnProgressListener {
+		/**
+		 * @param max     total video duration in microseconds (metadata duration * 1000)
+		 * @param current presentation time of the frame just processed, in microseconds
+		 */
+		void onProgress(long max, long current);
+
+		/**
+		 * Called once when processing finishes.
+		 *
+		 * @param path the output file path
+		 */
+		void onComplete(String path);
+	}
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Mp4Processor.java.bak


+ 13 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Renderer.java

@@ -0,0 +1,13 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+/**
+ * Minimal render callback contract used by the GL pipeline (see WrapRenderer):
+ * create() runs after the GL context is ready, sizeChanged() follows with the
+ * output dimensions, draw() is invoked per frame, destroy() at shutdown.
+ */
+public interface Renderer {
+
+    /** Called once after the GL context has been created; allocate GL resources here. */
+    void create();
+
+    /** Called when the output size is known or changes. */
+    void sizeChanged(int width, int height);
+
+    /** Draws one frame from the given texture id. */
+    void draw(int texture);
+
+    /** Releases any GL resources created in {@link #create()}. */
+    void destroy();
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Renderer.java.bak


+ 180 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/TransUtil.java

@@ -0,0 +1,180 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+
+/**
+ * Created by Yj on 2017/10/30.
+ * 变换的帮助类
+ */
+
+public class TransUtil {
+
+	// Scratch array holding the coordinates currently being transformed.
+	// NOTE(review): shared static mutable state — getTransformationCo mutates the
+	// caller's array in place and is not thread-safe; confirm single-threaded use.
+	public static float[] textureCoords;
+	/**
+	 * Applies the given transformation (crop, then flip, then rotation) to a set
+	 * of texture coordinates.
+	 *
+	 * @param tc 				original coordinates: 4 corner points as 8 floats
+	 *                          (LB, RB, LT, RT); mutated in place
+	 * @param transformation	transformation to apply
+	 * @return the transformed coordinate array (the same instance as {@code tc})
+	 */
+	public static float[] getTransformationCo(float[] tc,final Transformation transformation) {
+		textureCoords = tc;
+		if (transformation.getCropRect() != null) {
+			resolveCrop(transformation.getCropRect().x, transformation.getCropRect().y,
+					transformation.getCropRect().width, transformation.getCropRect().height);
+		} else {
+			// no crop rect supplied: fall back to the full texture
+			resolveCrop(Transformation.FULL_RECT.x, Transformation.FULL_RECT.y,
+					Transformation.FULL_RECT.width, Transformation.FULL_RECT.height);
+		}
+		resolveFlip(transformation.getFlip());
+		resolveRotate(transformation.getRotation());
+		return textureCoords;
+	}
+
+	// Overwrites the four corners with the given sub-rectangle of the texture.
+	private static void resolveCrop(float x, float y, float width, float height) {
+		float minX = x;
+		float minY = y;
+		float maxX = minX + width;
+		float maxY = minY + height;
+
+		// left bottom
+		textureCoords[0] = minX;
+		textureCoords[1] = minY;
+		// right bottom
+		textureCoords[2] = maxX;
+		textureCoords[3] = minY;
+		// left top
+		textureCoords[4] = minX;
+		textureCoords[5] = maxY;
+		// right top
+		textureCoords[6] = maxX;
+		textureCoords[7] = maxY;
+	}
+
+	// Mirrors the quad: horizontal swaps the x components, vertical the y components.
+	private static void resolveFlip(int flip) {
+		switch (flip) {
+			case Transformation.FLIP_HORIZONTAL:
+				swap(textureCoords, 0, 2);
+				swap(textureCoords, 4, 6);
+				break;
+			case Transformation.FLIP_VERTICAL:
+				swap(textureCoords, 1, 5);
+				swap(textureCoords, 3, 7);
+				break;
+			case Transformation.FLIP_HORIZONTAL_VERTICAL:
+				swap(textureCoords, 0, 2);
+				swap(textureCoords, 4, 6);
+
+				swap(textureCoords, 1, 5);
+				swap(textureCoords, 3, 7);
+				break;
+			case Transformation.FLIP_NONE:
+			default:
+				break;
+		}
+	}
+
+	// Rotates the quad by cycling its four corner points (indices: 0/1=LB, 2/3=RB,
+	// 4/5=LT, 6/7=RT).
+	private static void resolveRotate(int rotation) {
+		float x, y;
+		switch (rotation) {
+			case 90:
+				// cycle: LB <- LT <- RT <- RB <- old LB
+				x = textureCoords[0];
+				y = textureCoords[1];
+				textureCoords[0] = textureCoords[4];
+				textureCoords[1] = textureCoords[5];
+				textureCoords[4] = textureCoords[6];
+				textureCoords[5] = textureCoords[7];
+				textureCoords[6] = textureCoords[2];
+				textureCoords[7] = textureCoords[3];
+				textureCoords[2] = x;
+				textureCoords[3] = y;
+				break;
+			case 180:
+				// point reflection: LB <-> RT and RB <-> LT
+				swap(textureCoords, 0, 6);
+				swap(textureCoords, 1, 7);
+				swap(textureCoords, 2, 4);
+				swap(textureCoords, 3, 5);
+				break;
+			case 270:
+				// cycle: LB <- RB <- RT <- LT <- old LB (inverse of the 90° case)
+				x = textureCoords[0];
+				y = textureCoords[1];
+				textureCoords[0] = textureCoords[2];
+				textureCoords[1] = textureCoords[3];
+				textureCoords[2] = textureCoords[6];
+				textureCoords[3] = textureCoords[7];
+				textureCoords[6] = textureCoords[4];
+				textureCoords[7] = textureCoords[5];
+				textureCoords[4] = x;
+				textureCoords[5] = y;
+				break;
+			case 0:
+			default:
+				break;
+		}
+	}
+
+//	/**
+//	 * Scale transform
+//	 * @param vertices 		vertex coordinates
+//	 * @param inputWidth 	input width
+//	 * @param inputHeight 	input height
+//	 * @param outputWidth	output width
+//	 * @param outputHeight	output height
+//	 * @param scaleType		scale type
+//	 * @return
+//	 */
+//	public static float[] resolveScale(float[] vertices,int inputWidth, int inputHeight, int outputWidth, int outputHeight,
+//							  int scaleType) {
+//		if (scaleType == Transformation.SCALE_TYPE_FIT_XY) {
+//			// The default is FIT_XY
+//			return vertices;
+//		}
+//
+//		// Note: scale type need to be implemented by adjusting
+//		// the vertices (not textureCoords).
+//		if (inputWidth * outputHeight == inputHeight * outputWidth) {
+//			// Optional optimization: If input w/h aspect is the same as output's,
+//			// there is no need to adjust vertices at all.
+//			return vertices;
+//		}
+//
+//		float inputAspect = inputWidth / (float) inputHeight;
+//		float outputAspect = outputWidth / (float) outputHeight;
+//
+//		if (scaleType == Transformation.SCALE_TYPE_CENTER_CROP) {
+//			if (inputAspect < outputAspect) {
+//				float heightRatio = outputAspect / inputAspect;
+//				vertices[1] *= heightRatio;
+//				vertices[3] *= heightRatio;
+//				vertices[5] *= heightRatio;
+//				vertices[7] *= heightRatio;
+//			} else {
+//				float widthRatio = inputAspect / outputAspect;
+//				vertices[0] *= widthRatio;
+//				vertices[2] *= widthRatio;
+//				vertices[4] *= widthRatio;
+//				vertices[6] *= widthRatio;
+//			}
+//		} else if (scaleType == Transformation.SCALE_TYPE_CENTER_INSIDE) {
+//			if (inputAspect < outputAspect) {
+//				float widthRatio = inputAspect / outputAspect;
+//				vertices[0] *= widthRatio;
+//				vertices[2] *= widthRatio;
+//				vertices[4] *= widthRatio;
+//				vertices[6] *= widthRatio;
+//			} else {
+//				float heightRatio = outputAspect / inputAspect;
+//				vertices[1] *= heightRatio;
+//				vertices[3] *= heightRatio;
+//				vertices[5] *= heightRatio;
+//				vertices[7] *= heightRatio;
+//			}
+//		}
+//		return vertices;
+//	}
+
+	// Swaps two elements of the array.
+	private static void swap(float[] arr, int index1, int index2) {
+		float temp = arr[index1];
+		arr[index1] = arr[index2];
+		arr[index2] = temp;
+	}
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/TransUtil.java.bak


+ 88 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Transformation.java

@@ -0,0 +1,88 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.util.Size;
+
+/**
+ * Created by Yj on 2017/10/30.
+ * 图形旋转,翻转,缩放,裁剪类
+ */
+
+public class Transformation {
+    // Crop rectangle covering the full texture, i.e. no cropping.
+    public static final Rect FULL_RECT = new Rect(0, 0, 1, 1);
+
+    // Flip modes, interpreted by TransUtil.resolveFlip().
+    public static final int FLIP_NONE = 2001;
+    public static final int FLIP_HORIZONTAL = 2002;
+    public static final int FLIP_VERTICAL = 2003;
+    public static final int FLIP_HORIZONTAL_VERTICAL = 2004;
+
+    private Rect cropRect = FULL_RECT;   // defaults to the whole texture
+    private int flip = FLIP_NONE;
+    private int rotation = 0;            // degrees; TransUtil handles 0/90/180/270
+    private Size inputSize;
+    private Size outputSize;
+    private int scaleType = 0;
+
+    /** Sets the crop rectangle applied before flip/rotation. */
+    public void setCrop(Rect cropRect) {
+        this.cropRect = cropRect;
+    }
+
+    /** Sets one of the FLIP_* constants. */
+    public void setFlip(int flip) {
+        this.flip = flip;
+    }
+
+    /** Sets the rotation in degrees (0/90/180/270 are handled by TransUtil). */
+    public void setRotation(int rotation) {
+        this.rotation = rotation;
+    }
+
+    public void setInputSize(Size inputSize) {
+        this.inputSize = inputSize;
+    }
+
+    public void setOutputSize(Size outputSize) {
+        this.outputSize = outputSize;
+    }
+
+    public Rect getCropRect() {
+        return cropRect;
+    }
+
+    public int getFlip() {
+        return flip;
+    }
+
+    public int getRotation() {
+        return rotation;
+    }
+
+    public Size getInputSize() {
+        return inputSize;
+    }
+
+    public Size getOutputSize() {
+        return outputSize;
+    }
+
+    public int getScaleType() {
+        return scaleType;
+    }
+
+    /** Convenience setter for all scaling parameters at once. */
+    public void setScale(Size inputSize, Size outputSize, int scaleType) {
+        this.inputSize = inputSize;
+        this.outputSize = outputSize;
+        this.scaleType = scaleType;
+    }
+
+    /**
+     * Immutable axis-aligned rectangle. FULL_RECT spans (0,0)-(1,1), i.e.
+     * normalized texture space.
+     */
+    public static class Rect {
+        final float x;
+        final float y;
+        final float width;
+        final float height;
+
+        public Rect(final float x, final float y, final float width, final float height) {
+            this.x = x;
+            this.y = y;
+            this.width = width;
+            this.height = height;
+        }
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Transformation.java.bak


+ 80 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/WrapRenderer.java

@@ -0,0 +1,80 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+
+import com.joe.camera2recorddemo.OpenGL.Filter.OesFilter;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+/**
+ * Created by aiya on 2017/9/12.
+ */
+
+/**
+ * Renderer decorator: runs the input texture through an {@link OesFilter} and,
+ * when an inner {@link Renderer} is supplied, forwards the filtered texture to
+ * it; otherwise draws the filter output directly.
+ */
+class WrapRenderer implements Renderer{
+
+    private Renderer mRenderer;
+    private OesFilter mFilter;
+    // NOTE(review): created in the constructor but never used in this class —
+    // confirm whether it can be removed.
+    private FrameBuffer mFrameBuffer;
+
+    // Source types selecting the vertex coordinates in setFlag().
+    public static final int TYPE_MOVE=0;
+    public static final int TYPE_CAMERA=1;
+    public static final int TYPE_SURFACE=2;
+
+    /**
+     * @param renderer optional inner renderer; may be null. When present, the
+     *                 vertex matrix is flipped vertically to compensate for the
+     *                 intermediate texture pass.
+     */
+    public WrapRenderer(Renderer renderer){
+        this.mRenderer=renderer;
+        mFrameBuffer=new FrameBuffer();
+        mFilter=new OesFilter();
+        if(renderer!=null){
+            MatrixUtils.flip(mFilter.getVertexMatrix(),false,true);
+        }
+    }
+
+    /** Exposes the underlying OES filter. */
+    public OesFilter getmFilter() {
+        return mFilter;
+    }
+
+    /** Selects the vertex coordinate set matching the source type (TYPE_*). */
+    public void setFlag(int flag){
+        if(flag==TYPE_SURFACE){
+            mFilter.setVertexCo(MatrixUtils.getSurfaceVertexCo());
+        }else if(flag==TYPE_CAMERA){
+            mFilter.setVertexCo(MatrixUtils.getCameraVertexCo());
+        }else if(flag==TYPE_MOVE){
+            mFilter.setVertexCo(MatrixUtils.getMoveVertexCo());
+        }
+    }
+
+    /** Texture transform matrix, filled externally (e.g. from a SurfaceTexture). */
+    public float[] getTextureMatrix(){
+        return mFilter.getTextureMatrix();
+    }
+
+    @Override
+    public void create() {
+        mFilter.create();
+        if(mRenderer!=null){
+            mRenderer.create();
+        }
+    }
+
+    @Override
+    public void sizeChanged(int width, int height) {
+        mFilter.sizeChanged(width, height);
+        if(mRenderer!=null){
+            mRenderer.sizeChanged(width, height);
+        }
+    }
+
+    @Override
+    public void draw(int texture) {
+        if(mRenderer!=null){
+            // filter into an intermediate texture, then let the inner renderer draw it
+            mRenderer.draw(mFilter.drawToTexture(texture));
+        }else{
+            mFilter.draw(texture);
+        }
+    }
+
+    @Override
+    public void destroy() {
+        if(mRenderer!=null){
+            mRenderer.destroy();
+        }
+        mFilter.destroy();
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/WrapRenderer.java.bak


+ 150 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraParamUtil.java

@@ -0,0 +1,150 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.Log;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * Helpers for choosing camera preview/picture sizes and checking whether focus
+ * modes and picture formats are supported.
+ */
+public class CameraParamUtil {
+    private static final String TAG = "JCameraView";
+    private static CameraParamUtil cameraParamUtil = null;
+    private CameraSizeComparator sizeComparator = new CameraSizeComparator();
+
+    private CameraParamUtil() {
+
+    }
+
+    /**
+     * Lazily creates the singleton instance.
+     * NOTE(review): not thread-safe; call from a single thread.
+     */
+    public static CameraParamUtil getInstance() {
+        if (cameraParamUtil == null) {
+            cameraParamUtil = new CameraParamUtil();
+        }
+        return cameraParamUtil;
+    }
+
+    /**
+     * Picks the first supported preview size wider than {@code th} whose aspect
+     * ratio roughly matches {@code rate}; falls back to the closest ratio.
+     */
+    public Camera.Size getPreviewSize(List<Camera.Size> list, int th, float rate) {
+        return findSize(list, th, rate, "Preview");
+    }
+
+    /**
+     * Picks the first supported picture size wider than {@code th} whose aspect
+     * ratio roughly matches {@code rate}; falls back to the closest ratio.
+     */
+    public Camera.Size getPictureSize(List<Camera.Size> list, int th, float rate) {
+        return findSize(list, th, rate, "Picture");
+    }
+
+    // Shared implementation of getPreviewSize/getPictureSize (previously duplicated).
+    private Camera.Size findSize(List<Camera.Size> list, int th, float rate, String what) {
+        Collections.sort(list, sizeComparator);
+        int i = 0;
+        for (Camera.Size s : list) {
+            if ((s.width > th) && equalRate(s, rate)) {
+                Log.i(TAG, "MakeSure " + what + " :w = " + s.width + " h = " + s.height);
+                break;
+            }
+            i++;
+        }
+        if (i == list.size()) {
+            // nothing passed the threshold/ratio test: take the closest aspect ratio
+            return getBestSize(list, rate);
+        }
+        return list.get(i);
+    }
+
+    // Returns the size whose aspect ratio is closest to the requested rate.
+    private Camera.Size getBestSize(List<Camera.Size> list, float rate) {
+        float previewDisparity = 100;
+        int index = 0;
+        for (int i = 0; i < list.size(); i++) {
+            Camera.Size cur = list.get(i);
+            float prop = (float) cur.width / (float) cur.height;
+            if (Math.abs(rate - prop) < previewDisparity) {
+                previewDisparity = Math.abs(rate - prop);
+                index = i;
+            }
+        }
+        return list.get(index);
+    }
+
+    // True when the size's aspect ratio is within 0.2 of the requested rate.
+    private boolean equalRate(Camera.Size s, float rate) {
+        float r = (float) (s.width) / (float) (s.height);
+        return Math.abs(r - rate) <= 0.2;
+    }
+
+    /** Returns whether {@code focusMode} is contained in the supported focus list. */
+    public boolean isSupportedFocusMode(List<String> focusList, String focusMode) {
+        for (String mode : focusList) {
+            if (focusMode.equals(mode)) {
+                Log.i(TAG, "FocusMode supported " + focusMode);
+                return true;
+            }
+        }
+        Log.i(TAG, "FocusMode not supported " + focusMode);
+        return false;
+    }
+
+    /** Returns whether the picture format {@code jpeg} is in the supported list. */
+    public boolean isSupportedPictureFormats(List<Integer> supportedPictureFormats, int jpeg) {
+        for (Integer format : supportedPictureFormats) {
+            // null-safe unboxing (the original would NPE on a null element)
+            if (format != null && jpeg == format) {
+                Log.i(TAG, "Formats supported " + jpeg);
+                return true;
+            }
+        }
+        Log.i(TAG, "Formats not supported " + jpeg);
+        return false;
+    }
+
+    /**
+     * Computes the clockwise display orientation for {@code cameraId} from the
+     * current display rotation, mirroring the result for front-facing cameras.
+     */
+    public int getCameraDisplayOrientation(Context context, int cameraId) {
+        Camera.CameraInfo info = new Camera.CameraInfo();
+        Camera.getCameraInfo(cameraId, info);
+        WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+        int rotation = wm.getDefaultDisplay().getRotation();
+        int degrees = 0;
+        switch (rotation) {
+            case Surface.ROTATION_0:
+                degrees = 0;
+                break;
+            case Surface.ROTATION_90:
+                degrees = 90;
+                break;
+            case Surface.ROTATION_180:
+                degrees = 180;
+                break;
+            case Surface.ROTATION_270:
+                degrees = 270;
+                break;
+        }
+        int result;
+        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            result = (info.orientation + degrees) % 360;
+            result = (360 - result) % 360;   // compensate the mirror
+        } else {
+            // back-facing
+            result = (info.orientation - degrees + 360) % 360;
+        }
+        return result;
+    }
+
+    // Orders camera sizes by ascending width.
+    private class CameraSizeComparator implements Comparator<Camera.Size> {
+        public int compare(Camera.Size lhs, Camera.Size rhs) {
+            return Integer.compare(lhs.width, rhs.width);
+        }
+
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraParamUtil.java.bak


+ 47 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraUtils.java

@@ -0,0 +1,47 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.app.Activity;
+import android.hardware.Camera;
+import android.view.Surface;
+
+/**
+ * Created by Administrator on 2017/10/10.
+ */
+
+public class CameraUtils {
+    /**
+     * 解决前置摄像头上下颠倒的问题 (fixes the upside-down preview of the front camera)
+     * by computing and applying the display orientation for the given camera.
+     *
+     * NOTE(review): the degree table is offset by 90° from the standard Android
+     * setCameraDisplayOrientation sample (ROTATION_0 -> 90, etc.) — presumably
+     * tuned for this app's portrait layout; confirm before reusing elsewhere.
+     *
+     * @param activity used to read the current display rotation
+     * @param cameraId id of the opened camera
+     * @param camera   camera instance the orientation is applied to
+     */
+    public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
+        Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+        Camera.getCameraInfo(cameraId, info);
+        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+        int degrees = 0;
+        switch (rotation) {
+            case Surface.ROTATION_0:
+                degrees = 90;
+                break;
+            case Surface.ROTATION_90:
+                degrees = 180;
+                break;
+            case Surface.ROTATION_180:
+                degrees = 270;
+                break;
+            case Surface.ROTATION_270:
+                degrees = 0;
+                break;
+        }
+
+        int result;
+        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            result = (info.orientation + degrees) % 360;
+            result = (360 - result) % 360;  // compensate the mirror
+        } else {  // back-facing
+            result = (info.orientation - degrees + 360) % 360;
+        }
+        camera.setDisplayOrientation(result);
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraUtils.java.bak


+ 87 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/FormatUtils.java

@@ -0,0 +1,87 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Size;
+
+import com.joe.camera2recorddemo.MediaCodecUtil.TrackUtils;
+
+/**
+ * 获取视频信息类
+ * Created by Administrator on 2017/12/6.
+ */
+
+/**
+ * Helpers for reading basic metadata (size, rotation) from a video file.
+ */
+public class FormatUtils {
+
+    /**
+     * Returns the display size of the video at {@code url}, swapping width and
+     * height for 90°/270° rotated sources. Returns a 0x0 size when the video
+     * cannot be parsed.
+     *
+     * @param url 视频地址 (video path/URL)
+     * @return the display size of the video
+     */
+    public static Size getVideoSize(String url) {
+        // delegate to getVideoFormat — the two methods previously duplicated this logic
+        VideoFormat format = getVideoFormat(url);
+        return new Size(format.width, format.height);
+    }
+
+    /**
+     * Reads width, height and rotation of the video at {@code url}. Width and
+     * height are swapped for 90°/270° rotated sources. On failure the returned
+     * format has width/height 0.
+     *
+     * @param url video path/URL
+     * @return the parsed format (never null)
+     */
+    public static VideoFormat getVideoFormat(String url) {
+        VideoFormat videoFormat = new VideoFormat();
+        videoFormat.height = 0;
+        videoFormat.width = 0;
+        MediaExtractor extractor = new MediaExtractor();
+        try {
+            extractor.setDataSource(url);
+            int trackIndex = TrackUtils.selectVideoTrack(extractor);
+            if (trackIndex < 0) {
+                throw new RuntimeException("No video track found in " + url);
+            }
+            extractor.selectTrack(trackIndex);
+            MediaFormat mediaFormat = extractor.getTrackFormat(trackIndex);
+            // swap reported width/height for rotated sources
+            videoFormat.rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
+            if (videoFormat.rotation == 90 || videoFormat.rotation == 270) {
+                videoFormat.height = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+                videoFormat.width = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+            } else {
+                videoFormat.width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+                videoFormat.height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            // fix: release the extractor's native resources (previously leaked)
+            extractor.release();
+        }
+        return videoFormat;
+    }
+
+    /** Plain value holder for basic video metadata. */
+    public static class VideoFormat {
+        public int width;
+        public int height;
+        public int rotation;
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/FormatUtils.java.bak


+ 133 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GetPathFromUri4kitkat.java

@@ -0,0 +1,133 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.annotation.SuppressLint;
+import android.content.ContentUris;
+import android.content.Context;
+import android.database.Cursor;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Environment;
+import android.provider.DocumentsContract;
+import android.provider.MediaStore;
+
+public class GetPathFromUri4kitkat {
+
+    /**
+     * 专为Android4.4设计的从Uri获取文件绝对路径,以前的方法已不好使
+     */
+    @SuppressLint("NewApi")
+    public static String getPath(final Context context, final Uri uri) {
+
+        final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
+
+        // DocumentProvider
+        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
+            // ExternalStorageProvider
+            if (isExternalStorageDocument(uri)) {
+                final String docId = DocumentsContract.getDocumentId(uri);
+                final String[] split = docId.split(":");
+                final String type = split[0];
+
+                if ("primary".equalsIgnoreCase(type)) {
+                    return Environment.getExternalStorageDirectory() + "/" + split[1];
+                }
+
+                // TODO handle non-primary volumes
+            }
+            // DownloadsProvider
+            else if (isDownloadsDocument(uri)) {
+
+                final String id = DocumentsContract.getDocumentId(uri);
+                final Uri contentUri = ContentUris.withAppendedId(
+                        Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
+
+                return getDataColumn(context, contentUri, null, null);
+            }
+            // MediaProvider
+            else if (isMediaDocument(uri)) {
+                final String docId = DocumentsContract.getDocumentId(uri);
+                final String[] split = docId.split(":");
+                final String type = split[0];
+
+                Uri contentUri = null;
+                if ("image".equals(type)) {
+                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
+                } else if ("video".equals(type)) {
+                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
+                } else if ("audio".equals(type)) {
+                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
+                }
+
+                final String selection = "_id=?";
+                final String[] selectionArgs = new String[]{split[1]};
+
+                return getDataColumn(context, contentUri, selection, selectionArgs);
+            }
+        }
+        // MediaStore (and general)
+        else if ("content".equalsIgnoreCase(uri.getScheme())) {
+            return getDataColumn(context, uri, null, null);
+        }
+        // File
+        else if ("file".equalsIgnoreCase(uri.getScheme())) {
+            return uri.getPath();
+        }
+
+        return null;
+    }
+
+    /**
+     * Get the value of the data column for this Uri. This is useful for
+     * MediaStore Uris, and other file-based ContentProviders.
+     *
+     * @param context       The context.
+     * @param uri           The Uri to query.
+     * @param selection     (Optional) Filter used in the query.
+     * @param selectionArgs (Optional) Selection arguments used in the query.
+     * @return The value of the _data column, which is typically a file path.
+     */
+    public static String getDataColumn(Context context, Uri uri, String selection,
+                                       String[] selectionArgs) {
+
+        Cursor cursor = null;
+        final String column = "_data";
+        final String[] projection = {column};
+
+        try {
+            cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
+                    null);
+            if (cursor != null && cursor.moveToFirst()) {
+                final int column_index = cursor.getColumnIndexOrThrow(column);
+                return cursor.getString(column_index);
+            }
+        } finally {
+            if (cursor != null)
+                cursor.close();
+        }
+        return null;
+    }
+
+    /**
+     * @param uri The Uri to check.
+     * @return Whether the Uri authority is ExternalStorageProvider.
+     */
+    public static boolean isExternalStorageDocument(Uri uri) {
+        return "com.android.externalstorage.documents".equals(uri.getAuthority());
+    }
+
+    /**
+     * @param uri The Uri to check.
+     * @return Whether the Uri authority is DownloadsProvider.
+     */
+    public static boolean isDownloadsDocument(Uri uri) {
+        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
+    }
+
+    /**
+     * @param uri The Uri to check.
+     * @return Whether the Uri authority is MediaProvider.
+     */
+    public static boolean isMediaDocument(Uri uri) {
+        return "com.android.providers.media.documents".equals(uri.getAuthority());
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GetPathFromUri4kitkat.java.bak


+ 106 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GpuUtils.java

@@ -0,0 +1,106 @@
+package com.joe.camera2recorddemo.Utils;
+
import android.content.res.Resources;
import android.opengl.GLES20;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
+
+public enum GpuUtils {
+    ;
+
+    /**
+     * 读取Assets中的文本文件
+     *
+     * @param mRes res
+     * @param path 文件路径
+     * @return 文本内容
+     */
+    public static String readText(Resources mRes, String path) {
+        StringBuilder result = new StringBuilder();
+        try {
+            InputStream is = mRes.getAssets().open(path);
+            int ch;
+            byte[] buffer = new byte[1024];
+            while (-1 != (ch = is.read(buffer))) {
+                result.append(new String(buffer, 0, ch));
+            }
+        } catch (Exception e) {
+            return null;
+        }
+        return result.toString().replaceAll("\\r\\n", "\n");
+    }
+
+    /**
+     * 加载Shader
+     *
+     * @param shaderType Shader类型
+     * @param source     Shader代码
+     * @return shaderId
+     */
+    public static int loadShader(int shaderType, String source) {
+        if (source == null) {
+            glError(1, "Shader source ==null : shaderType =" + shaderType);
+            return 0;
+        }
+        int shader = GLES20.glCreateShader(shaderType);
+        if (0 != shader) {
+            GLES20.glShaderSource(shader, source);
+            GLES20.glCompileShader(shader);
+            int[] compiled = new int[1];
+            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+            if (compiled[0] == 0) {
+                glError(1, "Could not compile shader:" + shaderType);
+                glError(1, "GLES20 Error:" + GLES20.glGetShaderInfoLog(shader));
+                GLES20.glDeleteShader(shader);
+                shader = 0;
+            }
+        }
+        return shader;
+    }
+
+    /**
+     * 通过字符串创建GL程序
+     *
+     * @param vertexSource   顶点着色器
+     * @param fragmentSource 片元着色器
+     * @return programId
+     */
+    public static int createGLProgram(String vertexSource, String fragmentSource) {
+        int vertex = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertex == 0) return 0;
+        int fragment = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (fragment == 0) return 0;
+        int program = GLES20.glCreateProgram();
+        if (program != 0) {
+            GLES20.glAttachShader(program, vertex);
+            GLES20.glAttachShader(program, fragment);
+            GLES20.glLinkProgram(program);
+            int[] linkStatus = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+            if (linkStatus[0] != GLES20.GL_TRUE) {
+                glError(1, "Could not link program:" + GLES20.glGetProgramInfoLog(program));
+                GLES20.glDeleteProgram(program);
+                program = 0;
+            }
+        }
+        return program;
+    }
+
+    /**
+     * 通过assets中的文件创建GL程序
+     *
+     * @param res      res
+     * @param vertex   顶点作色器路径
+     * @param fragment 片元着色器路径
+     * @return programId
+     */
+    public static int createGLProgramByAssetsFile(Resources res, String vertex, String fragment) {
+        return createGLProgram(readText(res, vertex), readText(res, fragment));
+    }
+
+    private static void glError(int code, Object index) {
+        Log.e("C2D", "glError:" + code + "---" + index);
+    }
+
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GpuUtils.java.bak


+ 195 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/MatrixUtils.java

@@ -0,0 +1,195 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.opengl.Matrix;
+
+public enum MatrixUtils {
+    ;
+    public static final int TYPE_FITXY = 0;
+    public static final int TYPE_CENTERCROP = 1;
+    public static final int TYPE_CENTERINSIDE = 2;
+    public static final int TYPE_FITSTART = 3;
+    public static final int TYPE_FITEND = 4;
+
+    /**
+     * @return the original texture coordinate
+     */
+    public static float[] getOriginalTextureCo() {
+        return new float[]{
+                0.0f, 0.0f,
+                0.0f, 1.0f,
+                1.0f, 0.0f,
+                1.0f, 1.0f
+        };
+    }
+
+    /**
+     * @return the original vertex coordinate
+     */
+    public static float[] getOriginalVertexCo() {
+        return new float[]{
+                -1.0f, 1.0f,
+                -1.0f, -1.0f,
+                1.0f, 1.0f,
+                1.0f, -1.0f
+        };
+    }
+
+    /**
+     * @return the original matrix
+     */
+    public static float[] getOriginalMatrix() {
+        return new float[]{
+                1, 0, 0, 0,
+                0, 1, 0, 0,
+                0, 0, 1, 0,
+                0, 0, 0, 1
+        };
+    }
+
+    /**
+     * calculate appointed matrix by image size and view size
+     *
+     * @param matrix     returns the result
+     * @param type       one of TYPE_FITEND,TYPE_CENTERCROP,TYPE_CENTERINSIDE,TYPE_FITSTART,TYPE_FITXY
+     * @param imgWidth   image width
+     * @param imgHeight  image height
+     * @param viewWidth  view width
+     * @param viewHeight view height
+     */
+    public static void getMatrix(float[] matrix, int type, int imgWidth, int imgHeight, int viewWidth,
+                                 int viewHeight) {
+        if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) {
+            float[] projection = new float[16];
+            float[] camera = new float[16];
+            if (type == TYPE_FITXY) {
+                Matrix.orthoM(projection, 0, -1, 1, -1, 1, 1, 3);
+                Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
+                Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
+            }
+            float sWhView = (float) viewWidth / viewHeight;
+            float sWhImg = (float) imgWidth / imgHeight;
+            if (sWhImg > sWhView) {
+                switch (type) {
+                    case TYPE_CENTERCROP:
+                        Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
+                        break;
+                    case TYPE_CENTERINSIDE:
+                        Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
+                        break;
+                    case TYPE_FITSTART:
+                        Matrix.orthoM(projection, 0, -1, 1, 1 - 2 * sWhImg / sWhView, 1, 1, 3);
+                        break;
+                    case TYPE_FITEND:
+                        Matrix.orthoM(projection, 0, -1, 1, -1, 2 * sWhImg / sWhView - 1, 1, 3);
+                        break;
+                }
+            } else {
+                switch (type) {
+                    case TYPE_CENTERCROP:
+                        Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
+                        break;
+                    case TYPE_CENTERINSIDE:
+                        Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
+                        break;
+                    case TYPE_FITSTART:
+                        Matrix.orthoM(projection, 0, -1, 2 * sWhView / sWhImg - 1, -1, 1, 1, 3);
+                        break;
+                    case TYPE_FITEND:
+                        Matrix.orthoM(projection, 0, 1 - 2 * sWhView / sWhImg, 1, -1, 1, 1, 3);
+                        break;
+                }
+            }
+            Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
+            Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
+        }
+    }
+
+    /**
+     * @param m 待翻转的4X4矩阵
+     * @param x X轴翻转
+     * @param y Y轴翻转
+     * @return
+     */
+    public static float[] flip(float[] m, boolean x, boolean y) {
+        if (x || y) {
+            Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
+        }
+        return m;
+    }
+
+    /**
+     * 旋转矩阵
+     *
+     * @param m 待旋转的4X4矩阵
+     * @param r 旋转角度
+     * @return
+     */
+    public static float[] rotation(float[] m, float r) {
+        Matrix.rotateM(m, 0, r, 0.0f, 0.0f, 1.0f);
+        return m;
+    }
+
+    public static float[] crop(float[] m, float x, float y, float width, float height) {
+        float minX = x;
+        float minY = y;
+        float maxX = minX + width;
+        float maxY = minY + height;
+
+        // left bottom
+        m[0] = minX;
+        m[1] = minY;
+        // right bottom
+        m[2] = maxX;
+        m[3] = minY;
+        // left top
+        m[4] = minX;
+        m[5] = maxY;
+        // right top
+        m[6] = maxX;
+        m[7] = maxY;
+
+        return m;
+    }
+
+    /**
+     * 获取Surface的顶点坐标系
+     *
+     * @return
+     */
+    public static float[] getSurfaceVertexCo() {
+        return new float[]{
+                -1.0f, -1.0f,
+                -1.0f, 1.0f,
+                1.0f, -1.0f,
+                1.0f, 1.0f,
+        };
+    }
+
+    /**
+     * 获取Camera的顶点坐标系
+     *
+     * @return
+     */
+    public static float[] getCameraVertexCo() {
+        return new float[]{
+                -1.0f, 1.0f,
+                1.0f, 1.0f,
+                -1.0f, -1.0f,
+                1.0f, -1.0f,
+        };
+    }
+
+    /**
+     * 获取本地视频处理的顶点坐标系
+     *
+     * @return
+     */
+    public static float[] getMoveVertexCo() {
+        return new float[]{
+                1.0f, -1.0f,
+                -1.0f, -1.0f,
+                1.0f, 1.0f,
+                -1.0f, 1.0f,
+        };
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/MatrixUtils.java.bak


+ 36 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/UriUtils.java

@@ -0,0 +1,36 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.content.ContentResolver;
+import android.content.Context;
+import android.net.Uri;
+import android.util.Log;
+
+/**
+ * Created by Yj on 2017/10/16.
+ */
+
+public class UriUtils {
+
+    /**
+     * 获取URI的绝对路径
+     *
+     * @param context
+     * @param uri
+     * @return
+     */
+    public static String getRealFilePath(Context context, final Uri uri) {
+        if (null == uri) return null;
+        final String scheme = uri.getScheme();
+        String data = null;
+        if (scheme == null) {
+            Log.e("UriUtils", "scheme is null");
+            data = uri.getPath();
+        } else if (ContentResolver.SCHEME_FILE.equals(scheme)) {
+            data = uri.getPath();
+            Log.e("UriUtils", "SCHEME_FILE");
+        } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)) {
+            data = GetPathFromUri4kitkat.getPath(context, uri);
+        }
+        return data;
+    }
+}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/UriUtils.java.bak


+ 228 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/View/CameraRecordView.java

@@ -0,0 +1,228 @@
+package com.joe.camera2recorddemo.View;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.util.AttributeSet;
+import android.view.Surface;
+import android.view.TextureView;
+
+import com.joe.camera2recorddemo.Entity.SizeInfo;
+import com.joe.camera2recorddemo.OpenGL.CameraRecorder;
+import com.joe.camera2recorddemo.OpenGL.Filter.Mp4EditFilter;
+import com.joe.camera2recorddemo.OpenGL.Renderer;
+import com.joe.camera2recorddemo.Utils.CameraParamUtil;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.io.IOException;
+
/**
 * A {@link TextureView} that previews the (legacy) camera through an OpenGL
 * pipeline ({@link CameraRecorder}) with a switchable filter chain
 * ({@link Mp4EditFilter}), and can record the filtered stream to an MP4 file.
 * <p>
 * Lifecycle is driven by the SurfaceTexture callbacks: the camera and GL
 * preview start in onSurfaceTextureAvailable and are torn down (stopping any
 * active recording first) in onSurfaceTextureDestroyed.
 */
public class CameraRecordView extends TextureView implements Renderer {

    // Recorder state-machine values stored in mRecorderState.
    private static final int STATE_INIT = 0;
    private static final int STATE_RECORDING = 1;
    private static final int STATE_PAUSE = 2;
    private int mRecorderState;
    // Camera (legacy android.hardware.Camera API)
    private Camera mCamera;
    private Camera.Parameters mParams;
    private float screenProp;          // view aspect ratio (height / width)
    private int mCurrentCameraState = 0; // current camera id (front/back)

    private CameraRecorder mCameraRecord;
    private Mp4EditFilter mFilter;
    private int mCurrentFilterIndex;// currently selected filter index

    // Camera preview size, swapped to portrait orientation (see create()).
    private int mCameraWidth;
    private int mCameraHeight;
    // private SizeInfo recordSize;

    public CameraRecordView(Context context) {
        this(context, null);
    }

    public CameraRecordView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    /**
     * Creates the filter chain and recorder, and wires the SurfaceTexture
     * callbacks that drive the camera/preview lifecycle.
     */
    private void init() {
        mFilter = new Mp4EditFilter(getResources());
        mCameraRecord = new CameraRecorder();

        setSurfaceTextureListener(new SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                mCamera = Camera.open(0);
                screenProp = (float) height / (float) width;
                initCamera(screenProp);

                mCameraRecord.setOutputSurface(new Surface(surface));
                // Pick a recording size from supported video sizes, falling
                // back to preview sizes when the device reports none.
                Camera.Size videoSize;
                if (mParams.getSupportedVideoSizes() == null) {
                    videoSize = CameraParamUtil.getInstance().getPreviewSize(mParams.getSupportedPreviewSizes(), 600,
                            screenProp);
                } else {
                    videoSize = CameraParamUtil.getInstance().getPreviewSize(mParams.getSupportedVideoSizes(), 600,
                            screenProp);
                }
                // Camera sizes are landscape; swap to portrait for the output.
                SizeInfo sizeInfo;
                if (videoSize.width == videoSize.height) {
                    sizeInfo = new SizeInfo(720, 720);
                } else {
                    sizeInfo = new SizeInfo(videoSize.height, videoSize.width);
                }
                mCameraRecord.setOutputSize(sizeInfo);
                mCameraRecord.setRenderer(CameraRecordView.this);
                mCameraRecord.setPreviewSize(width, height);
                mCameraRecord.startPreview();
            }

            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
                mCameraRecord.setPreviewSize(width, height);
            }

            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                // Stop recording before tearing down the preview.
                if (mRecorderState == STATE_RECORDING) {
                    try {
                        stopRecord();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }

                stopPreview();
                return true;
            }

            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {

            }
        });
    }

    /**
     * Renderer callback: binds the camera to the recorder's input texture and
     * starts the preview. Runs on the GL thread.
     */
    @Override
    public void create() {
        try {
            mCamera.setPreviewTexture(mCameraRecord.createInputSurfaceTexture());
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Swap width/height: camera reports landscape, view is portrait.
        Camera.Size mSize = mCamera.getParameters().getPreviewSize();
        mCameraWidth = mSize.height;
        mCameraHeight = mSize.width;

        mCamera.startPreview();
        mFilter.create();
    }

    /**
     * Renderer callback: recomputes the center-crop vertex matrix for the new
     * viewport and flips it vertically for the camera image orientation.
     */
    @Override
    public void sizeChanged(int width, int height) {
        mFilter.sizeChanged(width, height);
        MatrixUtils.getMatrix(mFilter.getVertexMatrix(), MatrixUtils.TYPE_CENTERCROP,
                mCameraWidth, mCameraHeight, width, height);
        MatrixUtils.flip(mFilter.getVertexMatrix(), false, true);
    }

    // Renderer callback: draw the camera texture through the filter chain.
    @Override
    public void draw(int texture) {
        mFilter.draw(texture);
    }

    // Renderer callback: release GL resources held by the filter.
    @Override
    public void destroy() {
        mFilter.destroy();
    }

    /**
     * Configures camera parameters (preview/picture size, autofocus, JPEG
     * output) for the given view aspect ratio. No-op if the camera is null.
     *
     * @param screenProp view aspect ratio (height / width)
     */
    public void initCamera(float screenProp) {
        if (mCamera != null) {
            mParams = mCamera.getParameters();
            Camera.Size previewSize = CameraParamUtil.getInstance().getPreviewSize(mParams
                    .getSupportedPreviewSizes(), 1000, screenProp);
            Camera.Size pictureSize = CameraParamUtil.getInstance().getPictureSize(mParams
                    .getSupportedPictureSizes(), 1200, screenProp);
            mParams.setPreviewSize(previewSize.width, previewSize.height);
            mParams.setPictureSize(pictureSize.width, pictureSize.height);
            if (CameraParamUtil.getInstance().isSupportedFocusMode(
                    mParams.getSupportedFocusModes(),
                    Camera.Parameters.FOCUS_MODE_AUTO)) {
                mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            }

            if (CameraParamUtil.getInstance().isSupportedPictureFormats(mParams.getSupportedPictureFormats(),
                    ImageFormat.JPEG)) {
                mParams.setPictureFormat(ImageFormat.JPEG);
                mParams.setJpegQuality(100);
            }
            mCamera.setParameters(mParams);
            // Re-read so mParams reflects what the driver actually accepted.
            mParams = mCamera.getParameters();
        }
    }

    /**
     * Switches the active filter; ignored if {@code index} is already active.
     */
    public void switchFilter(int index) {
        if (mCurrentFilterIndex != index) {
            mCurrentFilterIndex = index;
            mFilter.getChooseFilter().setChangeType(mCurrentFilterIndex);
        }
    }

    /**
     * Cycles to the next camera (e.g. front/back) and restarts the preview.
     * No-op on single-camera devices.
     */
    public void switchCamera() {
        if (Camera.getNumberOfCameras() > 1) {
            stopPreview();
            mCurrentCameraState += 1;
            if (mCurrentCameraState > Camera.getNumberOfCameras() - 1)
                mCurrentCameraState = 0;
            mCamera = Camera.open(mCurrentCameraState);
            initCamera(screenProp);// must call setParameters again after switching cameras
/*
            if (mParams == null) {
                initCamera(screenProp);
            } else {
                // some devices throw java.lang.RuntimeException: setParameters failed
               mCamera.setParameters(mParams);
            }
*/
            mCameraRecord.startPreview();
        }
    }

    /**
     * Stops the GL preview and releases the camera.
     */
    public void stopPreview() {
        try {
            mCameraRecord.stopPreview();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    /**
     * Starts recording the filtered stream to {@code filePath}.
     *
     * @param filePath output MP4 file path
     * @throws IOException if the recorder cannot open the output
     */
    public void startRecord(String filePath) throws IOException {
        mCameraRecord.setOutputPath(filePath);
        mCameraRecord.startRecord();

        mRecorderState = STATE_RECORDING;
    }

    /**
     * Stops recording and finalizes the output file.
     *
     * @throws InterruptedException if interrupted while waiting for the
     *                              recorder thread to finish
     */
    public void stopRecord() throws InterruptedException {
        mCameraRecord.stopRecord();

        mRecorderState = STATE_INIT;
    }

    // Exposes the raw camera, e.g. for focus/zoom control by the host UI.
    public Camera getCamera() {
        return mCamera;
    }
}

BIN
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/View/CameraRecordView.java.bak


+ 5 - 0
OpenGLlibrary/src/main/res/drawable/btn_shutter_background.xml

@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<selector xmlns:android="http://schemas.android.com/apk/res/android" >
+    <item android:state_pressed="true" android:drawable="@mipmap/btn_camera_all_click"/>
    <item android:drawable="@mipmap/btn_camera_all"/>
+</selector>

BIN
OpenGLlibrary/src/main/res/drawable/change_camera.png


+ 0 - 0
OpenGLlibrary/src/main/res/drawable/editchoose_backgroud.xml


Some files were not shown because too many files changed in this diff