jixionghui 4 years ago
commit
68fd1a04b7
100 changed files with 6639 additions and 0 deletions
  1. 59 0
      .gitignore
  2. 7 0
      .svnignore
  3. 1 0
      OpenGLlibrary/.gitignore
  4. 26 0
      OpenGLlibrary/build.gradle
  5. 25 0
      OpenGLlibrary/proguard-rules.pro
  6. 28 0
      OpenGLlibrary/src/main/AndroidManifest.xml
  7. 6 0
      OpenGLlibrary/src/main/assets/shader/base.frag
  8. 12 0
      OpenGLlibrary/src/main/assets/shader/base.vert
  9. 6 0
      OpenGLlibrary/src/main/assets/shader/base_fragment.sh
  10. 10 0
      OpenGLlibrary/src/main/assets/shader/base_vertex.sh
  11. 62 0
      OpenGLlibrary/src/main/assets/shader/beauty/beauty.frag
  12. 35 0
      OpenGLlibrary/src/main/assets/shader/beauty/beauty.vert
  13. 147 0
      OpenGLlibrary/src/main/assets/shader/choose/choose.frag
  14. 42 0
      OpenGLlibrary/src/main/assets/shader/choose/choose.vert
  15. 9 0
      OpenGLlibrary/src/main/assets/shader/color/gray_fragment.frag
  16. 8 0
      OpenGLlibrary/src/main/assets/shader/mh/brightness.frag
  17. 8 0
      OpenGLlibrary/src/main/assets/shader/mh/contrast.frag
  18. 11 0
      OpenGLlibrary/src/main/assets/shader/mh/saturation.frag
  19. 15 0
      OpenGLlibrary/src/main/assets/shader/mh/vignette.frag
  20. 7 0
      OpenGLlibrary/src/main/assets/shader/oes.frag
  21. 12 0
      OpenGLlibrary/src/main/assets/shader/oes.vert
  22. 7 0
      OpenGLlibrary/src/main/assets/shader/oes/default_fragment.sh
  23. 10 0
      OpenGLlibrary/src/main/assets/shader/oes/default_vertex.sh
  24. 7 0
      OpenGLlibrary/src/main/assets/shader/oes_base_fragment.sh
  25. 10 0
      OpenGLlibrary/src/main/assets/shader/oes_base_vertex.sh
  26. 10 0
      OpenGLlibrary/src/main/assets/shader/pkm_mul.frag
  27. 9 0
      OpenGLlibrary/src/main/assets/shader/pkm_mul.vert
  28. 28 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/FilterInfo.java
  29. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/SizeInfo.java
  30. 50 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/TrackUtils.java
  31. 206 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/VideoDecode.java
  32. 424 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/CameraRecorder.java
  33. 196 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/EGLHelper.java
  34. 41 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BaseFilter.java
  35. 75 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BeautyFilter.java
  36. 120 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/ChooseFilter.java
  37. 43 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DistortionFilter.java
  38. 13 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DrawFilter.java
  39. 216 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Filter.java
  40. 113 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/GroupFilter.java
  41. 34 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Mp4EditFilter.java
  42. 48 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/OesFilter.java
  43. 87 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/FrameBuffer.java
  44. 415 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MP4Edior.java
  45. 59 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/AdjustFilter.java
  46. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/BrightnessFilter.java
  47. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/ContrastFilter.java
  48. 35 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/SaturationFilter.java
  49. 57 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/VignetteFilter.java
  50. 582 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Mp4Processor.java
  51. 13 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Renderer.java
  52. 180 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/TransUtil.java
  53. 88 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Transformation.java
  54. 80 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/WrapRenderer.java
  55. 150 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraParamUtil.java
  56. 47 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraUtils.java
  57. 87 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/FormatUtils.java
  58. 133 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GetPathFromUri4kitkat.java
  59. 106 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GpuUtils.java
  60. 195 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/MatrixUtils.java
  61. 36 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/UriUtils.java
  62. 228 0
      OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/View/CameraRecordView.java
  63. 5 0
      OpenGLlibrary/src/main/res/drawable/btn_shutter_background.xml
  64. BIN
      OpenGLlibrary/src/main/res/drawable/change_camera.png
  65. 9 0
      OpenGLlibrary/src/main/res/drawable/editchoose_backgroud.xml
  66. 9 0
      OpenGLlibrary/src/main/res/drawable/filterchoose_backgroud.xml
  67. BIN
      OpenGLlibrary/src/main/res/drawable/mp4_crop.png
  68. BIN
      OpenGLlibrary/src/main/res/drawable/mp4_flip.png
  69. BIN
      OpenGLlibrary/src/main/res/drawable/mp4_rotation.png
  70. BIN
      OpenGLlibrary/src/main/res/drawable/mp4_save.png
  71. BIN
      OpenGLlibrary/src/main/res/drawable/oneto.png
  72. BIN
      OpenGLlibrary/src/main/res/drawable/record_start.png
  73. BIN
      OpenGLlibrary/src/main/res/drawable/record_stop.png
  74. BIN
      OpenGLlibrary/src/main/res/mipmap-xxhdpi/btn_camera_all.png
  75. BIN
      OpenGLlibrary/src/main/res/mipmap-xxhdpi/btn_camera_all_click.png
  76. 33 0
      OpenGLlibrary/src/main/res/values/attrs.xml
  77. 6 0
      OpenGLlibrary/src/main/res/values/colors.xml
  78. 11 0
      OpenGLlibrary/src/main/res/values/strings.xml
  79. 11 0
      OpenGLlibrary/src/main/res/values/styles.xml
  80. 0 0
      README.md
  81. 1 0
      YZxing-lib/.gitignore
  82. 32 0
      YZxing-lib/build.gradle
  83. 25 0
      YZxing-lib/proguard-rules.pro
  84. 27 0
      YZxing-lib/src/androidTest/java/com/example/qrcode/ExampleInstrumentedTest.java
  85. 25 0
      YZxing-lib/src/main/AndroidManifest.xml
  86. 110 0
      YZxing-lib/src/main/java/com/example/qrcode/BeepManager.java
  87. 29 0
      YZxing-lib/src/main/java/com/example/qrcode/Constant.java
  88. 345 0
      YZxing-lib/src/main/java/com/example/qrcode/ScannerActivity.java
  89. 27 0
      YZxing-lib/src/main/java/com/example/qrcode/ScannerFragment.java
  90. 78 0
      YZxing-lib/src/main/java/com/example/qrcode/SetActionBarActivity.java
  91. 102 0
      YZxing-lib/src/main/java/com/example/qrcode/ShowResultActivity.java
  92. 46 0
      YZxing-lib/src/main/java/com/example/qrcode/callback/PreviewCallback.java
  93. 121 0
      YZxing-lib/src/main/java/com/example/qrcode/camera/AutoFocusManager.java
  94. 115 0
      YZxing-lib/src/main/java/com/example/qrcode/camera/CameraConfigurationManager.java
  95. 13 0
      YZxing-lib/src/main/java/com/example/qrcode/camera/CameraFacing.java
  96. 301 0
      YZxing-lib/src/main/java/com/example/qrcode/camera/CameraManager.java
  97. 40 0
      YZxing-lib/src/main/java/com/example/qrcode/camera/OpenCamera.java
  98. 88 0
      YZxing-lib/src/main/java/com/example/qrcode/camera/OpenCameraInterface.java
  99. 151 0
      YZxing-lib/src/main/java/com/example/qrcode/decode/DecodeHandler.java
  100. 0 0
      YZxing-lib/src/main/java/com/example/qrcode/decode/DecodeThread.java

+ 59 - 0
.gitignore

@@ -0,0 +1,59 @@
+# IntelliJ IDEA (IDE-related settings)
+.idea 
+*.iml 
+*.ipr 
+*.iws
+
+# Built application files (APK build outputs)
+*.apk
+*.ap_
+
+# Files for the ART/Dalvik VM (dex files)
+*.dex
+
+# Java class files (compiled bytecode)
+*.class
+
+# Generated files (auto-generated by the build)
+bin/
+gen/
+out/
+lib/
+
+# Eclipse project files
+.classpath 
+.project 
+.settings/ 
+eclipsebin 
+.metadata/
+
+# Gradle files
+.gradle/
+gradlew.bat 
+build/
+
+# Local configuration file (sdk path, etc.)
+local.properties
+reports 
+/captures 
+jacoco.exec
+
+# Mac system files
+.DS_Store
+
+# Proguard folder generated by Eclipse
+proguard/
+
+# Log files
+*.log
+
+# Android Studio Navigation editor temp files
+.navigation/
+
+# Android Studio captures folder
+captures/
+
+# External native build folder generated in Android Studio 2.2 and later
+.externalNativeBuild
+# Google Services (e.g. APIs or Firebase)
+google-services.json

+ 7 - 0
.svnignore

@@ -0,0 +1,7 @@
+build
+.idea
+*.iml
+.gradle
+local.properties
+release
+

+ 1 - 0
OpenGLlibrary/.gitignore

@@ -0,0 +1 @@
+/build

+ 26 - 0
OpenGLlibrary/build.gradle

@@ -0,0 +1,26 @@
+apply plugin: 'com.android.library'
+
+android {
+    compileSdkVersion compile_version
+    defaultConfig {
+        minSdkVersion min_version
+        targetSdkVersion target_version
+        versionCode 1
+        versionName "1.0"
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
+    }
+}
+
+dependencies {
+    api fileTree(include: ['*.jar'], dir: 'libs')
+    api 'androidx.appcompat:appcompat:1.0.0'
+    api 'androidx.recyclerview:recyclerview:1.0.0'
+    api 'androidx.constraintlayout:constraintlayout:1.1.3'
+//    api 'com.android.support:support-v4:25.3.1'
+//    api 'com.android.support:support-v13:25.3.1'
+}

+ 25 - 0
OpenGLlibrary/proguard-rules.pro

@@ -0,0 +1,25 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in C:\Users\Administrator\AppData\Local\Android\Sdk/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile

+ 28 - 0
OpenGLlibrary/src/main/AndroidManifest.xml

@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          package="com.joe.camera2recorddemo">
+
+    <uses-feature
+        android:glEsVersion="0x00020000"
+        android:required="true"/>
+
+    <uses-permission android:name="android.permission.CAMERA"/>
+    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+    <uses-permission android:name="android.permission.WAKE_LOCK"/>
+
+    <!--<application-->
+        <!--android:allowBackup="true"-->
+        <!--android:icon="@mipmap/ic_launcher"-->
+        <!--android:label="@string/app_name"-->
+        <!--android:roundIcon="@mipmap/ic_launcher_round"-->
+        <!--android:supportsRtl="true"-->
+        <!--android:theme="@style/AppTheme">-->
+        <!--<activity-->
+            <!--android:name=".Activity.MP4Activity"-->
+            <!--android:theme="@style/Theme.AppCompat.NoActionBar">-->
+        <!--</activity>-->
+    <!--</application>-->
+
+</manifest>

+ 6 - 0
OpenGLlibrary/src/main/assets/shader/base.frag

@@ -0,0 +1,6 @@
+precision mediump float;
+varying vec2 vTextureCo;
+uniform sampler2D uTexture;
+void main() {
+    gl_FragColor = texture2D( uTexture, vTextureCo);
+}

+ 12 - 0
OpenGLlibrary/src/main/assets/shader/base.vert

@@ -0,0 +1,12 @@
+attribute vec4 aVertexCo;
+attribute vec2 aTextureCo;
+
+uniform mat4 uVertexMatrix;
+uniform mat4 uTextureMatrix;
+
+varying vec2 vTextureCo;
+
+void main(){
+    gl_Position = uVertexMatrix*aVertexCo;
+    vTextureCo = aTextureCo.xy;
+}

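Note: base.vert/base.frag are the default pass-through pair consumed by BaseFilter. For orientation, a minimal sketch of compiling and linking such a pair with GLES20 follows; it is illustrative only and not code from this commit (the library's own GpuUtils presumably provides the equivalent).

    // Sketch: compile and link a GLSL pair with GLES20. The shader sources
    // are assumed to have been read from assets/shader/ into Strings.
    public static int createProgram(String vertexSrc, String fragmentSrc) {
        int vs = loadShader(GLES20.GL_VERTEX_SHADER, vertexSrc);
        int fs = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSrc);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vs);
        GLES20.glAttachShader(program, fs);
        GLES20.glLinkProgram(program);
        int[] linked = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linked, 0);
        if (linked[0] != GLES20.GL_TRUE) {
            throw new RuntimeException("link failed: " + GLES20.glGetProgramInfoLog(program));
        }
        return program;
    }

    private static int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            throw new RuntimeException("compile failed: " + GLES20.glGetShaderInfoLog(shader));
        }
        return shader;
    }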
+ 6 - 0
OpenGLlibrary/src/main/assets/shader/base_fragment.sh

@@ -0,0 +1,6 @@
+precision mediump float;
+varying vec2 textureCoordinate;
+uniform sampler2D vTexture;
+void main() {
+    gl_FragColor = texture2D( vTexture, textureCoordinate );
+}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/base_vertex.sh

@@ -0,0 +1,10 @@
+attribute vec4 vPosition;
+attribute vec2 vCoord;
+uniform mat4 vMatrix;
+
+varying vec2 textureCoordinate;
+
+void main(){
+    gl_Position = vMatrix*vPosition;
+    textureCoordinate = vCoord;
+}

+ 62 - 0
OpenGLlibrary/src/main/assets/shader/beauty/beauty.frag

@@ -0,0 +1,62 @@
+precision highp float;
+precision highp int;
+uniform sampler2D uTexture;
+uniform int uIternum;
+uniform float uACoef; // smoothing strength parameter
+uniform float uMixCoef; // mix coefficient
+varying highp vec2 vTextureCo;
+varying highp vec2 vBlurCoord1s[14];
+const float distanceNormalizationFactor = 4.0;
+const mat3 saturateMatrix = mat3(1.1102,-0.0598,-0.061,-0.0774,1.0826,-0.1186,-0.0228,-0.0228,1.1772);
+
+void main() {
+
+    vec3 centralColor;
+    float central;
+    float gaussianWeightTotal;
+    float sum;
+    float sampleColor;
+    float distanceFromCentralColor;
+    float gaussianWeight;
+
+    central = texture2D( uTexture, vTextureCo ).g;
+    gaussianWeightTotal = 0.2;
+    sum = central * 0.2;
+
+    for (int i = 0; i < 6; i++) {
+        sampleColor = texture2D( uTexture, vBlurCoord1s[i] ).g;
+        distanceFromCentralColor = min( abs( central - sampleColor ) * distanceNormalizationFactor, 1.0 );
+        gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
+        gaussianWeightTotal += gaussianWeight;
+        sum += sampleColor * gaussianWeight;
+    }
+    for (int i = 6; i < 14; i++) {
+        sampleColor = texture2D( uTexture, vBlurCoord1s[i] ).g;
+        distanceFromCentralColor = min( abs( central - sampleColor ) * distanceNormalizationFactor, 1.0 );
+        gaussianWeight = 0.1 * (1.0 - distanceFromCentralColor);
+        gaussianWeightTotal += gaussianWeight;
+        sum += sampleColor * gaussianWeight;
+    }
+
+    sum = sum / gaussianWeightTotal;
+    centralColor = texture2D( uTexture, vTextureCo ).rgb;
+    sampleColor = centralColor.g - sum + 0.5;
+    for (int i = 0; i < uIternum; ++i) {
+        if (sampleColor <= 0.5) {
+            sampleColor = sampleColor * sampleColor * 2.0;
+        }
+        else {
+            sampleColor = 1.0 - ((1.0 - sampleColor)*(1.0 - sampleColor) * 2.0);
+        }
+    }
+
+    float aa = 1.0 + pow( centralColor.g, 0.3 )*uACoef;
+    vec3 smoothColor = centralColor*aa - vec3( sampleColor )*(aa - 1.0);
+    smoothColor = clamp( smoothColor, vec3( 0.0 ), vec3( 1.0 ) );
+    smoothColor = mix( centralColor, smoothColor, pow( centralColor.g, 0.33 ) );
+    smoothColor = mix( centralColor, smoothColor, pow( centralColor.g, uMixCoef ) );
+    gl_FragColor = vec4( pow( smoothColor, vec3( 0.96 ) ), 1.0 );
+    vec3 satcolor = gl_FragColor.rgb * saturateMatrix;
+    gl_FragColor.rgb = mix( gl_FragColor.rgb, satcolor, 0.23 );
+
+}

+ 35 - 0
OpenGLlibrary/src/main/assets/shader/beauty/beauty.vert

@@ -0,0 +1,35 @@
+attribute vec4 aVertexCo;
+attribute vec2 aTextureCo;
+varying vec2 vTextureCo;
+varying vec2 vBlurCoord1s[14];
+uniform float uWidth;
+uniform float uHeight;
+uniform mat4 uVertexMatrix;
+uniform mat4 uTextureMatrix;
+void main()
+{
+    gl_Position = uVertexMatrix*aVertexCo;
+    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;
+
+    highp float mul_x = 2.0 / uWidth;
+    highp float mul_y = 2.0 / uHeight;
+
+    vBlurCoord1s[0] = vTextureCo + vec2( 0.0 * mul_x, -10.0 * mul_y );
+    vBlurCoord1s[1] = vTextureCo + vec2( 8.0 * mul_x, -5.0 * mul_y );
+    vBlurCoord1s[2] = vTextureCo + vec2( 8.0 * mul_x, 5.0 * mul_y );
+    vBlurCoord1s[3] = vTextureCo + vec2( 0.0 * mul_x, 10.0 * mul_y );
+    vBlurCoord1s[4] = vTextureCo + vec2( -8.0 * mul_x, 5.0 * mul_y );
+    vBlurCoord1s[5] = vTextureCo + vec2( -8.0 * mul_x, -5.0 * mul_y );
+
+    mul_x = 1.2 / uWidth;
+    mul_y = 1.2 / uHeight;
+
+    vBlurCoord1s[6] = vTextureCo + vec2( 0.0 * mul_x, -6.0 * mul_y );
+    vBlurCoord1s[7] = vTextureCo + vec2( -4.0 * mul_x, -4.0 * mul_y );
+    vBlurCoord1s[8] = vTextureCo + vec2( -6.0 * mul_x, 0.0 * mul_y );
+    vBlurCoord1s[9] = vTextureCo + vec2( -4.0 * mul_x, 4.0 * mul_y );
+    vBlurCoord1s[10] = vTextureCo + vec2( 0.0 * mul_x, 6.0 * mul_y );
+    vBlurCoord1s[11] = vTextureCo + vec2( 4.0 * mul_x, 4.0 * mul_y );
+    vBlurCoord1s[12] = vTextureCo + vec2( 6.0 * mul_x, 0.0 * mul_y );
+    vBlurCoord1s[13] = vTextureCo + vec2( 4.0 * mul_x, -4.0 * mul_y );
+}

+ 147 - 0
OpenGLlibrary/src/main/assets/shader/choose/choose.frag

@@ -0,0 +1,147 @@
+precision highp float;
+
+varying vec2 vTextureCo;
+varying vec2 leftTextureCoordinate;
+varying vec2 rightTextureCoordinate;
+
+varying vec2 topTextureCoordinate;
+varying vec2 topLeftTextureCoordinate;
+varying vec2 topRightTextureCoordinate;
+
+varying vec2 bottomTextureCoordinate;
+varying vec2 bottomLeftTextureCoordinate;
+varying vec2 bottomRightTextureCoordinate;
+
+uniform sampler2D uTexture;
+
+uniform int vChangeType;
+uniform highp float intensity;
+uniform float uWidth;
+uniform float uHeight;
+
+const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);// grayscale luminance weights
+const highp vec3 COOL = vec3(0.0, 0.0, 0.1);// cool tone offset
+const highp vec3 WARM = vec3(0.1, 0.1, 0.0);// warm tone offset
+const vec2 texSize = vec2(1920,1080);// emboss parameters
+const lowp float intensityone = 1.0;
+const lowp mat4 colorMatrix = mat4(0.3588, 0.7044, 0.1368, 0.0,  0.2990, 0.5870, 0.1140, 0.0,  0.2392, 0.4696, 0.0912, 0.0,  0.0, 0.0, 0.0, 1.0);
+const highp float threshold = 0.2;
+const highp float quantizationLevels = 10.0;
+const mediump mat3 convolutionMatrix = mat3(-1.0, 0.0, 1.0,-2.0, 0.0, 2.0,-1.0, 0.0, 1.0);
+
+const float stepcv=1.;
+const mat3 GX=mat3(-1.,0., +1., -2., 0., +2., -1., 0., +1.);
+const mat3 GY=mat3(-1., -2., -1., 0., 0., 0., +1., +2., +1.);
+
+float colorR(vec2 center,float shiftX,float shiftY){
+    return texture2D(uTexture,vec2(vTextureCo.x+shiftX/uWidth,vTextureCo.y+shiftY/uHeight)).r;
+}
+
+void main()
+{
+    vec4 textureColor = texture2D(uTexture, vTextureCo);
+    if(vChangeType == 0){
+        gl_FragColor = textureColor;
+    }
+    else if(vChangeType == 1){
+        vec4 nColor=texture2D(uTexture,vTextureCo);
+        vec4 deltaColor=nColor+vec4(COOL,0.0);
+        gl_FragColor=deltaColor;
+    }
+    else if(vChangeType == 2){
+        vec4 nColor=texture2D(uTexture,vTextureCo);
+        vec4 deltaColor=nColor+vec4(WARM,0.0);
+        gl_FragColor=deltaColor;
+    }
+    else if(vChangeType == 3){
+        gl_FragColor=vec4(vec3(dot(texture2D( uTexture, vTextureCo).rgb,W)),1.0);
+    }
+    else if(vChangeType == 4){
+        vec2 tex = vTextureCo;
+        vec2 upLeftUV = vec2(tex.x - 1.0/texSize.x, tex.y - 1.0/texSize.y);
+        vec4 upLeftColor = texture2D(uTexture,upLeftUV);
+        vec4 delColor = textureColor - upLeftColor;
+        float h = 0.3*delColor.x + 0.59*delColor.y + 0.11*delColor.z;
+        vec4 bkColor = vec4(0.5, 0.5, 0.5, 1.0);
+        gl_FragColor = vec4(h,h,h,0.0) +bkColor;
+    }
+    else if(vChangeType == 5){
+        gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
+    }
+    else if(vChangeType == 6){
+        lowp vec4 outputColor = textureColor * colorMatrix;
+        gl_FragColor = (intensityone * outputColor) + ((1.0 - intensityone) * textureColor);
+    }
+    else if(vChangeType == 7){
+        float bottomLeftIntensity = texture2D(uTexture, bottomLeftTextureCoordinate).r;
+        float topRightIntensity = texture2D(uTexture, topRightTextureCoordinate).r;
+        float topLeftIntensity = texture2D(uTexture, topLeftTextureCoordinate).r;
+        float bottomRightIntensity = texture2D(uTexture, bottomRightTextureCoordinate).r;
+        float leftIntensity = texture2D(uTexture, leftTextureCoordinate).r;
+        float rightIntensity = texture2D(uTexture, rightTextureCoordinate).r;
+        float bottomIntensity = texture2D(uTexture, bottomTextureCoordinate).r;
+        float topIntensity = texture2D(uTexture, topTextureCoordinate).r;
+        float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
+        float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
+
+        float mag = length(vec2(h, v));
+        vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
+        float thresholdTest = 1.0 - step(threshold, mag);
+        gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
+    }
+    else if(vChangeType == 8){
+         mediump vec4 bottomColor = texture2D(uTexture, bottomTextureCoordinate);
+         mediump vec4 bottomLeftColor = texture2D(uTexture, bottomLeftTextureCoordinate);
+         mediump vec4 bottomRightColor = texture2D(uTexture, bottomRightTextureCoordinate);
+         mediump vec4 leftColor = texture2D(uTexture, leftTextureCoordinate);
+         mediump vec4 rightColor = texture2D(uTexture, rightTextureCoordinate);
+         mediump vec4 topColor = texture2D(uTexture, topTextureCoordinate);
+         mediump vec4 topRightColor = texture2D(uTexture, topRightTextureCoordinate);
+         mediump vec4 topLeftColor = texture2D(uTexture, topLeftTextureCoordinate);
+
+         mediump vec4 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
+         resultColor += leftColor * convolutionMatrix[1][0] + textureColor * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
+         resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
+         gl_FragColor = resultColor;
+    }
+    else if(vChangeType == 9){
+        vec2 center=vec2(vTextureCo.x*uWidth,vTextureCo.y*uHeight);
+        float leftTop=colorR(center,-stepcv,-stepcv);
+        float centerTop=colorR(center,0.,-stepcv);
+        float rightTop=colorR(center,stepcv,-stepcv);
+        float leftCenter=colorR(center,-stepcv,0.);
+        float rightCenter=colorR(center,stepcv,0.);
+        float leftBottom=colorR(center,-stepcv,stepcv);
+        float centerBottom=colorR(center,0.,stepcv);
+        float rightBottom=colorR(center,stepcv,stepcv);
+        mat3 d=mat3(leftTop,    centerTop,    rightTop,
+                     leftCenter, colorR(center,0.,0.), rightCenter,
+                     leftBottom, centerBottom, rightBottom);
+        float x = d[0][0]*GX[0][0]+d[1][0]*GX[1][0]+d[2][0]*GX[2][0]+
+                   d[0][1]*GX[0][1]+d[1][1]*GX[1][1]+d[2][1]*GX[2][1]+
+                   d[0][2]*GX[0][2]+d[1][2]*GX[1][2]+d[2][2]*GX[2][2];
+        float y = d[0][0]*GY[0][0]+d[1][0]*GY[1][0]+d[2][0]*GY[2][0]+
+                   d[0][1]*GY[0][1]+d[1][1]*GY[1][1]+d[2][1]*GY[2][1]+
+                   d[0][2]*GY[0][2]+d[1][2]*GY[1][2]+d[2][2]*GY[2][2];
+        gl_FragColor=vec4(vec3(length(vec2(x,y))),1.);
+    }
+    else if(vChangeType == 10){
+        float bottomLeftIntensity = texture2D(uTexture, bottomLeftTextureCoordinate).r;
+        float topRightIntensity = texture2D(uTexture, topRightTextureCoordinate).r;
+        float topLeftIntensity = texture2D(uTexture, topLeftTextureCoordinate).r;
+        float bottomRightIntensity = texture2D(uTexture, bottomRightTextureCoordinate).r;
+        float leftIntensity = texture2D(uTexture, leftTextureCoordinate).r;
+        float rightIntensity = texture2D(uTexture, rightTextureCoordinate).r;
+        float bottomIntensity = texture2D(uTexture, bottomTextureCoordinate).r;
+        float topIntensity = texture2D(uTexture, topTextureCoordinate).r;
+        float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
+        float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
+        
+        float mag = 1.0 - length(vec2(h, v));
+        gl_FragColor = vec4(vec3(mag), 1.0);
+    }
+    else{
+        gl_FragColor = textureColor;
+    }
+
+}

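choose.frag selects an effect through the vChangeType uniform (0 pass-through, 1 cool, 2 warm, 3 grayscale, 4 emboss, 5 invert, 6 color matrix, 7 toon, 8 3x3 convolution, 9 Sobel edges, 10 inverted edge sketch). A sketch of how a host filter such as ChooseFilter could feed these uniforms at draw time; the method and its parameters are illustrative assumptions, not code from this commit.

    // Sketch: set the effect selector and the texel-step uniforms declared in
    // choose.frag/choose.vert. 'program' is a linked GL program handle.
    static void setChooseUniforms(int program, int width, int height) {
        GLES20.glUseProgram(program);
        GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "vChangeType"), 3); // e.g. 3 = grayscale
        GLES20.glUniform1f(GLES20.glGetUniformLocation(program, "texelWidth"), 1.0f / width);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(program, "texelHeight"), 1.0f / height);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(program, "uWidth"), width);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(program, "uHeight"), height);
    }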
+ 42 - 0
OpenGLlibrary/src/main/assets/shader/choose/choose.vert

@@ -0,0 +1,42 @@
+attribute vec4 aVertexCo;
+attribute vec4 aTextureCo;
+
+uniform mat4 uVertexMatrix;
+uniform mat4 uTextureMatrix;
+
+uniform highp float texelWidth;
+uniform highp float texelHeight;
+
+varying vec2 vTextureCo;
+varying vec2 leftTextureCoordinate;
+varying vec2 rightTextureCoordinate;
+
+varying vec2 topTextureCoordinate;
+varying vec2 topLeftTextureCoordinate;
+varying vec2 topRightTextureCoordinate;
+
+varying vec2 bottomTextureCoordinate;
+varying vec2 bottomLeftTextureCoordinate;
+varying vec2 bottomRightTextureCoordinate;
+
+void main()
+{
+     gl_Position = uVertexMatrix*aVertexCo;
+     vTextureCo = aTextureCo.xy;
+
+     vec2 widthStep = vec2(texelWidth, 0.0);
+     vec2 heightStep = vec2(0.0, texelHeight);
+     vec2 widthHeightStep = vec2(texelWidth, texelHeight);
+     vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);
+
+     leftTextureCoordinate = aTextureCo.xy - widthStep;
+     rightTextureCoordinate = aTextureCo.xy + widthStep;
+
+     topTextureCoordinate = aTextureCo.xy - heightStep;
+     topLeftTextureCoordinate = aTextureCo.xy - widthHeightStep;
+     topRightTextureCoordinate = aTextureCo.xy + widthNegativeHeightStep;
+
+     bottomTextureCoordinate = aTextureCo.xy + heightStep;
+     bottomLeftTextureCoordinate = aTextureCo.xy - widthNegativeHeightStep;
+     bottomRightTextureCoordinate = aTextureCo.xy + widthHeightStep;
+}

+ 9 - 0
OpenGLlibrary/src/main/assets/shader/color/gray_fragment.frag

@@ -0,0 +1,9 @@
+precision mediump float;
+varying vec2 textureCoordinate;
+uniform sampler2D vTexture;
+void main() {
+    vec4 color=texture2D( vTexture, textureCoordinate);
+    float rgb=color.g; // use the green channel as a cheap luminance approximation
+    vec4 c=vec4(rgb,rgb,rgb,color.a);
+    gl_FragColor = c;
+}

+ 8 - 0
OpenGLlibrary/src/main/assets/shader/mh/brightness.frag

@@ -0,0 +1,8 @@
+precision mediump float;
+varying vec2 vTextureCo;
+uniform sampler2D uTexture;
+uniform lowp float brightness;
+void main() {
+    lowp vec4 textureColor = texture2D(uTexture, vTextureCo);
+    gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
+}

+ 8 - 0
OpenGLlibrary/src/main/assets/shader/mh/contrast.frag

@@ -0,0 +1,8 @@
+precision mediump float;
+varying vec2 vTextureCo;
+uniform sampler2D uTexture;
+uniform lowp float stepcv;
+void main() {
+    lowp vec4 textureColor = texture2D(uTexture, vTextureCo);
+    gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * stepcv + vec3(0.5)), textureColor.w);
+}

+ 11 - 0
OpenGLlibrary/src/main/assets/shader/mh/saturation.frag

@@ -0,0 +1,11 @@
+precision mediump float;
+varying vec2 vTextureCo;
+uniform sampler2D uTexture;
+uniform lowp float saturation;
+const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
+void main() {
+    lowp vec4 textureColor = texture2D(uTexture, vTextureCo);
+    lowp float luminance = dot(textureColor.rgb, luminanceWeighting);
+    lowp vec3 greyScaleColor = vec3(luminance);
+    gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
+}

+ 15 - 0
OpenGLlibrary/src/main/assets/shader/mh/vignette.frag

@@ -0,0 +1,15 @@
+precision mediump float;
+varying vec2 vTextureCo;
+uniform sampler2D uTexture;
+
+uniform lowp vec2 vignetteCenter;
+uniform lowp vec3 vignetteColor;
+uniform highp float vignetteStart;
+uniform highp float vignetteEnd;
+
+void main() {
+    lowp vec3 rgb = texture2D(uTexture, vTextureCo).rgb;
+    lowp float d = distance(vTextureCo, vec2(0.5,0.5)); // center fixed at (0.5,0.5); vignetteCenter/vignetteColor are declared but unused
+    rgb *= (1.0 - smoothstep(vignetteStart, vignetteEnd, d));
+    gl_FragColor = vec4(vec3(rgb),1.0);
+}

+ 7 - 0
OpenGLlibrary/src/main/assets/shader/oes.frag

@@ -0,0 +1,7 @@
+#extension GL_OES_EGL_image_external : require
+precision mediump float;
+varying vec2 vTextureCo;
+uniform samplerExternalOES uTexture;
+void main() {
+    gl_FragColor = texture2D( uTexture, vTextureCo);
+}

+ 12 - 0
OpenGLlibrary/src/main/assets/shader/oes.vert

@@ -0,0 +1,12 @@
+attribute vec4 aVertexCo;
+attribute vec2 aTextureCo;
+
+uniform mat4 uVertexMatrix;
+uniform mat4 uTextureMatrix;
+
+varying vec2 vTextureCo;
+
+void main(){
+    gl_Position = uVertexMatrix*aVertexCo;
+    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;
+}

+ 7 - 0
OpenGLlibrary/src/main/assets/shader/oes/default_fragment.sh

@@ -0,0 +1,7 @@
+#extension GL_OES_EGL_image_external : require
+precision mediump float;
+varying vec2 textureCoordinate;
+uniform samplerExternalOES vTexture;
+void main() {
+    gl_FragColor = texture2D( vTexture, textureCoordinate );
+}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/oes/default_vertex.sh

@@ -0,0 +1,10 @@
+attribute vec4 vPosition;
+attribute vec2 vCoordinate;
+uniform mat4 vMatrix;
+
+varying vec2 aCoordinate;
+
+void main(){
+    gl_Position=vMatrix*vPosition;
+    aCoordinate=vCoordinate;
+}

+ 7 - 0
OpenGLlibrary/src/main/assets/shader/oes_base_fragment.sh

@@ -0,0 +1,7 @@
+#extension GL_OES_EGL_image_external : require
+precision mediump float;
+varying vec2 textureCoordinate;
+uniform samplerExternalOES vTexture;
+void main() {
+    gl_FragColor = texture2D( vTexture, textureCoordinate );
+}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/oes_base_vertex.sh

@@ -0,0 +1,10 @@
+attribute vec4 vPosition;
+attribute vec2 vCoord;
+uniform mat4 vMatrix;
+uniform mat4 vCoordMatrix;
+varying vec2 textureCoordinate;
+
+void main(){
+    gl_Position = vMatrix*vPosition;
+    textureCoordinate = (vCoordMatrix*vec4(vCoord,0,1)).xy;
+}

+ 10 - 0
OpenGLlibrary/src/main/assets/shader/pkm_mul.frag

@@ -0,0 +1,10 @@
+precision mediump float;
+varying vec2 aCoord;
+uniform sampler2D vTexture;
+uniform sampler2D vTextureAlpha;
+
+void main() {
+    vec4 color=texture2D( vTexture, aCoord);
+    color.a=texture2D(vTextureAlpha,aCoord).r;
+    gl_FragColor = color;
+}

+ 9 - 0
OpenGLlibrary/src/main/assets/shader/pkm_mul.vert

@@ -0,0 +1,9 @@
+attribute vec4 vPosition;
+attribute vec2 vCoord;
+varying vec2 aCoord;
+uniform mat4 vMatrix;
+
+void main(){
+    aCoord = vCoord;
+    gl_Position = vMatrix*vPosition;
+}

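pkm_mul.frag reconstructs transparency by sampling two textures: vTexture carries the color plane and vTextureAlpha a second plane holding alpha, a common layout for ETC1/PKM assets, which have no alpha channel of their own. A sketch of binding both samplers to texture units; the method name, texture ids and program handle are placeholders, not code from this commit.

    // Sketch: bind the color and alpha textures to units 0 and 1, then point
    // the two samplers of pkm_mul.frag at those units.
    static void bindPkmTextures(int program, int colorTexId, int alphaTexId) {
        GLES20.glUseProgram(program);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, colorTexId);
        GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "vTexture"), 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, alphaTexId);
        GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "vTextureAlpha"), 1);
    }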
+ 28 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/FilterInfo.java

@@ -0,0 +1,28 @@
+package com.joe.camera2recorddemo.Entity;
+
+public class FilterInfo {
+
+    private String name;
+    private int type;
+    private int rid;
+
+    public FilterInfo(int type, String name, int rid) {
+        this.name = name;
+        this.type = type;
+        this.rid = rid;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+
+    public int getType() {
+        return type;
+    }
+
+
+    public int getRid() {
+        return rid;
+    }
+}

+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Entity/SizeInfo.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.Entity;
+
+public class SizeInfo {
+
+    private int mWidth;
+    private int mHeight;
+
+    public SizeInfo(int width, int height) {
+        mWidth = width;
+        mHeight = height;
+    }
+
+    public int getWidth() {
+        return mWidth;
+    }
+
+    public int getHeight() {
+        return mHeight;
+    }
+
+
+    public void setSize(int width, int height) {
+        this.mWidth = width;
+        this.mHeight = height;
+    }
+
+
+    @Override
+    public String toString() {
+        return "SizeInfo{" +
+                "mWidth=" + mWidth +
+                ", mHeight=" + mHeight +
+                '}';
+    }
+}

+ 50 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/TrackUtils.java

@@ -0,0 +1,50 @@
+package com.joe.camera2recorddemo.MediaCodecUtil;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Log;
+
+/**
+ * Created by Yj on 2017/3/29.
+ */
+
+public class TrackUtils {
+
+	private static final String TAG = "TrackUtils";
+
+	/**
+	 * Find the video track.
+	 * @param extractor a MediaExtractor whose data source has been set
+	 * @return the video track index, or -1 if none is found
+	 */
+	public static int selectVideoTrack(MediaExtractor extractor) {
+		int numTracks = extractor.getTrackCount();
+		for (int i = 0; i < numTracks; i++) {
+			MediaFormat format = extractor.getTrackFormat(i);
+			String mime = format.getString(MediaFormat.KEY_MIME);
+			if (mime.startsWith("video/")) {
+				Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
+				return i;
+			}
+		}
+		return -1;
+	}
+
+	/**
+	 * Find the audio track.
+	 * @param extractor a MediaExtractor whose data source has been set
+	 * @return the audio track index, or -1 if none is found
+	 */
+	public static int selectAudioTrack(MediaExtractor extractor) {
+		int numTracks = extractor.getTrackCount();
+		for (int i = 0; i < numTracks; i++) {
+			MediaFormat format = extractor.getTrackFormat(i);
+			String mime = format.getString(MediaFormat.KEY_MIME);
+			if (mime.startsWith("audio/")) {
+				Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
+				return i;
+			}
+		}
+		return -1;
+	}
+}

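Typical use pairs TrackUtils with a MediaExtractor. A short sketch (illustrative; the file path is a placeholder):

    MediaExtractor extractor = new MediaExtractor();
    try {
        extractor.setDataSource("/sdcard/input.mp4"); // placeholder path
        int videoTrack = TrackUtils.selectVideoTrack(extractor);
        if (videoTrack >= 0) {
            extractor.selectTrack(videoTrack);
            MediaFormat format = extractor.getTrackFormat(videoTrack);
            // ... hand samples of this track to a MediaCodec decoder
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        extractor.release();
    }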
+ 206 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/MediaCodecUtil/VideoDecode.java

@@ -0,0 +1,206 @@
+package com.joe.camera2recorddemo.MediaCodecUtil;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Log;
+import android.view.Surface;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+
+public class VideoDecode {
+	private static final String TAG = "VideoDecode";
+	private static final long DEFAULT_TIMEOUT_US = 10000;
+
+    private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
+
+
+	public int ImageWidth = 0;
+	public int ImageHeight = 0;
+
+	MediaExtractor extractor = null;
+	MediaCodec decoder = null;
+	MediaFormat mediaFormat;
+
+	private boolean isLoop = false;// whether to loop playback
+	private boolean isStop = false;// whether decoding has been stopped
+	private String videoFilePath;
+
+	/**
+	 * Initialize the decoder.
+	 * @param videoFilePath path of the video file to decode
+	 */
+	public void decodePrepare(String videoFilePath) {
+		this.videoFilePath = videoFilePath;
+		extractor = null;
+		decoder = null;
+		try {
+			File videoFile = new File(videoFilePath);
+			extractor = new MediaExtractor();
+			extractor.setDataSource(videoFile.toString());
+			int trackIndex = TrackUtils.selectVideoTrack(extractor);
+			if (trackIndex < 0) {
+				throw new RuntimeException("No video track found in " + videoFilePath);
+			}
+			extractor.selectTrack(trackIndex);
+			mediaFormat = extractor.getTrackFormat(trackIndex);
+			String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
+			decoder = MediaCodec.createDecoderByType(mime);
+			if (isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))) {
+				mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
+				Log.i(TAG, "set decode color format to type " + decodeColorFormat);
+			} else {
+				Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
+			}
+
+			// strip the rotation metadata to prevent stretched output
+			mediaFormat.setInteger(MediaFormat.KEY_ROTATION,0);
+			// format hints
+			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
+			decoder.configure(mediaFormat,mSurface, null, 0);
+			decoder.start();
+		} catch (IOException ioe) {
+			throw new RuntimeException("failed to init decoder", ioe);
+		}
+	}
+
+	public void close() {
+		try {
+			if (decoder != null) {
+				decoder.stop();
+				decoder.release();
+			}
+
+			if (extractor != null) {
+				extractor.release();
+				extractor = null;
+			}
+		}catch (IllegalStateException e){
+			e.printStackTrace();
+		}
+	}
+
+	/**
+	 * Called from outside to start the decode loop.
+	 */
+	public void excuate() {
+		try {
+			decodeFramesToImage(decoder, extractor, mediaFormat);
+		} finally {
+			close();
+			if(isLoop && !isStop){
+				decodePrepare(videoFilePath);
+				excuate();
+			}
+		}
+
+	}
+
+	/**
+	 * Set whether playback loops.
+	 * @param isLoop true to restart decoding when the stream ends
+	 */
+	public void setLoop(boolean isLoop){
+		this.isLoop = isLoop;
+	}
+
+	/**
+	 * Check whether the codec supports a color format.
+	 * @param colorFormat the color format constant to test
+	 * @param caps the codec's capabilities
+	 * @return true if supported
+	 */
+	private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps) {
+		for (int c : caps.colorFormats) {
+			if (c == colorFormat) {
+				return true;
+			}
+		}
+		return false;
+	}
+
+	/**
+	 * Decode frames and render them to the output surface.
+	 * @param decoder a configured, started MediaCodec decoder
+	 * @param extractor an extractor with the video track selected
+	 * @param mediaFormat the format of the selected track
+	 */
+	public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
+		MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+		boolean sawInputEOS = false;
+		boolean sawOutputEOS = false;
+
+		final int width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+		final int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+
+		ImageWidth = width;
+		ImageHeight = height;
+
+		long startMs = System.currentTimeMillis();
+		while (!sawOutputEOS && !isStop) {
+			if (!sawInputEOS) {
+				int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
+				if (inputBufferId >= 0) {
+					ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
+					int sampleSize = extractor.readSampleData(inputBuffer, 0); // read one sample into the input buffer; returns its size
+					if (sampleSize < 0) {
+						decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+						sawInputEOS = true;
+					} else {
+						long presentationTimeUs = extractor.getSampleTime();
+						Log.v(TAG, "presentationTimeUs:"+presentationTimeUs);
+						decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
+						extractor.advance();  // advance to the next sample in the file
+					}
+				}
+			}
+			int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
+			if (outputBufferId >= 0) {
+				if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+					sawOutputEOS = true;
+				}
+				boolean doRender = (info.size != 0);
+				if (doRender) {
+					sleepRender(info, startMs);// throttle so frames display at their presentation time
+					decoder.releaseOutputBuffer(outputBufferId, true);
+				}
+			}
+		}
+	}
+
+	public void stop(){
+		isStop = true;
+	}
+
+	public void start(){
+		isStop = false;
+	}
+
+	//====================== Output Surface ==============================
+
+	private Surface mSurface;
+
+	public void setSurface(Surface surface){
+		this.mSurface = surface;
+	}
+
+	/**
+	 * Throttle decoding so frames are shown at their original rate.
+	 */
+	private void sleepRender(MediaCodec.BufferInfo audioBufferInfo, long startMs) {
+		while (audioBufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
+			try {
+				Thread.sleep(10);
+			} catch (InterruptedException e) {
+				e.printStackTrace();
+				break;
+			}
+		}
+	}
+}

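Based on the API above, a sketch of how VideoDecode is meant to be driven: set the output Surface before decodePrepare() (the decoder is configured with it), then run the blocking excuate() loop off the main thread. The surface variable and file path are placeholders, not values from this commit.

    static void playInto(Surface outputSurface) {
        final VideoDecode decode = new VideoDecode();
        decode.setSurface(outputSurface);            // must precede decodePrepare()
        decode.setLoop(true);                        // restart at end of stream
        decode.decodePrepare("/sdcard/input.mp4");   // placeholder path
        new Thread(new Runnable() {
            @Override
            public void run() {
                decode.excuate();                    // blocks until EOS or stop()
            }
        }).start();
        // ... later, from any thread: decode.stop();
    }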
+ 424 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/CameraRecorder.java

@@ -0,0 +1,424 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.AudioFormat;
+import android.media.AudioRecord;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.media.MediaRecorder;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+import android.view.Surface;
+
+import com.joe.camera2recorddemo.Entity.SizeInfo;
+import com.joe.camera2recorddemo.OpenGL.Filter.BaseFilter;
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Semaphore;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class CameraRecorder {
+
+    private final static long BASE_TIME = System.currentTimeMillis();
+    private final int TIME_OUT = 1000;
+    private final Object VIDEO_LOCK = new Object();
+    private final Object REC_LOCK = new Object();
+    private MediaCodec mVideoEncoder;
+    private MediaCodec mAudioEncoder;
+    private AudioRecord mAudioRecord;
+    private MediaMuxer mMuxer;
+    // Audio parameters
+    private int mRecordBufferSize = 0;
+    private int mRecordSampleRate = 48000;// audio sample rate
+    private int mRecordChannelConfig = AudioFormat.CHANNEL_IN_STEREO;// recording channels, stereo by default
+    private int mRecordAudioFormat = AudioFormat.ENCODING_PCM_16BIT; // recording format, PCM 16-bit by default
+    private SurfaceTexture mInputTexture;
+    private Surface mOutputSurface;
+    private Surface mEncodeSurface;
+    private EGLHelper mShowEGLHelper;
+    private Configuration mConfig;
+    private String mOutputPath;
+    private MediaCodec.BufferInfo mAudioEncodeBufferInfo;
+    private MediaCodec.BufferInfo mVideoEncodeBufferInfo;
+    private int mAudioTrack = -1;
+    private int mVideoTrack = -1;
+    private boolean mGLThreadFlag = false;
+    private Thread mGLThread;
+    private WrapRenderer mRenderer;
+    private Semaphore mSem;
+    private boolean isMuxStarted = false;
+    private int mInputTextureId;
+    private EGLSurface mEGLEncodeSurface = null;
+    private int mPreviewWidth = 0;
+    private int mPreviewHeight = 0;
+    private int mOutputWidth = 0;
+    private int mOutputHeight = 0;
+    private boolean isRecordStarted = false;
+    private boolean isRecordVideoStarted = false;
+    private boolean isRecordAudioStarted = false;
+    private boolean isTryStopAudio = false;
+    private Thread mAudioThread;
+    private Runnable mGLRunnable = new Runnable() {
+        @Override
+        public void run() {
+            if (mOutputSurface == null) {
+                Log.e("C2D", "CameraRecorder GLThread exit : outputSurface==null");
+                return;
+            }
+            if (mPreviewWidth <= 0 || mPreviewHeight <= 0) {
+                Log.e("C2D", "CameraRecorder GLThread exit : Preview Size==0");
+                return;
+            }
+            mShowEGLHelper.setSurface(mOutputSurface);
+            boolean ret = mShowEGLHelper.createGLES(mPreviewWidth, mPreviewHeight);
+            if (!ret) {
+                Log.e("C2D", "CameraRecorder GLThread exit : createGLES failed");
+                return;
+            }
+            if (mRenderer == null) {
+                mRenderer = new WrapRenderer(null);
+            }
+            mRenderer.setFlag(WrapRenderer.TYPE_CAMERA);
+            mRenderer.create();
+            int[] t = new int[1];
+            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, t, 0);
+            mRenderer.sizeChanged(mPreviewWidth, mPreviewHeight);
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, t[0]);
+
+            Filter mShowFilter = new BaseFilter();
+            Filter mRecFilter = new BaseFilter();
+            MatrixUtils.flip(mShowFilter.getVertexMatrix(), false, true);
+            mShowFilter.create();
+            mShowFilter.sizeChanged(mPreviewWidth, mPreviewHeight);
+
+            MatrixUtils.getMatrix(mRecFilter.getVertexMatrix(), MatrixUtils.TYPE_CENTERCROP,
+                    mPreviewWidth, mPreviewHeight,
+                    mOutputWidth, mOutputHeight);
+            MatrixUtils.flip(mRecFilter.getVertexMatrix(), false, true);
+            mRecFilter.create();
+            mRecFilter.sizeChanged(mOutputWidth, mOutputHeight);
+
+            FrameBuffer mEncodeFrameBuffer = new FrameBuffer();
+            while (mGLThreadFlag) {
+                try {
+                    mSem.acquire();
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+                if (mGLThreadFlag) {
+                    long time = (System.currentTimeMillis() - BASE_TIME) * 1000;
+                    mInputTexture.updateTexImage();
+                    mInputTexture.getTransformMatrix(mRenderer.getTextureMatrix());
+                    synchronized (VIDEO_LOCK) {
+                        if (isRecordVideoStarted) {
+                            if (mEGLEncodeSurface == null) {
+                                mEGLEncodeSurface = mShowEGLHelper.createEGLWindowSurface(mEncodeSurface);
+                            }
+                            mShowEGLHelper.makeCurrent(mEGLEncodeSurface);
+                            mEncodeFrameBuffer.bindFrameBuffer(mPreviewWidth, mPreviewHeight);
+                            mRenderer.draw(mInputTextureId);
+                            mEncodeFrameBuffer.unBindFrameBuffer();
+                            GLES20.glViewport(0, 0, mConfig.getVideoFormat().getInteger(MediaFormat.KEY_WIDTH),
+                                    mConfig.getVideoFormat().getInteger(MediaFormat.KEY_HEIGHT));
+                            mRecFilter.draw(mEncodeFrameBuffer.getCacheTextureId());
+                            mShowEGLHelper.setPresentationTime(mEGLEncodeSurface, time * 1000);
+                            videoEncodeStep(false);
+                            mShowEGLHelper.swapBuffers(mEGLEncodeSurface);
+
+                            mShowEGLHelper.makeCurrent();
+                            GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
+                            mShowFilter.draw(mEncodeFrameBuffer.getCacheTextureId());
+                            mShowEGLHelper.setPresentationTime(0);
+                            mShowEGLHelper.swapBuffers();
+                        } else {
+                            GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
+                            mRenderer.draw(mInputTextureId);
+                            mShowEGLHelper.swapBuffers();
+                        }
+                    }
+                }
+            }
+            mShowEGLHelper.destroyGLES();
+        }
+    };
+
+    public CameraRecorder() {
+        mShowEGLHelper = new EGLHelper();
+        // mEncodeEGLHelper=new EGLHelper();
+        mSem = new Semaphore(0);
+        mAudioEncodeBufferInfo = new MediaCodec.BufferInfo();
+        mVideoEncodeBufferInfo = new MediaCodec.BufferInfo();
+    }
+
+    public void setOutputPath(String path) {
+        this.mOutputPath = path;
+    }
+
+    public void setOutputSize(SizeInfo size) {
+        this.mConfig = new Configuration(size.getWidth(), size.getHeight());
+        this.mOutputWidth = size.getWidth();
+        this.mOutputHeight = size.getHeight();
+    }
+
+    public void setPreviewSize(int width, int height) {
+        this.mPreviewWidth = width;
+        this.mPreviewHeight = height;
+    }
+
+    public SurfaceTexture createInputSurfaceTexture() {
+        mInputTextureId = mShowEGLHelper.createTextureID();
+        mInputTexture = new SurfaceTexture(mInputTextureId);
+        new Handler(Looper.getMainLooper()).post(new Runnable() {
+            @Override
+            public void run() {
+                mInputTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+                    @Override
+                    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+                        mSem.release();
+                    }
+                });
+            }
+        });
+        return mInputTexture;
+    }
+
+    public void setConfiguration(Configuration config) {
+        this.mConfig = config;
+    }
+
+    public void setOutputSurface(Surface surface) {
+        this.mOutputSurface = surface;
+    }
+
+    public void setRenderer(Renderer renderer) {
+        mRenderer = new WrapRenderer(renderer);
+    }
+
+    public void startPreview() {
+        synchronized (REC_LOCK) {
+            Log.d("C2D", "CameraRecorder startPreview");
+            mSem.drainPermits();
+            mGLThreadFlag = true;
+            mGLThread = new Thread(mGLRunnable);
+            mGLThread.start();
+        }
+    }
+
+    public void stopPreview() throws InterruptedException {
+        synchronized (REC_LOCK) {
+            mGLThreadFlag = false;
+            mSem.release();
+            if (mGLThread != null && mGLThread.isAlive()) {
+                mGLThread.join();
+                mGLThread = null;
+            }
+            Log.d("C2D", "CameraRecorder stopPreview");
+        }
+    }
+
+    public void startRecord() throws IOException {
+        synchronized (REC_LOCK) {
+            isRecordStarted = true;
+            MediaFormat audioFormat = mConfig.getAudioFormat();
+            mAudioEncoder = MediaCodec.createEncoderByType(audioFormat.getString(MediaFormat.KEY_MIME));
+            mAudioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            MediaFormat videoFormat = mConfig.getVideoFormat();
+            mVideoEncoder = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
+            // mOutputSurface cannot be used here; configure() would fail
+            mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+            mEncodeSurface = mVideoEncoder.createInputSurface();
+
+            mAudioEncoder.start();
+            mVideoEncoder.start();
+            mMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+            mRecordBufferSize = AudioRecord.getMinBufferSize(mRecordSampleRate,
+                    mRecordChannelConfig, mRecordAudioFormat) * 2;
+            //        buffer=new byte[bufferSize];
+            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mRecordSampleRate, mRecordChannelConfig,
+                    mRecordAudioFormat, mRecordBufferSize);
+
+            mAudioThread = new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    mAudioRecord.startRecording();
+                    while (!audioEncodeStep(isTryStopAudio)) ;
+                    mAudioRecord.stop();
+                }
+            });
+            mAudioThread.start();
+            isRecordAudioStarted = true;
+        }
+    }
+
+    public void stopRecord() throws InterruptedException {
+        synchronized (REC_LOCK) {
+            if (isRecordStarted) {
+                isTryStopAudio = true;
+                if (isRecordAudioStarted) {
+                    mAudioThread.join();
+                    isRecordAudioStarted = false;
+                }
+                synchronized (VIDEO_LOCK) {
+                    if (isRecordVideoStarted) {
+                        mEGLEncodeSurface = null;
+                        videoEncodeStep(true);
+                    }
+                    isRecordVideoStarted = false;
+                }
+                mAudioEncoder.stop();
+                mAudioEncoder.release();
+                mVideoEncoder.stop();
+                mVideoEncoder.release();
+                try {
+                    if (isMuxStarted) {
+                        isMuxStarted = false;
+                        mMuxer.stop();
+                        mMuxer.release();
+                    }
+                } catch (IllegalStateException e) {
+                    e.printStackTrace();
+                    File file = new File(mOutputPath);
+                    if (file.exists() && file.delete()) {
+                        Log.d("C2D", "delete error file :" + mOutputPath);
+                    }
+                }
+
+                mAudioEncoder = null;
+                mVideoEncoder = null;
+                mMuxer = null;
+
+                mAudioTrack = -1;
+                mVideoTrack = -1;
+
+                isRecordStarted = false;
+            }
+        }
+    }
+
+    private boolean videoEncodeStep(boolean isEnd) {
+        if (isEnd) {
+            mVideoEncoder.signalEndOfInputStream();
+        }
+        while (true) {
+            int outputIndex = mVideoEncoder.dequeueOutputBuffer(mVideoEncodeBufferInfo, TIME_OUT);
+            if (outputIndex >= 0) {
+                if (isMuxStarted && mVideoEncodeBufferInfo.size > 0 && mVideoEncodeBufferInfo.presentationTimeUs > 0) {
+                    mMuxer.writeSampleData(mVideoTrack, getOutputBuffer(mVideoEncoder, outputIndex), mVideoEncodeBufferInfo);
+                }
+                mVideoEncoder.releaseOutputBuffer(outputIndex, false);
+                if ((mVideoEncodeBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                    Log.d("C2D", "CameraRecorder get video encode end of stream");
+                    return true;
+                }
+            } else if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+                break;
+            } else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+                Log.e("C2D", "get video output format changed ->" + mVideoEncoder.getOutputFormat().toString());
+                mVideoTrack = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
+                mMuxer.start();
+                isMuxStarted = true;
+            }
+        }
+        return false;
+    }
+
+    private boolean audioEncodeStep(boolean isEnd) {
+        if (isRecordAudioStarted) {
+            int inputIndex = mAudioEncoder.dequeueInputBuffer(TIME_OUT);
+            if (inputIndex >= 0) {
+                ByteBuffer buffer = getInputBuffer(mAudioEncoder, inputIndex);
+                buffer.clear();
+                long time = (System.currentTimeMillis() - BASE_TIME) * 1000;
+                int length = mAudioRecord.read(buffer, mRecordBufferSize);
+                if (length >= 0) {
+                    mAudioEncoder.queueInputBuffer(inputIndex, 0, length, time,
+                            isEnd ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
+                }
+            }
+            while (true) {
+                int outputIndex = mAudioEncoder.dequeueOutputBuffer(mAudioEncodeBufferInfo, TIME_OUT);
+                if (outputIndex >= 0) {
+                    // TODO: the first audio frame's presentation timestamp is 0
+                    if (isMuxStarted && mAudioEncodeBufferInfo.size > 0 && mAudioEncodeBufferInfo.presentationTimeUs > 0) {
+                        mMuxer.writeSampleData(mAudioTrack, getOutputBuffer(mAudioEncoder, outputIndex), mAudioEncodeBufferInfo);
+                    }
+                    mAudioEncoder.releaseOutputBuffer(outputIndex, false);
+                    if ((mAudioEncodeBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+                        Log.d("C2D", "CameraRecorder get audio encode end of stream");
+                        isTryStopAudio = false;
+                        isRecordAudioStarted = false;
+                        return true;
+                    }
+                } else if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+                    break;
+                } else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+                    Log.e("C2D", "get audio output format changed ->" + mAudioEncoder.getOutputFormat().toString());
+                    synchronized (VIDEO_LOCK) {
+                        mAudioTrack = mMuxer.addTrack(mAudioEncoder.getOutputFormat());
+                        isRecordVideoStarted = true;
+                    }
+                }
+            }
+        }
+        return false;
+    }
+
+    private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            return codec.getInputBuffer(index);
+        } else {
+            return codec.getInputBuffers()[index];
+        }
+    }
+
+    private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            return codec.getOutputBuffer(index);
+        } else {
+            return codec.getOutputBuffers()[index];
+        }
+    }
+
+    public static class Configuration {
+
+        private MediaFormat mAudioFormat;
+        private MediaFormat mVideoFormat;
+
+        public Configuration(MediaFormat audio, MediaFormat video) {
+            this.mAudioFormat = audio;
+            this.mVideoFormat = video;
+        }
+
+        public Configuration(int width, int height) {
+            mAudioFormat = MediaFormat.createAudioFormat("audio/mp4a-latm", 48000, 2);
+            mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
+            mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+
+            mVideoFormat = MediaFormat.createVideoFormat("video/avc", width, height);
+            mVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
+            mVideoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+            mVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
+            mVideoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+        }
+
+        public MediaFormat getAudioFormat() {
+            return mAudioFormat;
+        }
+
+        public MediaFormat getVideoFormat() {
+            return mVideoFormat;
+        }
+
+    }
+}
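
The nested Configuration above fixes sensible defaults when only a size is given. A minimal usage sketch, building one and inspecting the derived formats (how the enclosing CameraRecorder consumes it is outside this excerpt):

    CameraRecorder.Configuration config = new CameraRecorder.Configuration(1280, 720);
    MediaFormat video = config.getVideoFormat(); // video/avc, 25 fps, 1 s GOP, width*height*5 bps
    MediaFormat audio = config.getAudioFormat(); // audio/mp4a-latm, 48 kHz stereo, 128 kbps AAC-LC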

+ 196 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/EGLHelper.java

@@ -0,0 +1,196 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.util.Log;
+
+import javax.microedition.khronos.opengles.GL10;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class EGLHelper {
+
+    private EGLSurface mEGLSurface;
+    private EGLContext mEGLContext;
+    private EGLDisplay mEGLDisplay;
+    private EGLConfig mEGLConfig;
+
+    private EGLContext mShareEGLContext= EGL14.EGL_NO_CONTEXT;
+
+    private boolean isDebug=true;
+
+    private int mEglSurfaceType= EGL14.EGL_WINDOW_BIT;
+
+    private Object mSurface;
+    private Object mCopySurface;
+
+    /**
+     * @param type one of {@link EGL14#EGL_WINDOW_BIT}、{@link EGL14#EGL_PBUFFER_BIT}、{@link EGL14#EGL_PIXMAP_BIT}
+     */
+    public void setEGLSurfaceType(int type){
+        this.mEglSurfaceType=type;
+    }
+
+    public void setSurface(Object surface){
+        this.mSurface=surface;
+    }
+
+    public void setCopySurface(Object surface){
+        this.mCopySurface=surface;
+    }
+
+    /**
+     * create the environment for OpenGLES
+     * @param eglWidth width
+     * @param eglHeight height
+     */
+    public boolean createGLES(int eglWidth, int eglHeight){
+        int[] attributes = new int[] {
+                EGL14.EGL_SURFACE_TYPE, mEglSurfaceType,      //surface type to render to
+                EGL14.EGL_RED_SIZE, 8,   //red channel size in bits
+                EGL14.EGL_GREEN_SIZE, 8, //green channel size
+                EGL14.EGL_BLUE_SIZE, 8,  //blue channel size
+                EGL14.EGL_ALPHA_SIZE, 8, //alpha channel size; these four together define the pixel format
+                EGL14.EGL_DEPTH_SIZE, 16, //depth (Z) buffer size
+                EGL14.EGL_RENDERABLE_TYPE, 4, //rendering API: 4 is the hard-coded value of EGL14.EGL_OPENGL_ES2_BIT
+                EGL14.EGL_NONE };  //the attribute list always ends with EGL14.EGL_NONE
+
+        int glAttrs[] = {
+                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,  //0x3098 is EGL_CONTEXT_CLIENT_VERSION; EGL14 does not exist before Android 4.2
+                EGL14.EGL_NONE
+        };
+
+        int bufferAttrs[]={
+                EGL14.EGL_WIDTH,eglWidth,
+                EGL14.EGL_HEIGHT,eglHeight,
+                EGL14.EGL_NONE
+        };
+
+        //Step 2: obtain the display
+        //get the default display device, usually the device's main screen
+        mEGLDisplay= EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+
+        //query the EGL version: [0] is the major version, [1] is the minor version
+        int[] versions=new int[2];
+        EGL14.eglInitialize(mEGLDisplay,versions,0,versions,1);
+        log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_VENDOR));
+        log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_VERSION));
+        log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_EXTENSIONS));
+
+        //Step 3: choose a config
+        //query the available EGL configurations
+        EGLConfig[] configs = new EGLConfig[1];
+        int[] configNum = new int[1];
+        EGL14.eglChooseConfig(mEGLDisplay, attributes,0, configs,0, 1, configNum,0);
+        if(configs[0]==null){
+            log("eglChooseConfig Error:"+ EGL14.eglGetError());
+            return false;
+        }
+        mEGLConfig = configs[0];
+
+        //create the EGLContext
+        mEGLContext= EGL14.eglCreateContext(mEGLDisplay,mEGLConfig,mShareEGLContext, glAttrs,0);
+        if(mEGLContext== EGL14.EGL_NO_CONTEXT){
+            return false;
+        }
+        //create the EGLSurface to draw into
+        switch (mEglSurfaceType){
+            case EGL14.EGL_WINDOW_BIT:
+                mEGLSurface= EGL14.eglCreateWindowSurface(mEGLDisplay,mEGLConfig,mSurface,new int[]{EGL14.EGL_NONE},0);
+                break;
+            case EGL14.EGL_PIXMAP_BIT:
+                break;
+            case EGL14.EGL_PBUFFER_BIT:
+                mEGLSurface= EGL14.eglCreatePbufferSurface(mEGLDisplay,mEGLConfig,bufferAttrs,0);
+                break;
+        }
+        if(mEGLSurface== EGL14.EGL_NO_SURFACE){
+            log("eglCreateSurface Error:"+ EGL14.eglGetError());
+
+            return false;
+        }
+
+        if(!EGL14.eglMakeCurrent(mEGLDisplay,mEGLSurface,mEGLSurface,mEGLContext)){
+            log("eglMakeCurrent Error:"+ EGL14.eglQueryString(mEGLDisplay, EGL14.eglGetError()));
+            return false;
+        }
+        log("gl environment create success");
+        return true;
+    }
+
+    public EGLSurface createEGLWindowSurface(Object object){
+        return EGL14.eglCreateWindowSurface(mEGLDisplay,mEGLConfig,object,new int[]{EGL14.EGL_NONE},0);
+    }
+
+    public void setShareEGLContext(EGLContext context){
+        this.mShareEGLContext=context;
+    }
+
+    public EGLContext getEGLContext(){
+        return mEGLContext;
+    }
+
+    public boolean makeCurrent(){
+        return EGL14.eglMakeCurrent(mEGLDisplay,mEGLSurface,mEGLSurface,mEGLContext);
+    }
+
+    public boolean makeCurrent(EGLSurface surface){
+        return EGL14.eglMakeCurrent(mEGLDisplay,surface,surface,mEGLContext);
+    }
+
+    public boolean destroyGLES(){
+        EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
+        EGL14.eglDestroySurface(mEGLDisplay,mEGLSurface);
+        EGL14.eglDestroyContext(mEGLDisplay,mEGLContext);
+        EGL14.eglTerminate(mEGLDisplay);
+        log("gl destroy gles");
+        return true;
+    }
+
+    public void setPresentationTime(long time){
+        EGLExt.eglPresentationTimeANDROID(mEGLDisplay,mEGLSurface,time);
+    }
+
+    public void setPresentationTime(EGLSurface surface,long time){
+        EGLExt.eglPresentationTimeANDROID(mEGLDisplay,surface,time);
+    }
+
+    public boolean swapBuffers(){
+        return EGL14.eglSwapBuffers(mEGLDisplay,mEGLSurface);
+    }
+
+    public boolean swapBuffers(EGLSurface surface){
+        return EGL14.eglSwapBuffers(mEGLDisplay,surface);
+    }
+
+
+    //create an OES texture for the video stream
+    public int createTextureID() {
+        int[] texture = new int[1];
+        GLES20.glGenTextures(1, texture, 0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
+        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
+        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
+        return texture[0];
+    }
+
+    private void log(String log){
+        if(isDebug){
+            Log.e("EGLHelper",log);
+        }
+    }
+
+}
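
A minimal off-screen usage sketch for this helper, assuming it runs on a dedicated thread (an EGL context is bound to the thread that makes it current):

    EGLHelper egl = new EGLHelper();
    egl.setEGLSurfaceType(EGL14.EGL_PBUFFER_BIT); // off-screen: no window surface required
    if (egl.createGLES(1280, 720)) {              // display + config + context + pbuffer surface
        int oesTexture = egl.createTextureID();   // e.g. to back a SurfaceTexture
        // ... issue GL commands, calling egl.swapBuffers() once per frame ...
        egl.destroyGLES();
    }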

+ 41 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BaseFilter.java

@@ -0,0 +1,41 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+public class BaseFilter extends Filter {
+
+    public BaseFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/base.frag");
+    }
+
+    public BaseFilter(String vert,String frag){
+        super(null,vert,frag);
+    }
+
+    public BaseFilter(){
+        super(null,"attribute vec4 aVertexCo;\n" +
+                "attribute vec2 aTextureCo;\n" +
+                "\n" +
+                "uniform mat4 uVertexMatrix;\n" +
+                "uniform mat4 uTextureMatrix;\n" +
+                "\n" +
+                "varying vec2 vTextureCo;\n" +
+                "\n" +
+                "void main(){\n" +
+                "    gl_Position = uVertexMatrix*aVertexCo;\n" +
+                "    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
+                "}",
+                "precision mediump float;\n" +
+                "varying vec2 vTextureCo;\n" +
+                "uniform sampler2D uTexture;\n" +
+                "void main() {\n" +
+                "    gl_FragColor = texture2D( uTexture, vTextureCo);\n" +
+                "}");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+    }
+
+}

+ 75 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/BeautyFilter.java

@@ -0,0 +1,75 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+/**
+ * Created by Yj on 2017/9/18.
+ */
+
+public class BeautyFilter extends Filter {
+
+    private int mGLaaCoef;
+    private int mGLmixCoef;
+    private int mGLiternum;
+
+
+    private float aaCoef;
+    private float mixCoef;
+    private int iternum;
+
+    public BeautyFilter(Resources resource) {
+        super(resource,"shader/beauty/beauty.vert", "shader/beauty/beauty.frag");
+        shaderNeedTextureSize(true);
+        setBeautyLevel(0);
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        mGLaaCoef=GLES20.glGetUniformLocation(mGLProgram,"uACoef");
+        mGLmixCoef=GLES20.glGetUniformLocation(mGLProgram,"uMixCoef");
+        mGLiternum=GLES20.glGetUniformLocation(mGLProgram,"uIternum");
+    }
+
+    public Filter setBeautyLevel(int level){
+        switch (level){
+            case 1:
+                setParams(1,0.19f,0.54f);
+                break;
+            case 2:
+                setParams(2,0.29f,0.54f);
+                break;
+            case 3:
+                setParams(3,0.17f,0.39f);
+                break;
+            case 4:
+                setParams(3,0.25f,0.54f);
+                break;
+            case 5:
+                setParams(4,0.13f,0.54f);
+                break;
+            case 6:
+                setParams(4,0.19f,0.69f);
+                break;
+            default:
+                setParams(0,0f,0f);
+                break;
+        }
+        return this;
+    }
+
+    //set the shader parameters for the chosen beauty level
+    private void setParams(int iternum,float aaCoef,float mixCoef){
+        this.iternum=iternum;
+        this.aaCoef=aaCoef;
+        this.mixCoef=mixCoef;
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(mGLaaCoef,aaCoef);
+        GLES20.glUniform1f(mGLmixCoef,mixCoef);
+        GLES20.glUniform1i(mGLiternum,iternum);
+    }
+}

+ 120 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/ChooseFilter.java

@@ -0,0 +1,120 @@
+/*
+ *
+ * ChooseFilter.java
+ * 
+ * Created by Wuwang on 2016/10/17
+ */
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+/**
+ * A selectable collection of color/effect filters.
+ */
+public class ChooseFilter extends Filter {
+
+	//selected filter code
+	private int hChangeType;
+	private int hFilterCode = 0;
+
+	//state for the more complex filters (size and texel info)
+	private int width;
+	private int height;
+	private boolean needGLWH = false;
+	private int mGLWidth;
+	private int mGLHeight;
+	private boolean needTexelWH = false;
+	private float mTexelWidth;
+	private float mTexelHeight;
+	private int mUniformTexelWidthLocation;
+	private int mUniformTexelHeightLocation;
+
+	public ChooseFilter(Resources resource) {
+		super(resource, "shader/choose/choose.vert", "shader/choose/choose.frag");
+	}
+
+	@Override
+	protected void onCreate() {
+		super.onCreate();
+		hChangeType = GLES20.glGetUniformLocation(mGLProgram, "vChangeType");
+		mGLWidth = GLES20.glGetUniformLocation(mGLProgram, "uWidth");
+		mGLHeight = GLES20.glGetUniformLocation(mGLProgram, "uHeight");
+		mUniformTexelWidthLocation = GLES20.glGetUniformLocation(mGLProgram, "texelWidth");
+		mUniformTexelHeightLocation = GLES20.glGetUniformLocation(mGLProgram, "texelHeight");
+	}
+
+	@Override
+	protected void onSizeChanged(int width, int height) {
+		super.onSizeChanged(width, height);
+		this.width = width;
+		this.height = height;
+		setTexelSize(5.0f);
+	}
+
+	@Override
+	protected void onSetExpandData() {
+		super.onSetExpandData();
+		GLES20.glUniform1i(hChangeType, hFilterCode);
+		if (needGLWH) {
+			GLES20.glUniform1f(mGLWidth, width);
+			GLES20.glUniform1f(mGLHeight, height);
+		}
+		if (needTexelWH) {
+			GLES20.glUniform1f(mUniformTexelWidthLocation, mTexelWidth);
+			GLES20.glUniform1f(mUniformTexelHeightLocation, mTexelHeight);
+		}
+	}
+
+	/**
+	 * Set the filter type.
+	 *
+	 * @param code one of the {@link FilterType} constants
+	 */
+	public void setChangeType(int code) {
+		this.hFilterCode = code;
+		switch (code) {
+			case FilterType.TOON:
+				needTexelWH = true;
+				setTexelSize(4.2f);
+				break;
+			case FilterType.CONVOLUTION:
+				needTexelWH = true;
+				setTexelSize(1.3f);
+				break;
+			case FilterType.SOBEL:
+				needGLWH = true;
+				break;
+			case FilterType.SKETCH:
+				needTexelWH = true;
+				setTexelSize(3.0f);
+				break;
+			default:
+				needTexelWH = false;
+				needGLWH = false;
+				break;
+		}
+	}
+
+	private void setTexelSize(float size) {
+		mTexelWidth = size / width;
+		mTexelHeight = size / height;
+	}
+
+	/**
+	 * Filter type constants.
+	 */
+	public static class FilterType {
+		public static final int NORMAL = 0;
+		public static final int COOL = 1;
+		public static final int WARM = 2;
+		public static final int GRAY = 3;
+		public static final int CAMEO = 4;
+		public static final int INVERT = 5;
+		public static final int SEPIA = 6;
+		public static final int TOON = 7;
+		public static final int CONVOLUTION = 8;
+		public static final int SOBEL = 9;
+		public static final int SKETCH = 10;
+	}
+}
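
A short usage sketch, assuming a host renderer that drives the create/sizeChanged/draw lifecycle on the GL thread (see Filter below), with getResources() taken from an Activity:

    ChooseFilter filter = new ChooseFilter(getResources());
    // after filter.create() and filter.sizeChanged(w, h) on the GL thread:
    filter.setChangeType(ChooseFilter.FilterType.SEPIA); // switch the effect at runtime
    filter.draw(inputTextureId);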

+ 43 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DistortionFilter.java

@@ -0,0 +1,43 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+import com.joe.camera2recorddemo.OpenGL.TransUtil;
+import com.joe.camera2recorddemo.OpenGL.Transformation;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+/**
+ * Filter for rotation, flipping and cropping.
+ * Created by Yj on 2017/10/31.
+ */
+
+public class DistortionFilter extends Filter {
+
+	//transformation describing rotation, flip and crop
+	private Transformation mTransformation;
+	private float[] mTextureCo;
+
+	public DistortionFilter(Resources resource) {
+		super(resource, "shader/base.vert","shader/base.frag");
+		initTransformation();
+	}
+
+	/**
+	 * Initialize the transformation state.
+	 */
+	private void initTransformation() {
+		mTextureCo = MatrixUtils.getOriginalTextureCo();
+		if (mTransformation == null) {
+			mTransformation = new Transformation();
+		}
+	}
+
+	/**
+	 * Set the transformation to apply.
+	 * @param transformation the rotation/flip/crop description
+	 */
+	public void setTransformation(Transformation transformation){
+		mTransformation = transformation;
+		setTextureCo(TransUtil.getTransformationCo(mTextureCo, mTransformation));
+	}
+}

+ 13 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/DrawFilter.java

@@ -0,0 +1,13 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+/**
+ * Created by Administrator on 2017/11/16.
+ */
+
+public class DrawFilter extends Filter {
+	public DrawFilter(Resources resource) {
+		super(resource, "", "");
+	}
+}

+ 216 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Filter.java

@@ -0,0 +1,216 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+
+import com.joe.camera2recorddemo.OpenGL.FrameBuffer;
+import com.joe.camera2recorddemo.OpenGL.Renderer;
+import com.joe.camera2recorddemo.Utils.GpuUtils;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+public abstract class Filter implements Renderer {
+
+    private float[] mVertexMatrix= MatrixUtils.getOriginalMatrix();
+    private float[] mTextureMatrix=MatrixUtils.getOriginalMatrix();
+
+    private float[] mVertexCo;
+
+    protected FloatBuffer mVertexBuffer;
+    protected FloatBuffer mTextureBuffer;
+
+    protected int mWidth;
+    protected int mHeight;
+
+    protected Resources mRes;
+    private String mVertex;
+    private String mFragment;
+
+    protected int mGLProgram;
+    protected int mGLVertexCo;
+    protected int mGLTextureCo;
+    protected int mGLVertexMatrix;
+    protected int mGLTextureMatrix;
+    protected int mGLTexture;
+
+    private int mGLWidth;
+    private int mGLHeight;
+    private boolean isUseSize=false;
+
+    private FrameBuffer mFrameTemp;
+
+    protected Filter(Resources resource,String vertex,String fragment){
+        this.mRes=resource;
+        this.mVertex=vertex;
+        this.mFragment=fragment;
+        mFrameTemp=new FrameBuffer();
+        initBuffer();
+    }
+
+    protected void initBuffer(){
+        ByteBuffer vertex=ByteBuffer.allocateDirect(32);
+        vertex.order(ByteOrder.nativeOrder());
+        mVertexBuffer=vertex.asFloatBuffer();
+        mVertexBuffer.put(MatrixUtils.getOriginalVertexCo());
+        mVertexBuffer.position(0);
+        ByteBuffer texture=ByteBuffer.allocateDirect(32);
+        texture.order(ByteOrder.nativeOrder());
+        mTextureBuffer=texture.asFloatBuffer();
+        mTextureBuffer.put(MatrixUtils.getOriginalTextureCo());
+        mTextureBuffer.position(0);
+    }
+
+    public void setVertexCo(float[] vertexCo){
+        mVertexCo = vertexCo;
+        mVertexBuffer.clear();
+        mVertexBuffer.put(vertexCo);
+        mVertexBuffer.position(0);
+    }
+
+    public void setTextureCo(float[] textureCo){
+        mTextureBuffer.clear();
+        mTextureBuffer.put(textureCo);
+        mTextureBuffer.position(0);
+    }
+
+    public void setVertexBuffer(FloatBuffer vertexBuffer){
+        this.mVertexBuffer=vertexBuffer;
+    }
+
+    public void setTextureBuffer(FloatBuffer textureBuffer){
+        this.mTextureBuffer=textureBuffer;
+    }
+
+    public void setVertexMatrix(float[] matrix){
+        this.mVertexMatrix=matrix;
+    }
+
+    public void setTextureMatrix(float[] matrix){
+        this.mTextureMatrix=matrix;
+    }
+
+    public float[] getVertexMatrix(){
+        return mVertexMatrix;
+    }
+
+    public float[] getTextureMatrix(){
+        return mTextureMatrix;
+    }
+
+    public float[] getVertexCo() { return mVertexCo;}
+
+    protected void shaderNeedTextureSize(boolean need){
+        this.isUseSize=need;
+    }
+
+    protected void onCreate(){
+        if(mRes!=null){
+            mGLProgram= GpuUtils.createGLProgramByAssetsFile(mRes,mVertex,mFragment);
+        }else{
+            mGLProgram= GpuUtils.createGLProgram(mVertex,mFragment);
+        }
+        mGLVertexCo=GLES20.glGetAttribLocation(mGLProgram,"aVertexCo");
+        mGLTextureCo=GLES20.glGetAttribLocation(mGLProgram,"aTextureCo");
+        mGLVertexMatrix=GLES20.glGetUniformLocation(mGLProgram,"uVertexMatrix");
+        mGLTextureMatrix=GLES20.glGetUniformLocation(mGLProgram,"uTextureMatrix");
+        mGLTexture=GLES20.glGetUniformLocation(mGLProgram,"uTexture");
+
+        if(isUseSize){
+            mGLWidth=GLES20.glGetUniformLocation(mGLProgram,"uWidth");
+            mGLHeight=GLES20.glGetUniformLocation(mGLProgram,"uHeight");
+        }
+    }
+
+    protected void onSizeChanged(int width,int height){
+
+    }
+
+    @Override
+    public final void create() {
+        if(mVertex!=null&&mFragment!=null){
+            onCreate();
+        }
+    }
+
+    @Override
+    public void sizeChanged(int width, int height) {
+        onSizeChanged(width, height);
+        this.mWidth=width;
+        this.mHeight=height;
+
+        mFrameTemp.destroyFrameBuffer();
+    }
+
+    @Override
+    public void draw(int texture) {
+        onClear();
+        onUseProgram();
+        onSetExpandData();
+        onBindTexture(texture);
+        onDraw();
+    }
+
+    public int drawToTexture(int texture){
+        mFrameTemp.bindFrameBuffer(mWidth,mHeight);
+        onClear();
+        onUseProgram();
+        MatrixUtils.flip(mVertexMatrix,false,true);
+        onSetExpandData();
+        MatrixUtils.flip(mVertexMatrix,false,true);
+        onBindTexture(texture);
+        onDraw();
+        mFrameTemp.unBindFrameBuffer();
+        return mFrameTemp.getCacheTextureId();
+    }
+
+    @Override
+    public void destroy() {
+        mFrameTemp.destroyFrameBuffer();
+        GLES20.glDeleteProgram(mGLProgram);
+    }
+
+    protected void onUseProgram(){
+        GLES20.glUseProgram(mGLProgram);
+    }
+
+    protected void onDraw(){
+        GLES20.glEnableVertexAttribArray(mGLVertexCo);
+        GLES20.glVertexAttribPointer(mGLVertexCo,2, GLES20.GL_FLOAT, false, 0,mVertexBuffer);
+        GLES20.glEnableVertexAttribArray(mGLTextureCo);
+        GLES20.glVertexAttribPointer(mGLTextureCo, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
+        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4);
+        GLES20.glDisableVertexAttribArray(mGLVertexCo);
+        GLES20.glDisableVertexAttribArray(mGLTextureCo);
+    }
+
+    protected void onClear(){
+        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+    }
+
+    /**
+     * Upload additional uniform data (matrices and optional texture size).
+     */
+    protected void onSetExpandData(){
+        GLES20.glUniformMatrix4fv(mGLVertexMatrix,1,false,mVertexMatrix,0);
+        GLES20.glUniformMatrix4fv(mGLTextureMatrix,1,false,mTextureMatrix,0);
+        if(isUseSize){
+            GLES20.glUniform1f(mGLWidth,mWidth);
+            GLES20.glUniform1f(mGLHeight,mHeight);
+        }
+    }
+
+    /**
+     * Bind the default input texture.
+     */
+    protected void onBindTexture(int textureId){
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textureId);
+        GLES20.glUniform1i(mGLTexture,0);
+    }
+
+}
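
As the subclasses in this commit illustrate, a new effect only has to name its shaders and wire up its uniforms. A sketch of the pattern (the tint fragment shader and uTint uniform are hypothetical, not part of this commit):

    import android.content.res.Resources;
    import android.opengl.GLES20;
    import com.joe.camera2recorddemo.OpenGL.Filter.Filter;

    public class TintFilter extends Filter {
        private int mGLTint;                     // uniform location, resolved in onCreate
        private float[] tint = {1f, 0.9f, 0.9f}; // illustrative value

        public TintFilter(Resources res) {
            super(res, "shader/base.vert", "shader/tint.frag"); // hypothetical shader asset
        }

        @Override
        protected void onCreate() {
            super.onCreate();
            mGLTint = GLES20.glGetUniformLocation(mGLProgram, "uTint");
        }

        @Override
        protected void onSetExpandData() {
            super.onSetExpandData();             // uploads the matrices first
            GLES20.glUniform3fv(mGLTint, 1, tint, 0);
        }
    }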

+ 113 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/GroupFilter.java

@@ -0,0 +1,113 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+import java.util.Iterator;
+import java.util.Vector;
+
+/**
+ * Created by Yj on 2017/9/24 0024.
+ */
+
+public class GroupFilter extends BaseFilter {
+
+    private Vector<Filter> mGroup;
+    private Vector<Filter> mTempGroup;
+
+    public GroupFilter(Resources resource) {
+        super(resource);
+    }
+
+    public GroupFilter(){
+        super();
+    }
+
+    @Override
+    protected void initBuffer() {
+        super.initBuffer();
+        mGroup=new Vector<>();
+        mTempGroup=new Vector<>();
+    }
+
+    public synchronized void addFilter(Filter filter){
+        mGroup.add(filter);
+        mTempGroup.add(filter);
+    }
+
+    public synchronized void addFilter(int index,Filter filter){
+        mGroup.add(index, filter);
+        mTempGroup.add(filter);
+    }
+
+    public synchronized Filter removeFilter(int index){
+        return mGroup.remove(index);
+    }
+
+    public synchronized boolean removeFilter(Filter filter){
+        return mGroup.remove(filter);
+    }
+
+    public synchronized Filter element(int index){
+        return mGroup.elementAt(index);
+    }
+
+    public synchronized Iterator<Filter> iterator(){
+        return mGroup.iterator();
+    }
+
+    public synchronized boolean isEmpty(){
+        return mGroup.isEmpty();
+    }
+
+    @Override
+    protected synchronized void onCreate() {
+        super.onCreate();
+        for (Filter filter : mGroup) {
+            filter.create();
+        }
+        mTempGroup.clear();
+    }
+
+    private void tempFilterInit(int width,int height){
+        for (Filter filter : mTempGroup) {
+            filter.create();
+            filter.sizeChanged(width, height);
+        }
+        mTempGroup.removeAllElements();
+    }
+
+    @Override
+    protected synchronized void onSizeChanged(int width, int height) {
+        super.onSizeChanged(width, height);
+        for (Filter filter : mGroup) {
+            filter.sizeChanged(width, height);
+        }
+    }
+
+    @Override
+    public void draw(int texture) {
+        if(mTempGroup.size()>0){
+            tempFilterInit(mWidth, mHeight);
+        }
+        int tempTextureId=texture;
+        for (int i=0;i<mGroup.size();i++){
+            Filter filter=mGroup.get(i);
+            tempTextureId=filter.drawToTexture(tempTextureId);
+        }
+        super.draw(tempTextureId);
+    }
+
+    @Override
+    public int drawToTexture(int texture) {
+        if(mTempGroup.size()>0){
+            tempFilterInit(mWidth, mHeight);
+        }
+        int tempTextureId=texture;
+        for (int i=0;i<mGroup.size();i++){
+            Filter filter=mGroup.get(i);
+            tempTextureId=filter.drawToTexture(tempTextureId);
+        }
+        return super.drawToTexture(tempTextureId);
+    }
+
+}
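
A sketch of chaining effects; filters added after creation are initialized lazily on the next draw via the temp list above:

    GroupFilter group = new GroupFilter(getResources());
    group.addFilter(new BeautyFilter(getResources()));
    group.addFilter(new ChooseFilter(getResources()));
    // on the GL thread:
    group.create();
    group.sizeChanged(width, height);
    group.draw(inputTextureId); // each child renders to an FBO; the final result is drawn out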

+ 34 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/Mp4EditFilter.java

@@ -0,0 +1,34 @@
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.content.res.Resources;
+
+/**
+ * Combined editing filter group.
+ * Created by Yj on 2017/11/1.
+ */
+
+public class Mp4EditFilter extends GroupFilter {
+	private ChooseFilter chooseFilter;
+	private DistortionFilter distortionFilter;
+
+	public Mp4EditFilter(Resources resource) {
+		super(resource);
+	}
+
+	@Override
+	protected void initBuffer() {
+		super.initBuffer();
+		chooseFilter = new ChooseFilter(mRes);
+		distortionFilter = new DistortionFilter(mRes);
+		addFilter(chooseFilter);
+		addFilter(distortionFilter);
+	}
+
+	public ChooseFilter getChooseFilter() {
+		return chooseFilter;
+	}
+
+	public DistortionFilter getDistortionFilter() {
+		return distortionFilter;
+	}
+}

+ 48 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Filter/OesFilter.java

@@ -0,0 +1,48 @@
+/*
+ * Created by Wuwang on 2017/9/11
+ * Copyright © 2017 深圳哎吖科技. All rights reserved.
+ */
+package com.joe.camera2recorddemo.OpenGL.Filter;
+
+import android.annotation.TargetApi;
+import android.content.res.Resources;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
+public class OesFilter extends Filter {
+
+    public OesFilter(Resources resource) {
+        super(resource,"shader/oes.vert", "shader/oes.frag");
+    }
+
+    public OesFilter(){
+        super(null,"attribute vec4 aVertexCo;\n" +
+                "attribute vec2 aTextureCo;\n" +
+                "\n" +
+                "uniform mat4 uVertexMatrix;\n" +
+                "uniform mat4 uTextureMatrix;\n" +
+                "\n" +
+                "varying vec2 vTextureCo;\n" +
+                "\n" +
+                "void main(){\n" +
+                "    gl_Position = uVertexMatrix*aVertexCo;\n" +
+                "    vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
+                "}",
+                "#extension GL_OES_EGL_image_external : require\n" +
+                "precision mediump float;\n" +
+                "varying vec2 vTextureCo;\n" +
+                "uniform samplerExternalOES uTexture;\n" +
+                "void main() {\n" +
+                "    gl_FragColor = texture2D( uTexture, vTextureCo);\n" +
+                "}");
+    }
+
+    @Override
+    protected void onBindTexture(int textureId) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,textureId);
+        GLES20.glUniform1i(mGLTexture,0);
+    }
+}

+ 87 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/FrameBuffer.java

@@ -0,0 +1,87 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.opengl.GLES20;
+
+/**
+ * Created by Yj on 2017/9/13.
+ */
+
+public class FrameBuffer {
+
+    private int[] mFrameTemp;
+
+    public int bindFrameBuffer(int width,int height){
+        if(mFrameTemp==null){
+            mFrameTemp=new int[3];
+            GLES20.glGenFramebuffers(1,mFrameTemp,0);
+            GLES20.glGenTextures(1,mFrameTemp,1);
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,mFrameTemp[1]);
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,GLES20.GL_RGBA, width, height,
+                    0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+            //minification filter: use the color of the nearest texel (GL_NEAREST)
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
+            //magnification filter: weighted average of the nearest texels (GL_LINEAR)
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
+            //wrap mode S: clamp coordinates to [1/2n, 1-1/2n] so sampling never blends with the border
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE);
+            //wrap mode T: clamp coordinates to [1/2n, 1-1/2n] so sampling never blends with the border
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);
+
+            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
+            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+                    GLES20.GL_TEXTURE_2D, mFrameTemp[1], 0);
+        }else{
+            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameTemp[0]);
+        }
+        return GLES20.glGetError();
+    }
+
+    public void createFrameBuffer(int width,int height,int tex_type,int tex_format,
+                                  int min_params,int max_params,int wrap_s,int wrap_t){
+        mFrameTemp=new int[3];
+        GLES20.glGenFramebuffers(1,mFrameTemp,0);
+        GLES20.glGenTextures(1,mFrameTemp,1);
+        GLES20.glBindTexture(tex_type,mFrameTemp[1]);
+        GLES20.glTexImage2D(tex_type, 0,tex_format, width, height,
+                0, tex_format, GLES20.GL_UNSIGNED_BYTE, null);
+        //minification filter
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_MIN_FILTER,min_params);
+        //magnification filter
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_MAG_FILTER,max_params);
+        //wrap mode for the S direction
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_WRAP_S,wrap_s);
+        //wrap mode for the T direction
+        GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_WRAP_T,wrap_t);
+
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
+        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+                tex_type, mFrameTemp[1], 0);
+    }
+
+    public int bindFrameBuffer(){
+        if(mFrameTemp==null)return -1;
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
+        return GLES20.glGetError();
+    }
+
+    public void unBindFrameBuffer(){
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[2]);
+    }
+
+    public int getCacheTextureId(){
+        return mFrameTemp!=null?mFrameTemp[1]:-1;
+    }
+
+    public void destroyFrameBuffer(){
+        if(mFrameTemp!=null){
+            GLES20.glDeleteFramebuffers(1,mFrameTemp,0);
+            GLES20.glDeleteTextures(1,mFrameTemp,1);
+            mFrameTemp=null;
+        }
+    }
+
+}
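
A sketch of the render-to-texture pattern this class encapsulates; the previously bound framebuffer is saved into slot 2 of the internal array and restored by unBindFrameBuffer():

    FrameBuffer fbo = new FrameBuffer();
    fbo.bindFrameBuffer(width, height);  // lazily creates the FBO and its color texture
    // ... draw here; output lands in the attached texture ...
    fbo.unBindFrameBuffer();             // restores the previously bound framebuffer
    int resultTexture = fbo.getCacheTextureId();
    fbo.destroyFrameBuffer();            // release GL objects when finished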

+ 415 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MP4Edior.java

@@ -0,0 +1,415 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+
+import com.joe.camera2recorddemo.MediaCodecUtil.TrackUtils;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Semaphore;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class MP4Edior {
+
+	//============================  OpenGL =========================
+
+	private SurfaceTexture mInputTexture;
+	private Surface mOutputSurface;
+	//    private EGLHelper mEncodeEGLHelper;
+	private EGLHelper mShowEGLHelper;
+
+	private boolean mGLThreadFlag = false;
+	private Thread mGLThread;
+	private WrapRenderer mRenderer;
+	private Semaphore mSem;
+	private int mInputTextureId;
+
+	private int mPreviewWidth = -1;
+	private int mPreviewHeight = -1;
+	private int mInputWidth = -1;
+	private int mInputHeight = -1;
+
+	private final Object VIDEO_LOCK = new Object();
+	private final Object REC_LOCK = new Object();
+
+	//===========================  MediaCodec ========================
+
+	private static final String TAG = "VideoToFrames";
+	private static final long DEFAULT_TIMEOUT_US = 10000;
+
+	private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
+
+	MediaExtractor extractor = null;
+	MediaCodec decoder = null;
+	MediaFormat mediaFormat;
+
+	private boolean isLoop = false;//whether to loop playback
+	private boolean isStop = false;//whether playback is stopped
+	private String videoFilePath;
+
+	private Size mSize;//size of the input video
+
+	public MP4Edior() {
+		mShowEGLHelper = new EGLHelper();
+		mSem = new Semaphore(0);
+	}
+
+	public Surface createInputSurfaceTexture() {
+		mInputTextureId = mShowEGLHelper.createTextureID();
+		mInputTexture = new SurfaceTexture(mInputTextureId);
+		new Handler(Looper.getMainLooper()).post(new Runnable() {
+			@Override
+			public void run() {
+				mInputTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+					@Override
+					public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+						mSem.release();
+					}
+				});
+			}
+		});
+		return new Surface(mInputTexture);
+	}
+
+	public void setOutputSurface(Surface surface, int width, int height) {
+		this.mOutputSurface = surface;
+		this.mPreviewWidth = width;
+		this.mPreviewHeight = height;
+	}
+
+	public void setRenderer(Renderer renderer) {
+		mRenderer = new WrapRenderer(renderer);
+	}
+
+	/**
+	 * Start the preview.
+	 */
+	public void startPreview() {
+		synchronized (REC_LOCK) {
+			mSem.drainPermits();
+			mGLThreadFlag = true;
+			mGLThread = new Thread(mGLRunnable);
+			mGLThread.start();
+		}
+	}
+
+	/**
+	 * Stop the preview.
+	 *
+	 * @throws InterruptedException if joining the GL thread is interrupted
+	 */
+	public void stopPreview() throws InterruptedException {
+		synchronized (REC_LOCK) {
+			mGLThreadFlag = false;
+			mSem.release();
+			if (mGLThread != null && mGLThread.isAlive()) {
+				mGLThread.join();
+				mGLThread = null;
+			}
+			Log.d("C2D", "CameraRecorder stopPreview");
+		}
+	}
+
+	private Runnable mGLRunnable = new Runnable() {
+		@Override
+		public void run() {
+			if (mOutputSurface == null) {
+				Log.e("C2D", "CameraRecorder GLThread exit : outputSurface==null");
+				return;
+			}
+			if (mPreviewWidth <= 0 || mPreviewHeight <= 0) {
+				Log.e("C2D", "CameraRecorder GLThread exit : Preview Size==0");
+				return;
+			}
+			mShowEGLHelper.setSurface(mOutputSurface);
+			boolean ret = mShowEGLHelper.createGLES(mPreviewWidth, mPreviewHeight);
+			if (!ret) {
+				Log.e("C2D", "CameraRecorder GLThread exit : createGLES failed");
+				return;
+			}
+			if (mRenderer == null) {
+				mRenderer = new WrapRenderer(null);
+			}
+			mRenderer.setFlag(WrapRenderer.TYPE_SURFACE);
+			mRenderer.create();
+			int[] t = new int[1];
+			GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, t, 0);
+			mRenderer.sizeChanged(mPreviewWidth, mPreviewHeight);
+			GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, t[0]);
+			while (mGLThreadFlag) {
+				try {
+					mSem.acquire();
+				} catch (InterruptedException e) {
+					e.printStackTrace();
+				}
+				if (mGLThreadFlag) {
+					mInputTexture.updateTexImage();
+					mInputTexture.getTransformMatrix(mRenderer.getTextureMatrix());
+					synchronized (VIDEO_LOCK) {
+						GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight);
+						mRenderer.draw(mInputTextureId);
+						mShowEGLHelper.swapBuffers();
+					}
+				}
+			}
+			mShowEGLHelper.destroyGLES();
+		}
+	};
+
+	/**
+	 * =====================================    MediaCodec   ===============================
+	 */
+
+	/**
+	 * Initialize the decoder.
+	 *
+	 * @param videoFilePath path of the video file to decode
+	 */
+	public void decodePrepare(String videoFilePath) {
+		this.videoFilePath = videoFilePath;
+		extractor = null;
+		decoder = null;
+		try {
+			File videoFile = new File(videoFilePath);
+			extractor = new MediaExtractor();
+			extractor.setDataSource(videoFile.toString());
+			int trackIndex = TrackUtils.selectVideoTrack(extractor);
+			if (trackIndex < 0) {
+				throw new RuntimeException("No video track found in " + videoFilePath);
+			}
+			extractor.selectTrack(trackIndex);
+			mediaFormat = extractor.getTrackFormat(trackIndex);
+			String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
+			decoder = MediaCodec.createDecoderByType(mime);
+			if (isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))) {
+				mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
+				Log.i(TAG, "set decode color format to type " + decodeColorFormat);
+			} else {
+				Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
+			}
+
+			//read the width/height, taking the rotation metadata into account
+			int rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
+			if (rotation == 90 || rotation == 270) {
+				mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+				mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+			} else {
+				mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+				mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+			}
+			mSize = new Size(mInputWidth, mInputHeight);
+
+			//additional format settings
+			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
+			decoder.configure(mediaFormat, createInputSurfaceTexture(), null, 0);
+			decoder.start();
+		} catch (IOException ioe) {
+			throw new RuntimeException("failed init encoder", ioe);
+		}
+	}
+
+	public void close() {
+		try {
+			if (decoder != null) {
+				decoder.stop();
+				decoder.release();
+			}
+
+			if (extractor != null) {
+				extractor.release();
+				extractor = null;
+			}
+		} catch (IllegalStateException e) {
+			e.printStackTrace();
+		}
+	}
+
+	/**
+	 * Called externally to run the decode loop.
+	 */
+	public void excuate() {
+		try {
+			decodeFramesToImage(decoder, extractor, mediaFormat);
+		} finally {
+			close();
+			if (isLoop && !isStop) {
+				decodePrepare(videoFilePath);
+				excuate();
+			}
+		}
+
+	}
+
+	/**
+	 * Set whether playback loops.
+	 *
+	 * @param isLoop true to restart decoding when the stream ends
+	 */
+	public void setLoop(boolean isLoop) {
+		this.isLoop = isLoop;
+	}
+
+	/**
+	 * Check whether the codec supports a color format.
+	 *
+	 * @param colorFormat the color format to look for
+	 * @param caps        the codec capabilities to search
+	 * @return true if the format is supported
+	 */
+	private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps) {
+		for (int c : caps.colorFormats) {
+			if (c == colorFormat) {
+				return true;
+			}
+		}
+		return false;
+	}
+
+	/**
+	 * Run the decode loop, rendering each frame to the input surface.
+	 *
+	 * @param decoder     the configured video decoder
+	 * @param extractor   the extractor positioned on the video track
+	 * @param mediaFormat the track format (currently unused)
+	 */
+	public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
+		MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+		boolean sawInputEOS = false;
+		boolean sawOutputEOS = false;
+
+		long startMs = System.currentTimeMillis();
+		while (!sawOutputEOS && !isStop) {
+			if (!sawInputEOS) {
+				int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
+				if (inputBufferId >= 0) {
+					ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
+					int sampleSize = extractor.readSampleData(inputBuffer, 0); //read one sample of video data into the input buffer; sampleSize is its byte count
+					if (sampleSize < 0) {
+						decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+						sawInputEOS = true;
+					} else {
+						long presentationTimeUs = extractor.getSampleTime();
+						Log.v(TAG, "presentationTimeUs:" + presentationTimeUs);
+						decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
+						extractor.advance();  //move to the next sample in the file
+					}
+				}
+			}
+			int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
+			if (outputBufferId >= 0) {
+				if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+					sawOutputEOS = true;
+				}
+				boolean doRender = (info.size != 0);
+				if (doRender) {
+					sleepRender(info, startMs);//throttle rendering to the frame's presentation time
+					decoder.releaseOutputBuffer(outputBufferId, true);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Stop decoding/playback.
+	 */
+	public void stop() {
+		isStop = true;
+	}
+
+	/**
+	 * Allow decoding/playback to run.
+	 */
+	public void start() {
+		isStop = false;
+	}
+
+	/**
+	 * Get the video size.
+	 *
+	 * @return the input video size
+	 */
+	public Size getSize() {
+		return mSize;
+	}
+
+	/**
+	 * Sleep until each frame's presentation time so playback keeps the original frame rate.
+	 */
+	private void sleepRender(MediaCodec.BufferInfo audioBufferInfo, long startMs) {
+		while (audioBufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
+			try {
+				Thread.sleep(10);
+			} catch (InterruptedException e) {
+				e.printStackTrace();
+				break;
+			}
+		}
+	}
+
+	/**
+	 * Set the transformation to apply.
+	 *
+	 * @param transformation scale/rotation/crop/flip description
+	 */
+	public void setTransformation(Transformation transformation) {
+		float[] vms = mRenderer.getmFilter().getVertexMatrix();
+		if (transformation.getScaleType() == MatrixUtils.TYPE_CENTERINSIDE) {
+			if (transformation.getRotation() == 90 || transformation.getRotation() == 270) {
+				MatrixUtils.getMatrix(vms, MatrixUtils.TYPE_CENTERINSIDE, transformation.getInputSize().getWidth(), transformation.getInputSize().getHeight()
+						, transformation.getOutputSize().getHeight(), transformation.getOutputSize().getWidth());
+			} else {
+				MatrixUtils.getMatrix(vms, MatrixUtils.TYPE_CENTERINSIDE, transformation.getInputSize().getHeight(), transformation.getInputSize().getWidth()
+						, transformation.getOutputSize().getHeight(), transformation.getOutputSize().getWidth());
+			}
+		}
+
+		//apply rotation
+		if (transformation.getRotation() != 0) {
+			MatrixUtils.rotation(vms, transformation.getRotation());
+		}
+
+		//apply cropping
+		if (transformation.getCropRect() != null) {
+			float[] vtCo = new float[8];
+			MatrixUtils.crop(vtCo,transformation.getCropRect().x,transformation.getCropRect().y
+					,transformation.getCropRect().width,transformation.getCropRect().height);
+			mRenderer.getmFilter().setTextureCo(vtCo);
+		}
+
+		//apply flipping
+		if (transformation.getFlip() != Transformation.FLIP_NONE) {
+			switch (transformation.getFlip()) {
+				case Transformation.FLIP_HORIZONTAL:
+					MatrixUtils.flip(vms, true, false);
+					break;
+				case Transformation.FLIP_VERTICAL:
+					MatrixUtils.flip(vms, false, true);
+					break;
+				case Transformation.FLIP_HORIZONTAL_VERTICAL:
+					MatrixUtils.flip(vms, true, true);
+					break;
+				default:
+					break;
+			}
+		}
+
+		//upload the adjusted vertex matrix
+		mRenderer.getmFilter().setVertexMatrix(vms);
+	}
+}
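
A rough sketch of the intended call order for playback (threading simplified; excuate() blocks in the decode loop, so it is assumed to run on a worker thread, and the file path is illustrative):

    final MP4Edior editor = new MP4Edior();
    editor.setOutputSurface(surface, viewWidth, viewHeight);
    editor.setRenderer(new Mp4EditFilter(getResources()));
    editor.setLoop(true);
    editor.decodePrepare("/sdcard/input.mp4"); // illustrative path
    editor.startPreview();                     // spins up the GL thread
    new Thread(new Runnable() {
        @Override
        public void run() {
            editor.excuate();                  // blocking decode loop
        }
    }).start();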

+ 59 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/AdjustFilter.java

@@ -0,0 +1,59 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.ChooseFilter;
+import com.joe.camera2recorddemo.OpenGL.Filter.DistortionFilter;
+import com.joe.camera2recorddemo.OpenGL.Filter.GroupFilter;
+
+/**
+ * Adjustment filter group (contrast, brightness, saturation, vignette, distortion).
+ * Created by Yj on 2018/6/14.
+ */
+
+public class AdjustFilter extends GroupFilter {
+	private ContrastFilter contrastFilter;
+	private BrightnessFilter brightnessFilter;
+	private SaturationFilter saturationFilter;
+	private VignetteFilter vignetteFilter;
+	private DistortionFilter distortionFilter;
+
+	public AdjustFilter(Resources resource) {
+		super(resource);
+	}
+
+	@Override
+	protected void initBuffer() {
+		super.initBuffer();
+		contrastFilter = new ContrastFilter(mRes);
+		brightnessFilter = new BrightnessFilter(mRes);
+		saturationFilter = new SaturationFilter(mRes);
+		distortionFilter = new DistortionFilter(mRes);
+		vignetteFilter = new VignetteFilter(mRes);
+		addFilter(contrastFilter);
+		addFilter(brightnessFilter);
+		addFilter(saturationFilter);
+		addFilter(vignetteFilter);
+		addFilter(distortionFilter);
+	}
+
+	public ContrastFilter getContrastFilter() {
+		return contrastFilter;
+	}
+
+	public BrightnessFilter getBrightnessFilter() {
+		return brightnessFilter;
+	}
+
+	public SaturationFilter getSaturationFilter() {
+		return saturationFilter;
+	}
+
+	public DistortionFilter getDistortionFilter() {
+		return distortionFilter;
+	}
+
+	public VignetteFilter getVignetteFilter() {
+		return vignetteFilter;
+	}
+}
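
A sketch of tuning the grouped adjustments at runtime; the values are illustrative and the useful ranges depend on the shaders:

    AdjustFilter adjust = new AdjustFilter(getResources());
    adjust.getBrightnessFilter().setBrightnessCode(0.1f); // brightness offset
    adjust.getContrastFilter().setContrastCode(1.2f);     // contrast factor
    adjust.getSaturationFilter().setSaturationCode(0.8f); // saturation factor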

+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/BrightnessFilter.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+/**
+ * Brightness filter.
+ */
+public class BrightnessFilter extends Filter {
+
+    private int brightnessType;
+    private float brightnessCode = 0.0f;
+
+    public BrightnessFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/brightness.frag");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        brightnessType = GLES20.glGetUniformLocation(mGLProgram, "brightness");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(brightnessType, brightnessCode);
+    }
+
+    public void setBrightnessCode(float brightnessCode) {
+        this.brightnessCode = brightnessCode;
+    }
+}

+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/ContrastFilter.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+/**
+ * Contrast filter.
+ */
+public class ContrastFilter extends Filter {
+
+    private int contrastType;
+    private float contrastCode = 1.0f;
+
+    public ContrastFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/contrast.frag");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        contrastType = GLES20.glGetUniformLocation(mGLProgram, "stepcv");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(contrastType, contrastCode);
+    }
+
+    public void setContrastCode(float contrastCode) {
+        this.contrastCode = contrastCode;
+    }
+}

+ 35 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/SaturationFilter.java

@@ -0,0 +1,35 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+/**
+ * Saturation filter.
+ */
+public class SaturationFilter extends Filter {
+
+    private int saturationType;
+    private float saturationCode = 1.0f;
+
+    public SaturationFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/saturation.frag");
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        saturationType = GLES20.glGetUniformLocation(mGLProgram, "saturation");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform1f(saturationType, saturationCode);
+    }
+
+    public void setSaturationCode(float saturationCode) {
+        this.saturationCode = saturationCode;
+    }
+}

+ 57 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/MhFilter/VignetteFilter.java

@@ -0,0 +1,57 @@
+package com.joe.camera2recorddemo.OpenGL.MhFilter;
+
+import android.content.res.Resources;
+import android.graphics.PointF;
+import android.opengl.GLES20;
+
+import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
+
+import java.nio.FloatBuffer;
+
+/**
+ * Vignette filter.
+ */
+public class VignetteFilter extends Filter {
+
+    private int mVignetteCenterLocation;
+    private PointF mVignetteCenter = new PointF();
+    private int mVignetteColorLocation;
+    private float[] mVignetteColor = new float[] {0.0f, 0.0f, 0.0f};
+    private int mVignetteStartLocation;
+    private float mVignetteStart = 0.75f;
+    private int mVignetteEndLocation;
+    private float mVignetteEnd = 0.75f;
+    private float[] vec2 = new float[2];
+
+    public VignetteFilter(Resources resource) {
+        super(resource,"shader/base.vert","shader/mh/vignette.frag");
+        vec2[0] = mVignetteCenter.x;
+        vec2[1] = mVignetteCenter.y;
+    }
+
+    @Override
+    protected void onCreate() {
+        super.onCreate();
+        mVignetteCenterLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteCenter");
+        mVignetteColorLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteColor");
+        mVignetteStartLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteStart");
+        mVignetteEndLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteEnd");
+    }
+
+    @Override
+    protected void onSetExpandData() {
+        super.onSetExpandData();
+        GLES20.glUniform2fv(mVignetteCenterLocation,1,vec2,0);
+        GLES20.glUniform3fv(mVignetteColorLocation,1, FloatBuffer.wrap(mVignetteColor));
+        GLES20.glUniform1f(mVignetteStartLocation, mVignetteStart);
+        GLES20.glUniform1f(mVignetteEndLocation, mVignetteEnd);
+    }
+
+    public void setmVignetteStart(float mVignetteStart) {
+        this.mVignetteStart = mVignetteStart;
+    }
+
+    public void setmVignetteEnd(float mVignetteEnd) {
+        this.mVignetteEnd = mVignetteEnd;
+    }
+}

+ 582 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Mp4Processor.java

@@ -0,0 +1,582 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMetadataRetriever;
+import android.media.MediaMuxer;
+import android.os.Build;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.Semaphore;
+
+
+/**
+ * MP4 processing tool; for now it only processes the video frames.
+ * Android 4.4 devices cannot handle video/mp4v-es streams (MediaMuxer fails to stop); 5.0 and above work.
+ */
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class Mp4Processor {
+
+	private final int TIME_OUT = 1000;
+
+	private String mInputPath;                  //input path
+	private String mOutputPath;                 //output path
+
+	private MediaCodec mVideoDecoder;           //video decoder
+	private MediaCodec mVideoEncoder;           //video encoder
+	//private MediaCodec mAudioDecoder;           //audio decoder
+	//private MediaCodec mAudioEncoder;           //audio encoder
+	private MediaExtractor mExtractor;          //audio/video demuxer
+	private MediaMuxer mMuxer;                  //audio/video muxer
+	private EGLHelper mEGLHelper;               //helper that creates the GL environment
+	private MediaCodec.BufferInfo mVideoDecoderBufferInfo;  //decode info of the current video frame
+	//private MediaCodec.BufferInfo mAudioDecoderBufferInfo;  //decode info of the current audio frame
+	private MediaCodec.BufferInfo mVideoEncoderBufferInfo;  //encode info of the current video frame
+	private MediaCodec.BufferInfo mAudioEncoderBufferInfo;  //encode info of the current audio frame
+
+	private int mAudioEncoderTrack = -1;     //muxer (encoder) audio track
+	private int mVideoEncoderTrack = -1;     //muxer (encoder) video track
+	private int mAudioDecoderTrack = -1;     //extractor (decoder) audio track
+	private int mVideoDecoderTrack = -1;     //extractor (decoder) video track
+
+	//private String mAudioMime;
+	//private String mVideoMime;
+
+	private int mInputVideoWidth = 0;     //width of the input video
+	private int mInputVideoHeight = 0;    //height of the input video
+
+	private int mOutputVideoWidth = 0;    //width of the output video
+	private int mOutputVideoHeight = 0;   //height of the output video
+	private int mVideoTextureId;        //texture holding the original video image
+	private SurfaceTexture mVideoSurfaceTexture;    //receives the decoded image stream of the original video
+
+	private boolean isRenderToWindowSurface;        //whether to render to a user-supplied window surface, for testing
+	private Surface mOutputSurface;                 //surface the video is output to
+
+	private Thread mDecodeThread;
+	private Thread mGLThread;
+	private boolean mCodecFlag = false;
+	private boolean isVideoExtractorEnd = false;
+	private boolean isAudioExtractorEnd = false;
+	private boolean isStarted = false;
+	private WrapRenderer mRenderer;
+	private boolean mGLThreadFlag = false;
+	private Semaphore mSem;
+	private Semaphore mDecodeSem;
+
+	private final Object Extractor_LOCK = new Object();
+	private final Object MUX_LOCK = new Object();
+	private final Object PROCESS_LOCK = new Object();
+
+	private OnProgressListener mProgressListener;
+
+	private boolean isUserWantToStop = false;
+	private long mVideoStopTimeStamp = 0;     //video timestamp at the moment of stopping; used to trim audio when the user stops early
+
+	private long mTotalVideoTime = 0;     //total duration of the video
+
+	private int filterRotation = 0;//rotation angle applied by the filter
+
+	public Mp4Processor() {
+		mEGLHelper = new EGLHelper();
+		mVideoDecoderBufferInfo = new MediaCodec.BufferInfo();
+		//mAudioDecoderBufferInfo=new MediaCodec.BufferInfo();
+		mVideoEncoderBufferInfo = new MediaCodec.BufferInfo();
+		mAudioEncoderBufferInfo = new MediaCodec.BufferInfo();
+	}
+
+
+	/**
+	 * Set the MP4 file to process.
+	 *
+	 * @param path file path
+	 */
+	public void setInputPath(String path) {
+		this.mInputPath = path;
+	}
+
+	/**
+	 * Set where the processed MP4 is stored.
+	 *
+	 * @param path file path
+	 */
+	public void setOutputPath(String path) {
+		this.mOutputPath = path;
+	}
+
+	/**
+	 * Set the rotation angle of the filter.
+	 *
+	 * @param rotation rotation in degrees
+	 */
+	public void setFilterRotation(int rotation) {
+		this.filterRotation = rotation;
+	}
+
+	/**
+	 * Render directly to the given Surface; for testing.
+	 *
+	 * @param surface the render target
+	 */
+	public void setOutputSurface(Surface surface) {
+		this.mOutputSurface = surface;
+		this.isRenderToWindowSurface = surface != null;
+	}
+
+	/**
+	 * Set the user processing callback.
+	 *
+	 * @param renderer the processing interface
+	 */
+	public void setRenderer(Renderer renderer) {
+		mRenderer = new WrapRenderer(renderer);
+	}
+
+	public int getVideoSurfaceTextureId() {
+		return mVideoTextureId;
+	}
+
+	public SurfaceTexture getVideoSurfaceTexture() {
+		return mVideoSurfaceTexture;
+	}
+
+	/**
+	 * Set the image size of the output MP4; defaults to the input size.
+	 *
+	 * @param width  video image width
+	 * @param height video image height
+	 */
+	public void setOutputSize(int width, int height) {
+		this.mOutputVideoWidth = width;
+		this.mOutputVideoHeight = height;
+	}
+
+	public void setOnCompleteListener(OnProgressListener listener) {
+		this.mProgressListener = listener;
+	}
+
+	private boolean prepare() throws IOException {
+		//TODO: read the video rotation metadata and handle it accordingly
+		synchronized (PROCESS_LOCK) {
+			int videoRotation = 0;
+			MediaMetadataRetriever mMetRet = new MediaMetadataRetriever();
+			mMetRet.setDataSource(mInputPath);
+			mExtractor = new MediaExtractor();
+			mExtractor.setDataSource(mInputPath);
+			int count = mExtractor.getTrackCount();
+			//parse the MP4 tracks
+			for (int i = 0; i < count; i++) {
+				MediaFormat format = mExtractor.getTrackFormat(i);
+				String mime = format.getString(MediaFormat.KEY_MIME);
+				Log.d("Mp4Processor", "extractor format-->" + mExtractor.getTrackFormat(i));
+				if (mime.startsWith("audio")) {
+					mAudioDecoderTrack = i;
+					//TODO: audio is not processed for now; revisit when audio processing is needed
+					/*mAudioDecoder=MediaCodec.createDecoderByType(mime);
+					mAudioDecoder.configure(format,null,null,0);
+                    if(!isRenderToWindowSurface){
+                        Log.e("wuwang", format.toString());
+                        MediaFormat audioFormat=MediaFormat.createAudioFormat(mime,
+                                format.getInteger(MediaFormat.KEY_SAMPLE_RATE),
+                                format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
+                        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
+                                format.getInteger(MediaFormat.KEY_AAC_PROFILE));
+                        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE,
+                                Integer.valueOf(mMetRet.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)));
+                        mAudioEncoder=MediaCodec.createEncoderByType(mime);
+                        mAudioEncoder.configure(audioFormat,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
+                    }*/
+				} else if (mime.startsWith("video")) {
+					//below Android 5.0, mp4v-es cannot be decoded //todo problems may also occur on 5.0+, root cause unknown
+//                    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP&&mime.equals(MediaFormat.MIMETYPE_VIDEO_MPEG4)) {
+//                        return false;
+//                    }
+					mVideoDecoderTrack = i;
+					mTotalVideoTime = Long.valueOf(mMetRet.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION));
+					String rotation = mMetRet.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
+					if (rotation != null) {
+						videoRotation = Integer.valueOf(rotation);
+					}
+					if (videoRotation == 90 || videoRotation == 270) {
+						mInputVideoHeight = format.getInteger(MediaFormat.KEY_WIDTH);
+						mInputVideoWidth = format.getInteger(MediaFormat.KEY_HEIGHT);
+					} else {
+						mInputVideoWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+						mInputVideoHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+					}
+					Log.e("Mp4Processor", "createDecoder");
+					mVideoDecoder = MediaCodec.createDecoderByType(mime);
+					Log.e("Mp4Processor", "createDecoder end");
+					mVideoTextureId = mEGLHelper.createTextureID();
+					mVideoSurfaceTexture = new SurfaceTexture(mVideoTextureId);
+					mVideoSurfaceTexture.setOnFrameAvailableListener(mFrameAvaListener);
+					mVideoDecoder.configure(format, new Surface(mVideoSurfaceTexture), null, 0);
+					if (!isRenderToWindowSurface) {
+						if (mOutputVideoWidth == 0 || mOutputVideoHeight == 0) {
+							mOutputVideoWidth = mInputVideoWidth;
+							mOutputVideoHeight = mInputVideoHeight;
+						}
+						//swap the output width/height when the filter rotation is 90 or 270 degrees
+						if(filterRotation == 90 || filterRotation == 270){
+							int temp = mOutputVideoWidth;
+							mOutputVideoWidth = mOutputVideoHeight;
+							mOutputVideoHeight = temp;
+						}
+						Log.v("Mp4ProcessorHH","w:"+mOutputVideoWidth+";h:"+mOutputVideoHeight+";r:"+filterRotation);
+						MediaFormat videoFormat = MediaFormat.createVideoFormat(/*mime*/"video/avc", mOutputVideoWidth, mOutputVideoHeight);
+						videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+						videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, mOutputVideoHeight * mOutputVideoWidth * 5);
+						videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 24);
+						videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+//						videoFormat.setInteger(MediaFormat.KEY_ROTATION,0);
+						mVideoEncoder = MediaCodec.createEncoderByType(/*mime*/"video/avc");
+						mVideoEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+						mOutputSurface = mVideoEncoder.createInputSurface();
+						Bundle bundle = new Bundle();
+						if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+							bundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, mOutputVideoHeight * mOutputVideoWidth * 5);
+							mVideoEncoder.setParameters(bundle);
+						}
+					}
+				}
+			}
+			if (!isRenderToWindowSurface) {
+				//if no target Surface was set by the user, export the video; audio is not processed for now
+				mMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+//                mMuxer.setOrientationHint(videoRotation);
+				Log.d("Mp4Processor", "video rotation:" + videoRotation);
+				//if the MP4 contains an audio track
+				if (mAudioDecoderTrack >= 0) {
+					MediaFormat format = mExtractor.getTrackFormat(mAudioDecoderTrack);
+					Log.d("Mp4Processor", "audio track-->" + format.toString());
+
+					mAudioEncoderTrack = mMuxer.addTrack(format);
+				}
+			}
+		}
+		return true;
+	}
+
+	public boolean start() throws IOException {
+		synchronized (PROCESS_LOCK) {
+			if (!isStarted) {
+				if (!prepare()) {
+					Log.e("Mp4Processor", "prepare failed");
+					return false;
+				}
+
+				isUserWantToStop = false;
+
+				isVideoExtractorEnd = false;
+				isAudioExtractorEnd = false;
+				mGLThreadFlag = true;
+				mVideoDecoder.start();
+				//mAudioDecoder.start();
+				if (!isRenderToWindowSurface) {
+					//mAudioEncoder.start();
+					mVideoEncoder.start();
+				}
+
+				mGLThread = new Thread(new Runnable() {
+					@Override
+					public void run() {
+						glRunnable();
+					}
+				});
+				mGLThread.start();
+
+				mCodecFlag = true;
+				mDecodeThread = new Thread(new Runnable() {
+					@Override
+					public void run() {
+						//video processing
+						if (mVideoDecoderTrack >= 0) {
+							Log.d("Mp4Processor", "videoDecodeStep start");
+							while (mCodecFlag && !videoDecodeStep()) ;
+							Log.d("Mp4Processor", "videoDecodeStep end");
+							mGLThreadFlag = false;
+							try {
+								mSem.release();
+								mGLThread.join();
+							} catch (InterruptedException e) {
+								e.printStackTrace();
+							}
+						}
+						//copy the audio from the source video into the new video
+						if (mAudioDecoderTrack >= 0 && mVideoEncoderTrack >= 0) {
+							ByteBuffer buffer = ByteBuffer.allocate(1024 * 32);
+							while (mCodecFlag && !audioDecodeStep(buffer)) ;
+							buffer.clear();
+						}
+
+						Log.d("Mp4Processor", "codec thread_finish");
+						mCodecFlag = false;
+						avStop();
+						//todo distinguish the case where the user cancelled
+						if (mProgressListener != null) {
+							mProgressListener.onComplete(mOutputPath);
+						}
+					}
+				});
+				mDecodeThread.start();
+				isStarted = true;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Blocks the calling thread until the decode thread has finished.
+	 */
+	public void waitProcessFinish() throws InterruptedException {
+		if (mDecodeThread != null && mDecodeThread.isAlive()) {
+			mDecodeThread.join();
+		}
+	}
+
+	private boolean audioDecodeStep(ByteBuffer buffer) {
+		boolean isTimeEnd = false;
+		buffer.clear();
+		synchronized (Extractor_LOCK) {
+			mExtractor.selectTrack(mAudioDecoderTrack);
+			int length = mExtractor.readSampleData(buffer, 0);
+			if (length != -1) {
+				int flags = mExtractor.getSampleFlags();
+				mAudioEncoderBufferInfo.size = length;
+				mAudioEncoderBufferInfo.flags = flags;
+				mAudioEncoderBufferInfo.presentationTimeUs = mExtractor.getSampleTime();
+				mAudioEncoderBufferInfo.offset = 0;
+				isTimeEnd = mExtractor.getSampleTime() >= mVideoStopTimeStamp;
+				mMuxer.writeSampleData(mAudioEncoderTrack, buffer, mAudioEncoderBufferInfo);
+			}
+			isAudioExtractorEnd = !mExtractor.advance();
+		}
+		return isAudioExtractorEnd || isTimeEnd;
+	}
+
+	//decodes one step of video onto the SurfaceTexture for further processing; returns whether the last video frame was reached
+	private boolean videoDecodeStep() {
+		int mInputIndex = mVideoDecoder.dequeueInputBuffer(TIME_OUT);
+		if (mInputIndex >= 0) {
+			ByteBuffer buffer = getInputBuffer(mVideoDecoder, mInputIndex);
+			buffer.clear();
+			synchronized (Extractor_LOCK) {
+				mExtractor.selectTrack(mVideoDecoderTrack);
+				int ret = mExtractor.readSampleData(buffer, 0);
+				if (ret != -1) {
+					mVideoStopTimeStamp = mExtractor.getSampleTime();
+					Log.d("Mp4Processor", "mVideoStopTimeStamp:" + mVideoStopTimeStamp);
+					mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
+				}
+				isVideoExtractorEnd = !mExtractor.advance();
+			}
+		}
+		while (true) {
+			int mOutputIndex = mVideoDecoder.dequeueOutputBuffer(mVideoDecoderBufferInfo, TIME_OUT);
+			if (mOutputIndex >= 0) {
+				try {
+					Log.d("Mp4Processor", " mDecodeSem.acquire ");
+					mSem.release();
+					if (!isUserWantToStop) {
+						mDecodeSem.acquire();
+					}
+					Log.d("Mp4Processor", " mDecodeSem.acquire end ");
+				} catch (InterruptedException e) {
+					e.printStackTrace();
+				}
+				mVideoDecoder.releaseOutputBuffer(mOutputIndex, true);
+			} else if (mOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+				//MediaFormat format=mVideoDecoder.getOutputFormat();
+			} else if (mOutputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+				break;
+			}
+		}
+		return isVideoExtractorEnd || isUserWantToStop;
+	}
+
+	private boolean videoEncodeStep(boolean isEnd) {
+		if (isEnd) {
+			mVideoEncoder.signalEndOfInputStream();
+		}
+		while (true) {
+			int mOutputIndex = mVideoEncoder.dequeueOutputBuffer(mVideoEncoderBufferInfo, TIME_OUT);
+			Log.d("Mp4Processor", "videoEncodeStep-------------------mOutputIndex=" + mOutputIndex + "/" + mVideoEncoderBufferInfo.presentationTimeUs);
+			if (mOutputIndex >= 0) {
+				ByteBuffer buffer = getOutputBuffer(mVideoEncoder, mOutputIndex);
+				if (mVideoEncoderBufferInfo.size > 0) {
+					mMuxer.writeSampleData(mVideoEncoderTrack, buffer, mVideoEncoderBufferInfo);
+				}
+				mVideoEncoder.releaseOutputBuffer(mOutputIndex, false);
+			} else if (mOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+				MediaFormat format = mVideoEncoder.getOutputFormat();
+				Log.d("Mp4Processor", "video format -->" + format.toString());
+				mVideoEncoderTrack = mMuxer.addTrack(format);
+				mMuxer.start();
+				synchronized (MUX_LOCK) {
+					MUX_LOCK.notifyAll();
+				}
+			} else if (mOutputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
+				break;
+			}
+		}
+		return false;
+	}
+
+	private void glRunnable() {
+		mSem = new Semaphore(0);
+		mDecodeSem = new Semaphore(0);
+		mEGLHelper.setSurface(mOutputSurface);
+		boolean ret = mEGLHelper.createGLES(mOutputVideoWidth, mOutputVideoHeight);
+		if (!ret) return;
+		if (mRenderer == null) {
+			mRenderer = new WrapRenderer(null);
+		}
+		mRenderer.setFlag(WrapRenderer.TYPE_MOVE);
+		mRenderer.create();
+		mRenderer.sizeChanged(mOutputVideoWidth, mOutputVideoHeight);
+		while (mGLThreadFlag) {
+			try {
+				Log.d("Mp4Processor", " mSem.acquire ");
+				mSem.acquire();
+				Log.d("Mp4Processor", " mSem.acquire end");
+			} catch (InterruptedException e) {
+				e.printStackTrace();
+			}
+			if (mGLThreadFlag) {
+				mVideoSurfaceTexture.updateTexImage();
+				//todo videos carrying a rotation flag still need special handling
+				mVideoSurfaceTexture.getTransformMatrix(mRenderer.getTextureMatrix());
+				mRenderer.draw(mVideoTextureId);
+				mEGLHelper.setPresentationTime(mVideoDecoderBufferInfo.presentationTimeUs * 1000);
+				if (!isRenderToWindowSurface) {
+					videoEncodeStep(false);
+				}
+				mEGLHelper.swapBuffers();
+			}
+			if (mProgressListener != null) {
+				mProgressListener.onProgress(getTotalVideoTime() * 1000L, mVideoDecoderBufferInfo.presentationTimeUs);
+			}
+			mDecodeSem.release();
+		}
+		if (!isRenderToWindowSurface) {
+			videoEncodeStep(true);
+		}
+		mRenderer.destroy();
+		mEGLHelper.destroyGLES();
+	}
+
+	public long getPresentationTime() {
+		return mVideoDecoderBufferInfo.presentationTimeUs * 1000;
+	}
+
+	public long getTotalVideoTime() {
+		return mTotalVideoTime;
+	}
+
+	private SurfaceTexture.OnFrameAvailableListener mFrameAvaListener = new SurfaceTexture.OnFrameAvailableListener() {
+		@Override
+		public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+			Log.e("Mp4Processor", "mSem.release ");
+//            mSem.release();
+		}
+	};
+
+	private void avStop() {
+		if (isStarted) {
+			if (mVideoDecoder != null) {
+				mVideoDecoder.stop();
+				mVideoDecoder.release();
+				mVideoDecoder = null;
+			}
+			if (!isRenderToWindowSurface && mVideoEncoder != null) {
+				mVideoEncoder.stop();
+				mVideoEncoder.release();
+				mVideoEncoder = null;
+			}
+			if (!isRenderToWindowSurface) {
+				if (mMuxer != null && mVideoEncoderTrack >= 0) {
+					try {
+						mMuxer.stop();
+					} catch (IllegalStateException e) {
+						e.printStackTrace();
+					}
+				}
+				if (mMuxer != null) {
+					try {
+						mMuxer.release();
+					} catch (IllegalStateException e) {
+						e.printStackTrace();
+					}
+					mMuxer = null;
+				}
+			}
+			if (mExtractor != null) {
+				mExtractor.release();
+			}
+			isStarted = false;
+			mVideoEncoderTrack = -1;
+			mVideoDecoderTrack = -1;
+			mAudioEncoderTrack = -1;
+			mAudioDecoderTrack = -1;
+		}
+	}
+
+	public boolean stop() throws InterruptedException {
+		synchronized (PROCESS_LOCK) {
+			if (isStarted) {
+				if (mCodecFlag) {
+					mDecodeSem.release();
+					isUserWantToStop = true;
+					if (mDecodeThread != null && mDecodeThread.isAlive()) {
+						Log.d("Mp4Processor", "try to stop decode thread");
+						mDecodeThread.join();
+						Log.d("Mp4Processor", "decode thread stopped");
+					}
+					isUserWantToStop = false;
+				}
+			}
+		}
+		return true;
+	}
+
+	public boolean release() throws InterruptedException {
+		synchronized (PROCESS_LOCK) {
+			if (mCodecFlag) {
+				stop();
+			}
+		}
+		return true;
+	}
+
+	private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
+		if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+			return codec.getInputBuffer(index);
+		} else {
+			return codec.getInputBuffers()[index];
+		}
+	}
+
+	private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
+		if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+			return codec.getOutputBuffer(index);
+		} else {
+			return codec.getOutputBuffers()[index];
+		}
+	}
+
+	public interface OnProgressListener {
+		void onProgress(long max, long current);
+
+		void onComplete(String path);
+	}
+
+}
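
A minimal usage sketch for the processor above (illustrative only; the file paths are placeholders):

    Mp4Processor processor = new Mp4Processor();
    processor.setInputPath("/sdcard/DCIM/input.mp4");
    processor.setOutputPath("/sdcard/DCIM/output.mp4");
    processor.setOnCompleteListener(new Mp4Processor.OnProgressListener() {
        @Override
        public void onProgress(long max, long current) {
            // max is the total duration in microseconds, current the last rendered timestamp
        }

        @Override
        public void onComplete(String path) {
            // path is the file written by the muxer
        }
    });
    try {
        processor.start();             // spawns the decode and GL threads
        processor.waitProcessFinish(); // blocks until the decode thread exits
    } catch (IOException | InterruptedException e) {
        e.printStackTrace();
    }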

+ 13 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Renderer.java

@@ -0,0 +1,13 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+public interface Renderer {
+
+    void create();
+
+    void sizeChanged(int width, int height);
+
+    void draw(int texture);
+
+    void destroy();
+
+}
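
A skeletal implementation sketch for this interface, delegating to one of the filters added in this commit (Mp4EditFilter; its Resources constructor is the one CameraRecordView uses below):

    public class FilterRenderer implements Renderer {
        private final Mp4EditFilter mFilter;

        public FilterRenderer(android.content.res.Resources res) {
            mFilter = new Mp4EditFilter(res);
        }

        @Override
        public void create() { mFilter.create(); }

        @Override
        public void sizeChanged(int width, int height) { mFilter.sizeChanged(width, height); }

        @Override
        public void draw(int texture) { mFilter.draw(texture); }

        @Override
        public void destroy() { mFilter.destroy(); }
    }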

+ 180 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/TransUtil.java

@@ -0,0 +1,180 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+
+/**
+ * Created by Yj on 2017/10/30.
+ * Helper class for texture-coordinate transformations.
+ */
+
+public class TransUtil {
+
+	public static float[] textureCoords;
+	/**
+	 * Returns the transformed coordinates.
+	 * @param tc 				source coordinates (modified in place)
+	 * @param transformation	the transformation to apply
+	 * @return the transformed coordinates
+	 */
+	public static float[] getTransformationCo(float[] tc,final Transformation transformation) {
+		textureCoords = tc;
+		if (transformation.getCropRect() != null) {
+			resolveCrop(transformation.getCropRect().x, transformation.getCropRect().y,
+					transformation.getCropRect().width, transformation.getCropRect().height);
+		} else {
+			resolveCrop(Transformation.FULL_RECT.x, Transformation.FULL_RECT.y,
+					Transformation.FULL_RECT.width, Transformation.FULL_RECT.height);
+		}
+		resolveFlip(transformation.getFlip());
+		resolveRotate(transformation.getRotation());
+		return textureCoords;
+	}
+
+	private static void resolveCrop(float x, float y, float width, float height) {
+		float minX = x;
+		float minY = y;
+		float maxX = minX + width;
+		float maxY = minY + height;
+
+		// left bottom
+		textureCoords[0] = minX;
+		textureCoords[1] = minY;
+		// right bottom
+		textureCoords[2] = maxX;
+		textureCoords[3] = minY;
+		// left top
+		textureCoords[4] = minX;
+		textureCoords[5] = maxY;
+		// right top
+		textureCoords[6] = maxX;
+		textureCoords[7] = maxY;
+	}
+
+	private static void resolveFlip(int flip) {
+		switch (flip) {
+			case Transformation.FLIP_HORIZONTAL:
+				swap(textureCoords, 0, 2);
+				swap(textureCoords, 4, 6);
+				break;
+			case Transformation.FLIP_VERTICAL:
+				swap(textureCoords, 1, 5);
+				swap(textureCoords, 3, 7);
+				break;
+			case Transformation.FLIP_HORIZONTAL_VERTICAL:
+				swap(textureCoords, 0, 2);
+				swap(textureCoords, 4, 6);
+
+				swap(textureCoords, 1, 5);
+				swap(textureCoords, 3, 7);
+				break;
+			case Transformation.FLIP_NONE:
+			default:
+				break;
+		}
+	}
+
+	private static void resolveRotate(int rotation) {
+		float x, y;
+		switch (rotation) {
+			case 90:
+				x = textureCoords[0];
+				y = textureCoords[1];
+				textureCoords[0] = textureCoords[4];
+				textureCoords[1] = textureCoords[5];
+				textureCoords[4] = textureCoords[6];
+				textureCoords[5] = textureCoords[7];
+				textureCoords[6] = textureCoords[2];
+				textureCoords[7] = textureCoords[3];
+				textureCoords[2] = x;
+				textureCoords[3] = y;
+				break;
+			case 180:
+				swap(textureCoords, 0, 6);
+				swap(textureCoords, 1, 7);
+				swap(textureCoords, 2, 4);
+				swap(textureCoords, 3, 5);
+				break;
+			case 270:
+				x = textureCoords[0];
+				y = textureCoords[1];
+				textureCoords[0] = textureCoords[2];
+				textureCoords[1] = textureCoords[3];
+				textureCoords[2] = textureCoords[6];
+				textureCoords[3] = textureCoords[7];
+				textureCoords[6] = textureCoords[4];
+				textureCoords[7] = textureCoords[5];
+				textureCoords[4] = x;
+				textureCoords[5] = y;
+				break;
+			case 0:
+			default:
+				break;
+		}
+	}
+
+//	/**
+//	 * 缩放变换
+//	 * @param vertices 		顶点坐标系
+//	 * @param inputWidth 	输入宽度
+//	 * @param inputHeight 	输入高度
+//	 * @param outputWidth	输出宽度
+//	 * @param outputHeight	输出高度
+//	 * @param scaleType		缩放类型
+//	 * @return
+//	 */
+//	public static float[] resolveScale(float[] vertices,int inputWidth, int inputHeight, int outputWidth, int outputHeight,
+//							  int scaleType) {
+//		if (scaleType == Transformation.SCALE_TYPE_FIT_XY) {
+//			// The default is FIT_XY
+//			return vertices;
+//		}
+//
+//		// Note: scale type need to be implemented by adjusting
+//		// the vertices (not textureCoords).
+//		if (inputWidth * outputHeight == inputHeight * outputWidth) {
+//			// Optional optimization: If input w/h aspect is the same as output's,
+//			// there is no need to adjust vertices at all.
+//			return vertices;
+//		}
+//
+//		float inputAspect = inputWidth / (float) inputHeight;
+//		float outputAspect = outputWidth / (float) outputHeight;
+//
+//		if (scaleType == Transformation.SCALE_TYPE_CENTER_CROP) {
+//			if (inputAspect < outputAspect) {
+//				float heightRatio = outputAspect / inputAspect;
+//				vertices[1] *= heightRatio;
+//				vertices[3] *= heightRatio;
+//				vertices[5] *= heightRatio;
+//				vertices[7] *= heightRatio;
+//			} else {
+//				float widthRatio = inputAspect / outputAspect;
+//				vertices[0] *= widthRatio;
+//				vertices[2] *= widthRatio;
+//				vertices[4] *= widthRatio;
+//				vertices[6] *= widthRatio;
+//			}
+//		} else if (scaleType == Transformation.SCALE_TYPE_CENTER_INSIDE) {
+//			if (inputAspect < outputAspect) {
+//				float widthRatio = inputAspect / outputAspect;
+//				vertices[0] *= widthRatio;
+//				vertices[2] *= widthRatio;
+//				vertices[4] *= widthRatio;
+//				vertices[6] *= widthRatio;
+//			} else {
+//				float heightRatio = outputAspect / inputAspect;
+//				vertices[1] *= heightRatio;
+//				vertices[3] *= heightRatio;
+//				vertices[5] *= heightRatio;
+//				vertices[7] *= heightRatio;
+//			}
+//		}
+//		return vertices;
+//	}
+
+
+	private static void swap(float[] arr, int index1, int index2) {
+		float temp = arr[index1];
+		arr[index1] = arr[index2];
+		arr[index2] = temp;
+	}
+}
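
A short sketch of how the helper is driven by a Transformation (the values are arbitrary examples):

    // Rotate 90 degrees and mirror horizontally; the coordinate array is modified in place.
    Transformation t = new Transformation();
    t.setRotation(90);
    t.setFlip(Transformation.FLIP_HORIZONTAL);
    float[] coords = TransUtil.getTransformationCo(MatrixUtils.getOriginalTextureCo(), t);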

+ 88 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/Transformation.java

@@ -0,0 +1,88 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+import android.util.Size;
+
+/**
+ * Created by Yj on 2017/10/30.
+ * Describes image rotation, flipping, scaling and cropping.
+ */
+
+public class Transformation {
+    public static final Rect FULL_RECT = new Rect(0, 0, 1, 1);
+
+    public static final int FLIP_NONE = 2001;
+    public static final int FLIP_HORIZONTAL = 2002;
+    public static final int FLIP_VERTICAL = 2003;
+    public static final int FLIP_HORIZONTAL_VERTICAL = 2004;
+
+    private Rect cropRect = FULL_RECT;
+    private int flip = FLIP_NONE;
+    private int rotation = 0;
+    private Size inputSize;
+    private Size outputSize;
+    private int scaleType = 0;
+
+    public void setCrop(Rect cropRect) {
+        this.cropRect = cropRect;
+    }
+
+    public void setFlip(int flip) {
+        this.flip = flip;
+    }
+
+    public void setRotation(int rotation) {
+        this.rotation = rotation;
+    }
+
+    public void setInputSize(Size inputSize) {
+        this.inputSize = inputSize;
+    }
+
+    public void setOutputSize(Size outputSize) {
+        this.outputSize = outputSize;
+    }
+
+    public Rect getCropRect() {
+        return cropRect;
+    }
+
+    public int getFlip() {
+        return flip;
+    }
+
+    public int getRotation() {
+        return rotation;
+    }
+
+    public Size getInputSize() {
+        return inputSize;
+    }
+
+    public Size getOutputSize() {
+        return outputSize;
+    }
+
+    public int getScaleType() {
+        return scaleType;
+    }
+
+    public void setScale(Size inputSize, Size outputSize, int scaleType) {
+        this.inputSize = inputSize;
+        this.outputSize = outputSize;
+        this.scaleType = scaleType;
+    }
+
+    public static class Rect {
+        final float x;
+        final float y;
+        final float width;
+        final float height;
+
+        public Rect(final float x, final float y, final float width, final float height) {
+            this.x = x;
+            this.y = y;
+            this.width = width;
+            this.height = height;
+        }
+    }
+}
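
For example, cropping to the center quarter of the frame (a sketch; Rect takes x, y, width, height in 0..1 texture space):

    Transformation crop = new Transformation();
    crop.setCrop(new Transformation.Rect(0.25f, 0.25f, 0.5f, 0.5f));
    float[] coords = TransUtil.getTransformationCo(MatrixUtils.getOriginalTextureCo(), crop);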

+ 80 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/OpenGL/WrapRenderer.java

@@ -0,0 +1,80 @@
+package com.joe.camera2recorddemo.OpenGL;
+
+
+import com.joe.camera2recorddemo.OpenGL.Filter.OesFilter;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+/**
+ * Created by aiya on 2017/9/12.
+ */
+
+class WrapRenderer implements Renderer{
+
+    private Renderer mRenderer;
+    private OesFilter mFilter;
+    private FrameBuffer mFrameBuffer;
+
+    public static final int TYPE_MOVE=0;
+    public static final int TYPE_CAMERA=1;
+    public static final int TYPE_SURFACE=2;
+
+    public WrapRenderer(Renderer renderer){
+        this.mRenderer=renderer;
+        mFrameBuffer=new FrameBuffer();
+        mFilter=new OesFilter();
+        if(renderer!=null){
+            MatrixUtils.flip(mFilter.getVertexMatrix(),false,true);
+        }
+    }
+
+    public OesFilter getmFilter() {
+        return mFilter;
+    }
+
+    public void setFlag(int flag){
+        if(flag==TYPE_SURFACE){
+            mFilter.setVertexCo(MatrixUtils.getSurfaceVertexCo());
+        }else if(flag==TYPE_CAMERA){
+            mFilter.setVertexCo(MatrixUtils.getCameraVertexCo());
+        }else if(flag==TYPE_MOVE){
+            mFilter.setVertexCo(MatrixUtils.getMoveVertexCo());
+        }
+    }
+
+    public float[] getTextureMatrix(){
+        return mFilter.getTextureMatrix();
+    }
+
+    @Override
+    public void create() {
+        mFilter.create();
+        if(mRenderer!=null){
+            mRenderer.create();
+        }
+    }
+
+    @Override
+    public void sizeChanged(int width, int height) {
+        mFilter.sizeChanged(width, height);
+        if(mRenderer!=null){
+            mRenderer.sizeChanged(width, height);
+        }
+    }
+
+    @Override
+    public void draw(int texture) {
+        if(mRenderer!=null){
+            mRenderer.draw(mFilter.drawToTexture(texture));
+        }else{
+            mFilter.draw(texture);
+        }
+    }
+
+    @Override
+    public void destroy() {
+        if(mRenderer!=null){
+            mRenderer.destroy();
+        }
+        mFilter.destroy();
+    }
+}

+ 150 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraParamUtil.java

@@ -0,0 +1,150 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.Log;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+public class CameraParamUtil {
+    private static final String TAG = "JCameraView";
+    private static CameraParamUtil cameraParamUtil = null;
+    private CameraSizeComparator sizeComparator = new CameraSizeComparator();
+
+    private CameraParamUtil() {
+
+    }
+
+    public static CameraParamUtil getInstance() {
+        if (cameraParamUtil == null) {
+            cameraParamUtil = new CameraParamUtil();
+            return cameraParamUtil;
+        } else {
+            return cameraParamUtil;
+        }
+    }
+
+    public Camera.Size getPreviewSize(List<Camera.Size> list, int th, float rate) {
+        Collections.sort(list, sizeComparator);
+        int i = 0;
+        for (Camera.Size s : list) {
+            if ((s.width > th) && equalRate(s, rate)) {
+                Log.i(TAG, "MakeSure Preview :w = " + s.width + " h = " + s.height);
+                break;
+            }
+            i++;
+        }
+        if (i == list.size()) {
+            return getBestSize(list, rate);
+        } else {
+            return list.get(i);
+        }
+    }
+
+    public Camera.Size getPictureSize(List<Camera.Size> list, int th, float rate) {
+        Collections.sort(list, sizeComparator);
+        int i = 0;
+        for (Camera.Size s : list) {
+            if ((s.width > th) && equalRate(s, rate)) {
+                Log.i(TAG, "MakeSure Picture :w = " + s.width + " h = " + s.height);
+                break;
+            }
+            i++;
+        }
+        if (i == list.size()) {
+            return getBestSize(list, rate);
+        } else {
+            return list.get(i);
+        }
+    }
+
+    private Camera.Size getBestSize(List<Camera.Size> list, float rate) {
+        float previewDisparity = 100;
+        int index = 0;
+        for (int i = 0; i < list.size(); i++) {
+            Camera.Size cur = list.get(i);
+            float prop = (float) cur.width / (float) cur.height;
+            if (Math.abs(rate - prop) < previewDisparity) {
+                previewDisparity = Math.abs(rate - prop);
+                index = i;
+            }
+        }
+        return list.get(index);
+    }
+
+
+    private boolean equalRate(Camera.Size s, float rate) {
+        float r = (float) (s.width) / (float) (s.height);
+        return Math.abs(r - rate) <= 0.2;
+    }
+
+    public boolean isSupportedFocusMode(List<String> focusList, String focusMode) {
+        for (int i = 0; i < focusList.size(); i++) {
+            if (focusMode.equals(focusList.get(i))) {
+                Log.i(TAG, "FocusMode supported " + focusMode);
+                return true;
+            }
+        }
+        Log.i(TAG, "FocusMode not supported " + focusMode);
+        return false;
+    }
+
+    public boolean isSupportedPictureFormats(List<Integer> supportedPictureFormats, int jpeg) {
+        for (int i = 0; i < supportedPictureFormats.size(); i++) {
+            if (jpeg == supportedPictureFormats.get(i)) {
+                Log.i(TAG, "Formats supported " + jpeg);
+                return true;
+            }
+        }
+        Log.i(TAG, "Formats not supported " + jpeg);
+        return false;
+    }
+
+    public int getCameraDisplayOrientation(Context context, int cameraId) {
+        Camera.CameraInfo info = new Camera.CameraInfo();
+        Camera.getCameraInfo(cameraId, info);
+        WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+        int rotation = wm.getDefaultDisplay().getRotation();
+        int degrees = 0;
+        switch (rotation) {
+            case Surface.ROTATION_0:
+                degrees = 0;
+                break;
+            case Surface.ROTATION_90:
+                degrees = 90;
+                break;
+            case Surface.ROTATION_180:
+                degrees = 180;
+                break;
+            case Surface.ROTATION_270:
+                degrees = 270;
+                break;
+        }
+        int result;
+        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            result = (info.orientation + degrees) % 360;
+            result = (360 - result) % 360;   // compensate the mirror
+        } else {
+            // back-facing
+            result = (info.orientation - degrees + 360) % 360;
+        }
+        return result;
+    }
+
+    private class CameraSizeComparator implements Comparator<Camera.Size> {
+        public int compare(Camera.Size lhs, Camera.Size rhs) {
+            if (lhs.width == rhs.width) {
+                return 0;
+            } else if (lhs.width > rhs.width) {
+                return 1;
+            } else {
+                return -1;
+            }
+        }
+
+    }
+}
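
Typical camera setup with these helpers (a sketch; the 1000/1200 thresholds mirror the values CameraRecordView uses below):

    Camera camera = Camera.open(0);
    Camera.Parameters params = camera.getParameters();
    float ratio = 16f / 9f; // target aspect ratio, e.g. view height / view width
    Camera.Size preview = CameraParamUtil.getInstance()
            .getPreviewSize(params.getSupportedPreviewSizes(), 1000, ratio);
    Camera.Size picture = CameraParamUtil.getInstance()
            .getPictureSize(params.getSupportedPictureSizes(), 1200, ratio);
    params.setPreviewSize(preview.width, preview.height);
    params.setPictureSize(picture.width, picture.height);
    camera.setParameters(params);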

+ 47 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/CameraUtils.java

@@ -0,0 +1,47 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.app.Activity;
+import android.hardware.Camera;
+import android.view.Surface;
+
+/**
+ * Created by Administrator on 2017/10/10.
+ */
+
+public class CameraUtils {
+    /**
+     * Fixes the upside-down preview of the front-facing camera.
+     *
+     * @param activity the hosting activity
+     * @param cameraId camera id
+     * @param camera   the opened camera
+     */
+    public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
+        Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+        Camera.getCameraInfo(cameraId, info);
+        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+        int degrees = 0;
+        switch (rotation) {
+            case Surface.ROTATION_0:
+                degrees = 90;
+                break;
+            case Surface.ROTATION_90:
+                degrees = 180;
+                break;
+            case Surface.ROTATION_180:
+                degrees = 270;
+                break;
+            case Surface.ROTATION_270:
+                degrees = 0;
+                break;
+        }
+
+        int result;
+        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            result = (info.orientation + degrees) % 360;
+            result = (360 - result) % 360;  // compensate the mirror
+        } else {  // back-facing
+            result = (info.orientation - degrees + 360) % 360;
+        }
+        camera.setDisplayOrientation(result);
+    }
+}
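
Called once after opening the camera and before starting preview (a sketch; activity is the hosting Activity):

    int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
    Camera camera = Camera.open(cameraId);
    CameraUtils.setCameraDisplayOrientation(activity, cameraId, camera);
    camera.startPreview();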

+ 87 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/FormatUtils.java

@@ -0,0 +1,87 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.util.Size;
+
+import com.joe.camera2recorddemo.MediaCodecUtil.TrackUtils;
+
+/**
+ * Utility for reading video metadata.
+ * Created by Administrator on 2017/12/6.
+ */
+
+public class FormatUtils {
+
+    /**
+     * Returns the video size, corrected for rotation.
+     *
+     * @param url video path
+     * @return the video size
+     */
+    public static Size getVideoSize(String url) {
+        int mInputHeight = 0, mInputWidth = 0;
+        MediaExtractor extractor = new MediaExtractor();
+        try {
+            extractor.setDataSource(url);
+            int trackIndex = TrackUtils.selectVideoTrack(extractor);
+            if (trackIndex < 0) {
+                throw new RuntimeException("No video track found in " + url);
+            }
+            extractor.selectTrack(trackIndex);
+            MediaFormat mediaFormat = extractor.getTrackFormat(trackIndex);
+            //read width/height, honoring the rotation flag
+            int rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
+            if (rotation == 90 || rotation == 270) {
+                mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+                mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+            } else {
+                mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+                mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return new Size(mInputWidth, mInputHeight);
+    }
+
+    /**
+     * Returns the video format (size and rotation).
+     *
+     * @param url video path
+     * @return the parsed format
+     */
+    public static VideoFormat getVideoFormat(String url) {
+        VideoFormat videoFormat = new VideoFormat();
+        videoFormat.height = 0;
+        videoFormat.width = 0;
+        MediaExtractor extractor = new MediaExtractor();
+        try {
+            extractor.setDataSource(url);
+            int trackIndex = TrackUtils.selectVideoTrack(extractor);
+            if (trackIndex < 0) {
+                throw new RuntimeException("No video track found in " + url);
+            }
+            extractor.selectTrack(trackIndex);
+            MediaFormat mediaFormat = extractor.getTrackFormat(trackIndex);
+            //read width/height, honoring the rotation flag
+            videoFormat.rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
+            if (videoFormat.rotation == 90 || videoFormat.rotation == 270) {
+                videoFormat.height = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+                videoFormat.width = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+            } else {
+                videoFormat.width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
+                videoFormat.height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return videoFormat;
+    }
+
+    public static class VideoFormat {
+        public int width;
+        public int height;
+        public int rotation;
+    }
+}
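
Reading rotation-corrected dimensions before processing (a sketch; the path is a placeholder):

    FormatUtils.VideoFormat format = FormatUtils.getVideoFormat("/sdcard/DCIM/input.mp4");
    Log.d("FormatUtils", "video " + format.width + "x" + format.height
            + ", rotation " + format.rotation);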

+ 133 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GetPathFromUri4kitkat.java

@@ -0,0 +1,133 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.annotation.SuppressLint;
+import android.content.ContentUris;
+import android.content.Context;
+import android.database.Cursor;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Environment;
+import android.provider.DocumentsContract;
+import android.provider.MediaStore;
+
+public class GetPathFromUri4kitkat {
+
+    /**
+     * Resolves a Uri to an absolute file path on Android 4.4+, where the pre-KitKat approach no longer works.
+     */
+    @SuppressLint("NewApi")
+    public static String getPath(final Context context, final Uri uri) {
+
+        final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
+
+        // DocumentProvider
+        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
+            // ExternalStorageProvider
+            if (isExternalStorageDocument(uri)) {
+                final String docId = DocumentsContract.getDocumentId(uri);
+                final String[] split = docId.split(":");
+                final String type = split[0];
+
+                if ("primary".equalsIgnoreCase(type)) {
+                    return Environment.getExternalStorageDirectory() + "/" + split[1];
+                }
+
+                // TODO handle non-primary volumes
+            }
+            // DownloadsProvider
+            else if (isDownloadsDocument(uri)) {
+
+                final String id = DocumentsContract.getDocumentId(uri);
+                final Uri contentUri = ContentUris.withAppendedId(
+                        Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
+
+                return getDataColumn(context, contentUri, null, null);
+            }
+            // MediaProvider
+            else if (isMediaDocument(uri)) {
+                final String docId = DocumentsContract.getDocumentId(uri);
+                final String[] split = docId.split(":");
+                final String type = split[0];
+
+                Uri contentUri = null;
+                if ("image".equals(type)) {
+                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
+                } else if ("video".equals(type)) {
+                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
+                } else if ("audio".equals(type)) {
+                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
+                }
+
+                final String selection = "_id=?";
+                final String[] selectionArgs = new String[]{split[1]};
+
+                return getDataColumn(context, contentUri, selection, selectionArgs);
+            }
+        }
+        // MediaStore (and general)
+        else if ("content".equalsIgnoreCase(uri.getScheme())) {
+            return getDataColumn(context, uri, null, null);
+        }
+        // File
+        else if ("file".equalsIgnoreCase(uri.getScheme())) {
+            return uri.getPath();
+        }
+
+        return null;
+    }
+
+    /**
+     * Get the value of the data column for this Uri. This is useful for
+     * MediaStore Uris, and other file-based ContentProviders.
+     *
+     * @param context       The context.
+     * @param uri           The Uri to query.
+     * @param selection     (Optional) Filter used in the query.
+     * @param selectionArgs (Optional) Selection arguments used in the query.
+     * @return The value of the _data column, which is typically a file path.
+     */
+    public static String getDataColumn(Context context, Uri uri, String selection,
+                                       String[] selectionArgs) {
+
+        Cursor cursor = null;
+        final String column = "_data";
+        final String[] projection = {column};
+
+        try {
+            cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
+                    null);
+            if (cursor != null && cursor.moveToFirst()) {
+                final int column_index = cursor.getColumnIndexOrThrow(column);
+                return cursor.getString(column_index);
+            }
+        } finally {
+            if (cursor != null)
+                cursor.close();
+        }
+        return null;
+    }
+
+    /**
+     * @param uri The Uri to check.
+     * @return Whether the Uri authority is ExternalStorageProvider.
+     */
+    public static boolean isExternalStorageDocument(Uri uri) {
+        return "com.android.externalstorage.documents".equals(uri.getAuthority());
+    }
+
+    /**
+     * @param uri The Uri to check.
+     * @return Whether the Uri authority is DownloadsProvider.
+     */
+    public static boolean isDownloadsDocument(Uri uri) {
+        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
+    }
+
+    /**
+     * @param uri The Uri to check.
+     * @return Whether the Uri authority is MediaProvider.
+     */
+    public static boolean isMediaDocument(Uri uri) {
+        return "com.android.providers.media.documents".equals(uri.getAuthority());
+    }
+}

+ 106 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/GpuUtils.java

@@ -0,0 +1,106 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.content.res.Resources;
+import android.opengl.GLES20;
+import android.util.Log;
+
+import java.io.InputStream;
+
+public enum GpuUtils {
+    ;
+
+    /**
+     * Reads a text file from assets.
+     *
+     * @param mRes res
+     * @param path file path
+     * @return file contents, or null on failure
+     */
+    public static String readText(Resources mRes, String path) {
+        StringBuilder result = new StringBuilder();
+        try {
+            InputStream is = mRes.getAssets().open(path);
+            int ch;
+            byte[] buffer = new byte[1024];
+            while (-1 != (ch = is.read(buffer))) {
+                result.append(new String(buffer, 0, ch));
+            }
+        } catch (Exception e) {
+            return null;
+        }
+        return result.toString().replaceAll("\\r\\n", "\n");
+    }
+
+    /**
+     * Compiles a shader.
+     *
+     * @param shaderType shader type
+     * @param source     shader source
+     * @return shaderId
+     */
+    public static int loadShader(int shaderType, String source) {
+        if (source == null) {
+            glError(1, "Shader source ==null : shaderType =" + shaderType);
+            return 0;
+        }
+        int shader = GLES20.glCreateShader(shaderType);
+        if (0 != shader) {
+            GLES20.glShaderSource(shader, source);
+            GLES20.glCompileShader(shader);
+            int[] compiled = new int[1];
+            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+            if (compiled[0] == 0) {
+                glError(1, "Could not compile shader:" + shaderType);
+                glError(1, "GLES20 Error:" + GLES20.glGetShaderInfoLog(shader));
+                GLES20.glDeleteShader(shader);
+                shader = 0;
+            }
+        }
+        return shader;
+    }
+
+    /**
+     * Creates a GL program from source strings.
+     *
+     * @param vertexSource   vertex shader source
+     * @param fragmentSource fragment shader source
+     * @return programId
+     */
+    public static int createGLProgram(String vertexSource, String fragmentSource) {
+        int vertex = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+        if (vertex == 0) return 0;
+        int fragment = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+        if (fragment == 0) return 0;
+        int program = GLES20.glCreateProgram();
+        if (program != 0) {
+            GLES20.glAttachShader(program, vertex);
+            GLES20.glAttachShader(program, fragment);
+            GLES20.glLinkProgram(program);
+            int[] linkStatus = new int[1];
+            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+            if (linkStatus[0] != GLES20.GL_TRUE) {
+                glError(1, "Could not link program:" + GLES20.glGetProgramInfoLog(program));
+                GLES20.glDeleteProgram(program);
+                program = 0;
+            }
+        }
+        return program;
+    }
+
+    /**
+     * Creates a GL program from shader files in assets.
+     *
+     * @param res      res
+     * @param vertex   vertex shader path
+     * @param fragment fragment shader path
+     * @return programId
+     */
+    public static int createGLProgramByAssetsFile(Resources res, String vertex, String fragment) {
+        return createGLProgram(readText(res, vertex), readText(res, fragment));
+    }
+
+    private static void glError(int code, Object index) {
+        Log.e("C2D", "glError:" + code + "---" + index);
+    }
+
+}
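
Compiling the base shaders shipped in this commit's assets (a sketch; must run on a thread with a current GL context, inside a Context for getResources()):

    int program = GpuUtils.createGLProgramByAssetsFile(getResources(),
            "shader/base.vert", "shader/base.frag");
    if (program != 0) {
        GLES20.glUseProgram(program);
    }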

+ 195 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/MatrixUtils.java

@@ -0,0 +1,195 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.opengl.Matrix;
+
+public enum MatrixUtils {
+    ;
+    public static final int TYPE_FITXY = 0;
+    public static final int TYPE_CENTERCROP = 1;
+    public static final int TYPE_CENTERINSIDE = 2;
+    public static final int TYPE_FITSTART = 3;
+    public static final int TYPE_FITEND = 4;
+
+    /**
+     * @return the original texture coordinate
+     */
+    public static float[] getOriginalTextureCo() {
+        return new float[]{
+                0.0f, 0.0f,
+                0.0f, 1.0f,
+                1.0f, 0.0f,
+                1.0f, 1.0f
+        };
+    }
+
+    /**
+     * @return the original vertex coordinate
+     */
+    public static float[] getOriginalVertexCo() {
+        return new float[]{
+                -1.0f, 1.0f,
+                -1.0f, -1.0f,
+                1.0f, 1.0f,
+                1.0f, -1.0f
+        };
+    }
+
+    /**
+     * @return the original matrix
+     */
+    public static float[] getOriginalMatrix() {
+        return new float[]{
+                1, 0, 0, 0,
+                0, 1, 0, 0,
+                0, 0, 1, 0,
+                0, 0, 0, 1
+        };
+    }
+
+    /**
+     * calculate appointed matrix by image size and view size
+     *
+     * @param matrix     returns the result
+     * @param type       one of TYPE_FITEND,TYPE_CENTERCROP,TYPE_CENTERINSIDE,TYPE_FITSTART,TYPE_FITXY
+     * @param imgWidth   image width
+     * @param imgHeight  image height
+     * @param viewWidth  view width
+     * @param viewHeight view height
+     */
+    public static void getMatrix(float[] matrix, int type, int imgWidth, int imgHeight, int viewWidth,
+                                 int viewHeight) {
+        if (imgHeight > 0 && imgWidth > 0 && viewWidth > 0 && viewHeight > 0) {
+            float[] projection = new float[16];
+            float[] camera = new float[16];
+            if (type == TYPE_FITXY) {
+                Matrix.orthoM(projection, 0, -1, 1, -1, 1, 1, 3);
+                Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
+                Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
+            }
+            float sWhView = (float) viewWidth / viewHeight;
+            float sWhImg = (float) imgWidth / imgHeight;
+            if (sWhImg > sWhView) {
+                switch (type) {
+                    case TYPE_CENTERCROP:
+                        Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
+                        break;
+                    case TYPE_CENTERINSIDE:
+                        Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
+                        break;
+                    case TYPE_FITSTART:
+                        Matrix.orthoM(projection, 0, -1, 1, 1 - 2 * sWhImg / sWhView, 1, 1, 3);
+                        break;
+                    case TYPE_FITEND:
+                        Matrix.orthoM(projection, 0, -1, 1, -1, 2 * sWhImg / sWhView - 1, 1, 3);
+                        break;
+                }
+            } else {
+                switch (type) {
+                    case TYPE_CENTERCROP:
+                        Matrix.orthoM(projection, 0, -1, 1, -sWhImg / sWhView, sWhImg / sWhView, 1, 3);
+                        break;
+                    case TYPE_CENTERINSIDE:
+                        Matrix.orthoM(projection, 0, -sWhView / sWhImg, sWhView / sWhImg, -1, 1, 1, 3);
+                        break;
+                    case TYPE_FITSTART:
+                        Matrix.orthoM(projection, 0, -1, 2 * sWhView / sWhImg - 1, -1, 1, 1, 3);
+                        break;
+                    case TYPE_FITEND:
+                        Matrix.orthoM(projection, 0, 1 - 2 * sWhView / sWhImg, 1, -1, 1, 1, 3);
+                        break;
+                }
+            }
+            Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
+            Matrix.multiplyMM(matrix, 0, projection, 0, camera, 0);
+        }
+    }
+
+    /**
+     * @param m the 4x4 matrix to flip
+     * @param x flip along the X axis
+     * @param y flip along the Y axis
+     * @return the flipped matrix
+     */
+    public static float[] flip(float[] m, boolean x, boolean y) {
+        if (x || y) {
+            Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
+        }
+        return m;
+    }
+
+    /**
+     * Rotates a matrix.
+     *
+     * @param m the 4x4 matrix to rotate
+     * @param r rotation angle in degrees
+     * @return the rotated matrix
+     */
+    public static float[] rotation(float[] m, float r) {
+        Matrix.rotateM(m, 0, r, 0.0f, 0.0f, 1.0f);
+        return m;
+    }
+
+    public static float[] crop(float[] m, float x, float y, float width, float height) {
+        float minX = x;
+        float minY = y;
+        float maxX = minX + width;
+        float maxY = minY + height;
+
+        // left bottom
+        m[0] = minX;
+        m[1] = minY;
+        // right bottom
+        m[2] = maxX;
+        m[3] = minY;
+        // left top
+        m[4] = minX;
+        m[5] = maxY;
+        // right top
+        m[6] = maxX;
+        m[7] = maxY;
+
+        return m;
+    }
+
+    /**
+     * Returns the vertex coordinates for Surface rendering.
+     *
+     * @return the vertex coordinates
+     */
+    public static float[] getSurfaceVertexCo() {
+        return new float[]{
+                -1.0f, -1.0f,
+                -1.0f, 1.0f,
+                1.0f, -1.0f,
+                1.0f, 1.0f,
+        };
+    }
+
+    /**
+     * Returns the vertex coordinates for camera rendering.
+     *
+     * @return the vertex coordinates
+     */
+    public static float[] getCameraVertexCo() {
+        return new float[]{
+                -1.0f, 1.0f,
+                1.0f, 1.0f,
+                -1.0f, -1.0f,
+                1.0f, -1.0f,
+        };
+    }
+
+    /**
+     * Returns the vertex coordinates for local video processing.
+     *
+     * @return the vertex coordinates
+     */
+    public static float[] getMoveVertexCo() {
+        return new float[]{
+                1.0f, -1.0f,
+                -1.0f, -1.0f,
+                1.0f, 1.0f,
+                -1.0f, 1.0f,
+        };
+    }
+}
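
The matrix helpers compose the same way CameraRecordView.sizeChanged uses them below (a sketch; the sizes are examples):

    // Center-crop a 1280x720 image into a 1080x1920 view, then flip vertically.
    float[] matrix = MatrixUtils.getOriginalMatrix();
    MatrixUtils.getMatrix(matrix, MatrixUtils.TYPE_CENTERCROP, 1280, 720, 1080, 1920);
    MatrixUtils.flip(matrix, false, true);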

+ 36 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/Utils/UriUtils.java

@@ -0,0 +1,36 @@
+package com.joe.camera2recorddemo.Utils;
+
+import android.content.ContentResolver;
+import android.content.Context;
+import android.net.Uri;
+import android.util.Log;
+
+/**
+ * Created by Yj on 2017/10/16.
+ */
+
+public class UriUtils {
+
+    /**
+     * Returns the absolute file path for a Uri.
+     *
+     * @param context context
+     * @param uri     the Uri to resolve
+     * @return the file path, or null
+     */
+    public static String getRealFilePath(Context context, final Uri uri) {
+        if (null == uri) return null;
+        final String scheme = uri.getScheme();
+        String data = null;
+        if (scheme == null) {
+            Log.e("UriUtils", "scheme is null");
+            data = uri.getPath();
+        } else if (ContentResolver.SCHEME_FILE.equals(scheme)) {
+            data = uri.getPath();
+            Log.e("UriUtils", "SCHEME_FILE");
+        } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)) {
+            data = GetPathFromUri4kitkat.getPath(context, uri);
+        }
+        return data;
+    }
+}
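
Typically applied to the Uri returned by a document picker (a sketch; context, data and mp4Processor belong to the hypothetical caller):

    // e.g. in onActivityResult after an ACTION_GET_CONTENT pick
    String path = UriUtils.getRealFilePath(context, data.getData());
    if (path != null) {
        mp4Processor.setInputPath(path);
    }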

+ 228 - 0
OpenGLlibrary/src/main/java/com/joe/camera2recorddemo/View/CameraRecordView.java

@@ -0,0 +1,228 @@
+package com.joe.camera2recorddemo.View;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.util.AttributeSet;
+import android.view.Surface;
+import android.view.TextureView;
+
+import com.joe.camera2recorddemo.Entity.SizeInfo;
+import com.joe.camera2recorddemo.OpenGL.CameraRecorder;
+import com.joe.camera2recorddemo.OpenGL.Filter.Mp4EditFilter;
+import com.joe.camera2recorddemo.OpenGL.Renderer;
+import com.joe.camera2recorddemo.Utils.CameraParamUtil;
+import com.joe.camera2recorddemo.Utils.MatrixUtils;
+
+import java.io.IOException;
+
+public class CameraRecordView extends TextureView implements Renderer {
+
+    private static final int STATE_INIT = 0;
+    private static final int STATE_RECORDING = 1;
+    private static final int STATE_PAUSE = 2;
+    private int mRecorderState;
+    // camera
+    private Camera mCamera;
+    private Camera.Parameters mParams;
+    private float screenProp;
+    private int mCurrentCameraState = 0;
+
+    private CameraRecorder mCameraRecord;
+    private Mp4EditFilter mFilter;
+    private int mCurrentFilterIndex;// current filter index
+
+    private int mCameraWidth;
+    private int mCameraHeight;
+    // private SizeInfo recordSize;
+
+    public CameraRecordView(Context context) {
+        this(context, null);
+    }
+
+    public CameraRecordView(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        init();
+    }
+
+    private void init() {
+        mFilter = new Mp4EditFilter(getResources());
+        mCameraRecord = new CameraRecorder();
+
+        setSurfaceTextureListener(new SurfaceTextureListener() {
+            @Override
+            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+                mCamera = Camera.open(0);
+                screenProp = (float) height / (float) width;
+                initCamera(screenProp);
+
+                mCameraRecord.setOutputSurface(new Surface(surface));
+                Camera.Size videoSize;
+                if (mParams.getSupportedVideoSizes() == null) {
+                    videoSize = CameraParamUtil.getInstance().getPreviewSize(mParams.getSupportedPreviewSizes(), 600,
+                            screenProp);
+                } else {
+                    videoSize = CameraParamUtil.getInstance().getPreviewSize(mParams.getSupportedVideoSizes(), 600,
+                            screenProp);
+                }
+                SizeInfo sizeInfo;
+                if (videoSize.width == videoSize.height) {
+                    sizeInfo = new SizeInfo(720, 720);
+                } else {
+                    sizeInfo = new SizeInfo(videoSize.height, videoSize.width);
+                }
+                mCameraRecord.setOutputSize(sizeInfo);
+                mCameraRecord.setRenderer(CameraRecordView.this);
+                mCameraRecord.setPreviewSize(width, height);
+                mCameraRecord.startPreview();
+            }
+
+            @Override
+            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+                mCameraRecord.setPreviewSize(width, height);
+            }
+
+            @Override
+            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+                // stop recording if still in progress
+                if (mRecorderState == STATE_RECORDING) {
+                    try {
+                        stopRecord();
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+                }
+
+                stopPreview();
+                return true;
+            }
+
+            @Override
+            public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+
+            }
+        });
+    }
+
+    @Override
+    public void create() {
+        try {
+            mCamera.setPreviewTexture(mCameraRecord.createInputSurfaceTexture());
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        Camera.Size mSize = mCamera.getParameters().getPreviewSize();
+        mCameraWidth = mSize.height;
+        mCameraHeight = mSize.width;
+
+        mCamera.startPreview();
+        mFilter.create();
+    }
+
+    @Override
+    public void sizeChanged(int width, int height) {
+        mFilter.sizeChanged(width, height);
+        MatrixUtils.getMatrix(mFilter.getVertexMatrix(), MatrixUtils.TYPE_CENTERCROP,
+                mCameraWidth, mCameraHeight, width, height);
+        MatrixUtils.flip(mFilter.getVertexMatrix(), false, true);
+    }
+
+    @Override
+    public void draw(int texture) {
+        mFilter.draw(texture);
+    }
+
+    @Override
+    public void destroy() {
+        mFilter.destroy();
+    }
+
+    public void initCamera(float screenProp) {
+        if (mCamera != null) {
+            mParams = mCamera.getParameters();
+            Camera.Size previewSize = CameraParamUtil.getInstance().getPreviewSize(mParams
+                    .getSupportedPreviewSizes(), 1000, screenProp);
+            Camera.Size pictureSize = CameraParamUtil.getInstance().getPictureSize(mParams
+                    .getSupportedPictureSizes(), 1200, screenProp);
+            mParams.setPreviewSize(previewSize.width, previewSize.height);
+            mParams.setPictureSize(pictureSize.width, pictureSize.height);
+            if (CameraParamUtil.getInstance().isSupportedFocusMode(
+                    mParams.getSupportedFocusModes(),
+                    Camera.Parameters.FOCUS_MODE_AUTO)) {
+                mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+            }
+
+            if (CameraParamUtil.getInstance().isSupportedPictureFormats(mParams.getSupportedPictureFormats(),
+                    ImageFormat.JPEG)) {
+                mParams.setPictureFormat(ImageFormat.JPEG);
+                mParams.setJpegQuality(100);
+            }
+            mCamera.setParameters(mParams);
+            mParams = mCamera.getParameters();
+        }
+    }
+
+    public void switchFilter(int index) {
+        if (mCurrentFilterIndex != index) {
+            mCurrentFilterIndex = index;
+            mFilter.getChooseFilter().setChangeType(mCurrentFilterIndex);
+        }
+    }
+
+    public void switchCamera() {
+        if (Camera.getNumberOfCameras() > 1) {
+            stopPreview();
+            mCurrentCameraState += 1;
+            if (mCurrentCameraState > Camera.getNumberOfCameras() - 1)
+                mCurrentCameraState = 0;
+            mCamera = Camera.open(mCurrentCameraState);
+            initCamera(screenProp);// parameters must be re-applied after switching cameras
+/*
+            if (mParams == null) {
+                initCamera(screenProp);
+            } else {
+                // some devices throw java.lang.RuntimeException: setParameters failed
+               mCamera.setParameters(mParams);
+            }
+*/
+            mCameraRecord.startPreview();
+        }
+    }
+
+    public void stopPreview() {
+        try {
+            mCameraRecord.stopPreview();
+        } catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+        if (mCamera != null) {
+            mCamera.stopPreview();
+            mCamera.release();
+            mCamera = null;
+        }
+    }
+
+    /**
+     * Starts recording.
+     */
+    public void startRecord(String filePath) throws IOException {
+        mCameraRecord.setOutputPath(filePath);
+        mCameraRecord.startRecord();
+
+        mRecorderState = STATE_RECORDING;
+    }
+
+    /**
+     * Stop recording.
+     */
+    public void stopRecord() throws InterruptedException {
+        mCameraRecord.stopRecord();
+
+        mRecorderState = STATE_INIT;
+    }
+
+    public Camera getCamera() {
+        return mCamera;
+    }
+}
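
A minimal usage sketch of the controller above. The enclosing class name falls outside this hunk, so "CameraController" below is a hypothetical stand-in, and the output path is illustrative:

    import java.io.IOException;

    // Hypothetical host that wires the controller to simple UI callbacks.
    public class RecordScreen {
        private final CameraController controller = new CameraController();
        private boolean recording;

        // Shutter button toggles recording.
        public void onShutterClicked() {
            try {
                if (recording) {
                    controller.stopRecord();   // resets state to STATE_INIT
                } else {
                    controller.startRecord("/sdcard/DCIM/demo.mp4");
                }
                recording = !recording;
            } catch (IOException | InterruptedException e) {
                e.printStackTrace();
            }
        }

        public void onSwitchCameraClicked() { controller.switchCamera(); }

        public void onFilterChosen(int index) { controller.switchFilter(index); }

        // Release the camera whenever the hosting screen pauses.
        public void onPause() { controller.stopPreview(); }
    }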

+ 5 - 0
OpenGLlibrary/src/main/res/drawable/btn_shutter_background.xml

@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<selector xmlns:android="http://schemas.android.com/apk/res/android" >
+    <item android:state_pressed="true" android:drawable="@mipmap/btn_camera_all_click"/>
+    <item android:drawable="@mipmap/btn_camera_all"/>
+</selector>

BIN
OpenGLlibrary/src/main/res/drawable/change_camera.png


+ 9 - 0
OpenGLlibrary/src/main/res/drawable/editchoose_backgroud.xml

@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android">
+    <solid android:color="#49000000" />
+    <corners android:topLeftRadius="10dp"
+             android:topRightRadius="10dp"
+             android:bottomRightRadius="10dp"
+             android:bottomLeftRadius="10dp"/>
+    <stroke android:width="1dp" android:color="#49000000" />
+</shape>

+ 9 - 0
OpenGLlibrary/src/main/res/drawable/filterchoose_backgroud.xml

@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<shape xmlns:android="http://schemas.android.com/apk/res/android">
+    <solid android:color="#8c000000" />
+    <corners android:topLeftRadius="20dp"
+             android:topRightRadius="20dp"
+             android:bottomRightRadius="20dp"
+             android:bottomLeftRadius="20dp"/>
+    <stroke android:width="1dp" android:color="#8c000000" />
+</shape>

BIN
OpenGLlibrary/src/main/res/drawable/mp4_crop.png


BIN
OpenGLlibrary/src/main/res/drawable/mp4_flip.png


BIN
OpenGLlibrary/src/main/res/drawable/mp4_rotation.png


BIN
OpenGLlibrary/src/main/res/drawable/mp4_save.png


BIN
OpenGLlibrary/src/main/res/drawable/oneto.png


BIN
OpenGLlibrary/src/main/res/drawable/record_start.png


BIN
OpenGLlibrary/src/main/res/drawable/record_stop.png


BIN
OpenGLlibrary/src/main/res/mipmap-xxhdpi/btn_camera_all.png


BIN
OpenGLlibrary/src/main/res/mipmap-xxhdpi/btn_camera_all_click.png


+ 33 - 0
OpenGLlibrary/src/main/res/values/attrs.xml

@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+    <declare-styleable name="WheelView">
+        <!-- 中间分割线外的item数量,整个滑动数量就为 wheelItemCount * 2 + 1  -->
+        <attr name="wheelItemCount" format="integer"/>
+        <!-- 滑轮item高度 -->
+        <attr name="wheelItemSize" format="dimension"/>
+        <!-- 滑轮字体大小 -->
+        <attr name="wheelTextSize" format="dimension"/>
+        <!-- 滑轮字体颜色 -->
+        <attr name="wheelTextColor" format="color"/>
+        <!-- 滑轮中心字体颜色 -->
+        <attr name="wheelTextColorCenter" format="color"/>
+        <!-- 分割线颜色 -->
+        <attr name="dividerColor" format="color"/>
+        <!-- 布局方向 -->
+        <attr name="wheelOrientation">
+            <enum name="vertical" value="1"/>
+            <enum name="horizontal" value="2"/>
+        </attr>
+        <!-- 水平布局时不考虑只参数,  当垂直布局时的左右靠齐立体效果 -->
+        <attr name="wheelGravity">
+            <enum name="left" value="1"/>
+            <enum name="center" value="2"/>
+            <enum name="right" value="3"/>
+        </attr>
+
+        <!-- 两根分割线的距离 -->
+        <attr name="wheelDividerSize" format="dimension"/>
+    </declare-styleable>
+
+</resources>
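
The WheelView implementation itself is not part of this diff; as a sketch of how a custom view would consume the styleable above (defaults here are illustrative, and R is the module's generated resource class):

    import android.content.Context;
    import android.content.res.TypedArray;
    import android.graphics.Color;
    import android.util.AttributeSet;
    import android.view.View;

    // Illustrative constructor showing how the declared attributes are read.
    public class WheelView extends View {
        private final int itemCount;   // items above/below center; total shown = itemCount * 2 + 1
        private final float itemSize;  // height of one item, in pixels
        private final int textColor;

        public WheelView(Context context, AttributeSet attrs) {
            super(context, attrs);
            TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.WheelView);
            itemCount = a.getInt(R.styleable.WheelView_wheelItemCount, 2);
            itemSize = a.getDimension(R.styleable.WheelView_wheelItemSize, 0f);
            textColor = a.getColor(R.styleable.WheelView_wheelTextColor, Color.GRAY);
            a.recycle(); // TypedArrays are pooled and must be recycled
        }
    }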

+ 6 - 0
OpenGLlibrary/src/main/res/values/colors.xml

@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#3F51B5</color>
+    <color name="colorPrimaryDark">#303F9F</color>
+    <color name="colorAccent">#FF4081</color>
+</resources>

+ 11 - 0
OpenGLlibrary/src/main/res/values/strings.xml

@@ -0,0 +1,11 @@
+<resources>
+    <string name="app_name">Camera2RecordDemo</string>
+
+    <!-- TODO: Remove or change this placeholder text -->
+    <string name="hello_blank_fragment">Hello blank fragment</string>
+    <string name="permission_request">权限申请</string>
+    <string name="record">开始</string>
+    <string name="stop">停止</string>
+    <string name="camera_error">摄像头错误</string>
+    <string name="intro_message">信息</string>
+</resources>

+ 11 - 0
OpenGLlibrary/src/main/res/values/styles.xml

@@ -0,0 +1,11 @@
+<resources>
+
+    <!-- Base application theme. -->
+    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+        <!-- Customize your theme here. -->
+        <item name="colorPrimary">@color/colorPrimary</item>
+        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
+        <item name="colorAccent">@color/colorAccent</item>
+    </style>
+
+</resources>


+ 1 - 0
YZxing-lib/.gitignore

@@ -0,0 +1 @@
+/build

+ 32 - 0
YZxing-lib/build.gradle

@@ -0,0 +1,32 @@
+apply plugin: 'com.android.library'
+
+android {
+    compileSdkVersion compile_version
+
+    defaultConfig {
+        minSdkVersion min_version
+        targetSdkVersion target_version
+        versionCode 1
+        versionName "1.0"
+
+        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
+    }
+}
+
+dependencies {
+    implementation 'androidx.legacy:legacy-support-v4:1.0.0'
+    androidTestImplementation('androidx.test.espresso:espresso-core:3.1.0', {
+        exclude group: 'com.android.support', module: 'support-annotations'
+    })
+    implementation 'androidx.appcompat:appcompat:1.0.0'
+    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+    testImplementation 'junit:junit:4.12'
+    api 'com.google.zxing:core:3.3.0'
+}

+ 25 - 0
YZxing-lib/proguard-rules.pro

@@ -0,0 +1,25 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in /Users/yangyu/Library/Android/sdk/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile

+ 27 - 0
YZxing-lib/src/androidTest/java/com/example/qrcode/ExampleInstrumentedTest.java

@@ -0,0 +1,27 @@
+package com.example.qrcode;
+
+import android.content.Context;
+
+import androidx.test.InstrumentationRegistry;
+import androidx.test.runner.AndroidJUnit4;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Instrumentation test, which will execute on an Android device.
+ *
+ * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
+ */
+@RunWith(AndroidJUnit4.class)
+public class ExampleInstrumentedTest {
+    @Test
+    public void useAppContext() throws Exception {
+        // Context of the app under test.
+        Context appContext = InstrumentationRegistry.getTargetContext();
+
+        assertEquals("com.example.qrcode.test", appContext.getPackageName());
+    }
+}

+ 25 - 0
YZxing-lib/src/main/AndroidManifest.xml

@@ -0,0 +1,25 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.example.qrcode">
+
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.INTERNET" />
+    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+
+    <application
+        android:allowBackup="true"
+        android:label="@string/app_name"
+        android:supportsRtl="true">
+
+        <activity
+            android:name=".ScannerActivity"
+            android:screenOrientation="portrait"
+            android:theme="@style/MyAppTheme" />
+
+        <activity
+            android:name=".ShowResultActivity"
+            android:theme="@style/MyAppTheme" />
+
+    </application>
+
+</manifest>

+ 110 - 0
YZxing-lib/src/main/java/com/example/qrcode/BeepManager.java

@@ -0,0 +1,110 @@
+package com.example.qrcode;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.content.res.AssetFileDescriptor;
+import android.media.AudioManager;
+import android.media.MediaPlayer;
+import android.os.Vibrator;
+import android.preference.PreferenceManager;
+import android.util.Log;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * Created by yangyu on 17/10/19.
+ */
+
+public final class BeepManager implements MediaPlayer.OnErrorListener, Closeable {
+
+    private static final String TAG = BeepManager.class.getSimpleName();
+
+    private static final float BEEP_VOLUME = 0.10f;
+    private static final long VIBRATE_DURATION = 200L;
+
+    private final Activity activity;
+    private MediaPlayer mediaPlayer;
+    private boolean playBeep;
+    private boolean vibrate;
+
+    BeepManager(Activity activity) {
+        this.activity = activity;
+        this.mediaPlayer = null;
+        updatePrefs();
+    }
+
+    synchronized void updatePrefs() {
+        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(activity);
+        playBeep = shouldBeep(prefs, activity);
+        vibrate = prefs.getBoolean(Constant.KEY_VIBRATE, false);
+        if (playBeep && mediaPlayer == null) {
+            // The volume on STREAM_SYSTEM is not adjustable, and users found it too loud,
+            // so we now play on the music stream.
+            activity.setVolumeControlStream(AudioManager.STREAM_MUSIC);
+            mediaPlayer = buildMediaPlayer(activity);
+        }
+    }
+
+    synchronized void playBeepSoundAndVibrate() {
+        if (playBeep && mediaPlayer != null) {
+            mediaPlayer.start();
+        }
+        if (vibrate) {
+            Vibrator vibrator = (Vibrator) activity.getSystemService(Context.VIBRATOR_SERVICE);
+            vibrator.vibrate(VIBRATE_DURATION);
+        }
+    }
+
+    private static boolean shouldBeep(SharedPreferences prefs, Context activity) {
+        boolean shouldPlayBeep = prefs.getBoolean(Constant.KEY_PLAY_BEEP, true);
+        if (shouldPlayBeep) {
+            // See if sound settings overrides this
+            AudioManager audioService = (AudioManager) activity.getSystemService(Context.AUDIO_SERVICE);
+            if (audioService.getRingerMode() != AudioManager.RINGER_MODE_NORMAL) {
+                shouldPlayBeep = false;
+            }
+        }
+        return shouldPlayBeep;
+    }
+
+    private MediaPlayer buildMediaPlayer(Context activity) {
+        MediaPlayer mediaPlayer = new MediaPlayer();
+        try (AssetFileDescriptor file = activity.getResources().openRawResourceFd(R.raw.beep)) {
+            mediaPlayer.setDataSource(file.getFileDescriptor(), file.getStartOffset(), file.getLength());
+            mediaPlayer.setOnErrorListener(this);
+            mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+            mediaPlayer.setLooping(false);
+            mediaPlayer.setVolume(BEEP_VOLUME, BEEP_VOLUME);
+            mediaPlayer.prepare();
+            return mediaPlayer;
+        } catch (IOException ioe) {
+            Log.w(TAG, ioe);
+            mediaPlayer.release();
+            return null;
+        }
+    }
+
+    @Override
+    public synchronized boolean onError(MediaPlayer mp, int what, int extra) {
+        if (what == MediaPlayer.MEDIA_ERROR_SERVER_DIED) {
+            // we are finished, so put up an appropriate error toast if required and finish
+            activity.finish();
+        } else {
+            // possibly media player error, so release and recreate
+            close();
+            updatePrefs();
+        }
+        return true;
+    }
+
+    @Override
+    public synchronized void close() {
+        if (mediaPlayer != null) {
+            mediaPlayer.release();
+            mediaPlayer = null;
+        }
+    }
+
+}
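
BeepManager is tied to its host activity's lifecycle (its constructor and methods are package-private, so callers live in com.example.qrcode). ScannerActivity further down uses it exactly as in this condensed sketch:

    package com.example.qrcode;

    import android.app.Activity;
    import android.os.Bundle;

    // Condensed lifecycle wiring, mirroring ScannerActivity.
    public class HostActivity extends Activity {
        private BeepManager beepManager;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            beepManager = new BeepManager(this); // reads beep/vibrate prefs once
        }

        @Override
        protected void onResume() {
            super.onResume();
            beepManager.updatePrefs();           // prefs may have changed while paused
        }

        void onDecodeSucceeded() {
            beepManager.playBeepSoundAndVibrate();
        }

        @Override
        protected void onPause() {
            super.onPause();
            beepManager.close();                 // release the MediaPlayer
        }
    }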

+ 29 - 0
YZxing-lib/src/main/java/com/example/qrcode/Constant.java

@@ -0,0 +1,29 @@
+package com.example.qrcode;
+
+/**
+ * Created by yangyu on 17/10/29.
+ */
+
+public class Constant {
+    public static final int MESSAGE_SCANNER_DECODE = 0;
+    public static final int MESSAGE_SCANNER_DECODE_SUCCEEDED = 1;
+    public static final int MESSAGE_SCANNER_DECODE_FAIL = 2;
+    public static final int MESSAGE_SCANNER_QUIT = 3;
+
+    public static final String KEY_AUTO_FOCUS = "auto_focus";
+    public static final String KEY_DISABLE_CONTINUOUS_FOCUS = "continuous_focus";
+    public static final String KEY_VIBRATE = "beep_vibrate";
+    public static final String KEY_PLAY_BEEP = "beep_play";
+    public static final String TRANSFE_RRECORD = "transferrecord";
+
+    public static final String EXTRA_SCANNER_FRAME_WIDTH = "scan_frame_width";
+    public static final String EXTRA_SCANNER_FRAME_HEIGHT = "scan_frame_height";
+    public static final String EXTRA_SCANNER_FRAME_TOP_PADDING = "scan_frame_top_padding";
+    public static final String EXTRA_SCAN_CODE_TYPE = "scan_code_type";
+    public static final String EXTRA_IS_ENABLE_SCAN_FROM_PIC = "is_enable_scan_from_pic";
+
+    public static final String EXTRA_RESULT_CODE_TYPE = "result_code_type";
+    public static final String EXTRA_RESULT_CONTENT = "result_content";
+
+    public static final String EXTRA_RESULT_TEXT_FROM_PIC = "text_from_pic";
+}

+ 345 - 0
YZxing-lib/src/main/java/com/example/qrcode/ScannerActivity.java

@@ -0,0 +1,345 @@
+package com.example.qrcode;
+
+import android.Manifest;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.Color;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+import android.util.Log;
+import android.view.KeyEvent;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.widget.ImageView;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.core.app.ActivityCompat;
+import androidx.core.content.ContextCompat;
+
+import com.example.qrcode.camera.CameraManager;
+import com.example.qrcode.decode.InactivityTimer;
+import com.example.qrcode.decode.ScannerHandler;
+import com.example.qrcode.utils.DecodeUtils;
+import com.example.qrcode.utils.NetUtil;
+import com.example.qrcode.utils.UriUtils;
+import com.example.qrcode.view.ScannerView;
+import com.google.zxing.BarcodeFormat;
+import com.google.zxing.Result;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Objects;
+import java.util.Set;
+
+
+/**
+ * Created by yangyu on 17/10/18.
+ */
+
+public class ScannerActivity extends SetActionBarActivity implements SurfaceHolder.Callback {
+    public static final String BARCODE_FORMAT = "support_barcode_format";
+    private static final String TAG = "ScannerActivity";
+    public final int PERMISSION_REQUEST_CODE_WRITE_EXTERNAL_STORAGE = 0X11;
+    public final int REQUEST_CODE_GET_PIC_URI = 0X12;
+    private final int MESSAGE_DECODE_FROM_BITMAP = 0;
+
+    private ScannerView mScannerView;
+    private SurfaceView mSurfaceView;
+
+    private InactivityTimer mInactivityTimer;
+    private BeepManager beepManager;
+
+    private com.example.qrcode.camera.CameraManager cameraManager;
+    private ScannerHandler handler;
+    private Collection<BarcodeFormat> decodeFormats;
+
+    private int mScanFocusWidth;
+    private int mScanFocusHeight;
+    private int mScanFocusTopPadding;
+
+    private boolean isEnableScanFromPicture;
+    private boolean hasSurface;
+    private MyHandler mHandler;
+
+    @Override
+    protected void onCreate(@Nullable Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+//        getSupportActionBar().hide();
+//        getWindow().setFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS, WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
+        setContentView(R.layout.layout_activity_scanner);
+        initView();
+        hasSurface = false;
+        Intent intent = getIntent();
+        if (intent != null) {
+            mScanFocusWidth = intent.getIntExtra(Constant.EXTRA_SCANNER_FRAME_WIDTH, -1);
+            mScanFocusHeight = intent.getIntExtra(Constant.EXTRA_SCANNER_FRAME_HEIGHT, -1);
+            mScanFocusTopPadding = intent.getIntExtra(Constant.EXTRA_SCANNER_FRAME_TOP_PADDING, -1);
+            isEnableScanFromPicture = intent.getBooleanExtra(Constant.EXTRA_IS_ENABLE_SCAN_FROM_PIC, false);
+            Bundle b = intent.getExtras();
+            if (b != null) {
+                HashMap<String, Set> formats = (HashMap<String, Set>) b.getSerializable(Constant.EXTRA_SCAN_CODE_TYPE);
+                if (formats != null) {
+                    decodeFormats = formats.get(BARCODE_FORMAT);
+                } else {
+                    decodeFormats = EnumSet.of(BarcodeFormat.QR_CODE
+                            , BarcodeFormat.CODE_128);
+                }
+            } else {
+                decodeFormats = EnumSet.of(BarcodeFormat.QR_CODE
+                        , BarcodeFormat.CODE_128);
+            }
+
+        }
+        Log.e(TAG, "onCreate:decodeFormats :" + decodeFormats.size() + "--" + decodeFormats.toString());
+        mInactivityTimer = new InactivityTimer(this);
+        beepManager = new BeepManager(this);
+        mHandler = new MyHandler(this);
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        cameraManager = new CameraManager(this);
+        cameraManager.setManualFramingRect(mScanFocusWidth, mScanFocusHeight, mScanFocusTopPadding);
+        mScannerView.setCameraManager(cameraManager);
+        SurfaceHolder holder = mSurfaceView.getHolder();
+
+        if (hasSurface) {
+            initCamera(holder);
+        } else {
+            holder.addCallback(this);
+        }
+        mInactivityTimer.onResume();
+        beepManager.updatePrefs();
+    }
+
+    @Override
+    protected void onPause() {
+        super.onPause();
+        if (handler != null) {
+            handler.quitSynchronously();
+            handler = null;
+        }
+        cameraManager.closeDriver();
+        mInactivityTimer.onPause();
+        beepManager.close();
+    }
+
+    @Override
+    protected void onDestroy() {
+        cameraManager.clearFramingRect();
+        mInactivityTimer.shutdown();
+        super.onDestroy();
+    }
+
+    @Override
+    public boolean onKeyDown(int keyCode, KeyEvent event) {
+        switch (keyCode) {
+            case KeyEvent.KEYCODE_VOLUME_DOWN:
+                // turn the torch off
+                cameraManager.setTorch(false);
+                return true;
+            case KeyEvent.KEYCODE_VOLUME_UP:
+                // turn the torch on
+                cameraManager.setTorch(true);
+                return true;
+        }
+        return super.onKeyDown(keyCode, event);
+    }
+
+//    @Override
+//    public boolean onCreateOptionsMenu(Menu menu) {
+////        if (isEnableScanFromPicture) {
+////            getMenuInflater().inflate(R.menu.menu_scan, menu);
+////            return true;
+////        } else {
+////            return super.onCreateOptionsMenu(menu);
+////        }
+//    }
+//
+//    @Override
+//    public boolean onOptionsItemSelected(MenuItem item) {
+////        int itemId = item.getItemId();
+////        if (itemId == R.id.scan_from_picture) {
+////            // request the storage permission first
+////            int checked = ContextCompat.checkSelfPermission(ScannerActivity.this
+////                    , Manifest.permission.WRITE_EXTERNAL_STORAGE);
+////            if (checked == PackageManager.PERMISSION_GRANTED) {
+////                goPicture();
+////            } else {
+////                ActivityCompat.requestPermissions(ScannerActivity.this
+////                        , new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSION_REQUEST_CODE_WRITE_EXTERNAL_STORAGE);
+////            }
+////        }
+////        return true;
+//    }
+
+    private void goPicture() {
+        Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
+        intent.setType("image/*");
+        startActivityForResult(intent, REQUEST_CODE_GET_PIC_URI);
+    }
+
+    private void initView() {
+//        getSupportActionBar().hide();
+        TextView tv_title_center = findViewById(R.id.tv_title_center);
+        tv_title_center.setText(getString(R.string.title_qr_code_or_banner_code));
+        tv_title_center.setTextColor(Color.BLACK);
+        findViewById(R.id.tv_title_right).setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                int checked = ContextCompat.checkSelfPermission(ScannerActivity.this
+                        , Manifest.permission.WRITE_EXTERNAL_STORAGE);
+                if (checked == PackageManager.PERMISSION_GRANTED) {
+                    goPicture();
+                } else {
+                    ActivityCompat.requestPermissions(ScannerActivity.this
+                            , new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSION_REQUEST_CODE_WRITE_EXTERNAL_STORAGE);
+                }
+            }
+        });
+
+        ImageView iv_title_left_first = findViewById(R.id.iv_title_left_first);
+        iv_title_left_first.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                finish();
+            }
+        });
+        iv_title_left_first.setImageDrawable(getResources().getDrawable(R.mipmap.return_icon));
+        mSurfaceView = (SurfaceView) findViewById(R.id.surface);
+        mScannerView = (ScannerView) findViewById(R.id.scan_view);
+    }
+
+    private void initCamera(SurfaceHolder surfaceHolder) {
+        if (surfaceHolder == null) {
+            throw new IllegalStateException("No SurfaceHolder provided");
+        }
+        if (cameraManager.isOpen()) {
+            Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?");
+            return;
+        }
+        try {
+            cameraManager.openDriver(surfaceHolder);
+            if (handler == null) {
+                handler = new ScannerHandler(this, decodeFormats, "utf-8", cameraManager);
+            }
+        } catch (IOException ioe) {
+            Log.w(TAG, ioe);
+        } catch (RuntimeException e) {
+            Log.w(TAG, "Unexpected error initializing camera", e);
+        }
+    }
+
+    @Override
+    public void surfaceCreated(SurfaceHolder holder) {
+        if (holder == null) {
+            Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
+        }
+        if (!hasSurface) {
+            hasSurface = true;
+            initCamera(holder);
+        }
+    }
+
+    @Override
+    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+    }
+
+    @Override
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        hasSurface = false;
+    }
+
+    // Handle the decoded scan result here.
+    public void handDecode(final Result result) {
+        if (!NetUtil.isGprsOrWifiConnected(this)) {
+            Toast.makeText(this, getString(R.string.check_net), Toast.LENGTH_SHORT).show();
+            return;
+        }
+        mInactivityTimer.onActivity();
+        beepManager.playBeepSoundAndVibrate();
+        Intent data = new Intent();
+        BarcodeFormat format = result.getBarcodeFormat();
+        String type = format.toString();
+        data.putExtra(Constant.EXTRA_RESULT_CODE_TYPE, type);
+        data.putExtra(Constant.EXTRA_RESULT_CONTENT, result.getText());
+        setResult(RESULT_OK, data);
+        finish();
+    }
+
+    public CameraManager getCameraManager() {
+        return cameraManager;
+    }
+
+    public Handler getHandler() {
+        return handler;
+    }
+
+    @Override
+    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+        super.onActivityResult(requestCode, resultCode, data);
+        if (resultCode == RESULT_OK) {
+            switch (requestCode) {
+                case REQUEST_CODE_GET_PIC_URI:
+                    Uri uri = data.getData();
+                    Log.e(TAG, "onActivityResult: uri:" + Objects.requireNonNull(uri).toString());
+                    String imagePath = UriUtils.getPicturePathFromUri(ScannerActivity.this, uri);
+                    // Compress the picked image before decoding; without compression
+                    // a very large photo can exhaust memory and crash the app.
+                    try {
+                        Bitmap bitmap = DecodeUtils.compressPicture(this, Uri.fromFile(new File(imagePath)));
+                        Message message = mHandler.obtainMessage(MESSAGE_DECODE_FROM_BITMAP, bitmap);
+                        mHandler.sendMessage(message);
+                    } catch (Exception e) {
+                        Toast.makeText(this, e.getMessage(), Toast.LENGTH_SHORT).show();
+                    }
+                    break;
+            }
+        }
+    }
+
+    @Override
+    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
+        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+        if (requestCode == PERMISSION_REQUEST_CODE_WRITE_EXTERNAL_STORAGE) {
+            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
+                goPicture();
+                return;
+            }
+        }
+    }
+
+    private static class MyHandler extends Handler {
+        private WeakReference<ScannerActivity> activity;
+
+        MyHandler(ScannerActivity mainActivityWeakReference) {
+            activity = new WeakReference<ScannerActivity>(mainActivityWeakReference);
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            super.handleMessage(msg);
+            ScannerActivity activity = this.activity.get();
+            if (activity != null) {
+                if (msg.what == activity.MESSAGE_DECODE_FROM_BITMAP) {
+                    Bitmap bm = (Bitmap) msg.obj;
+                    DecodeUtils.DecodeAsyncTask decodeAsyncTask = new DecodeUtils.DecodeAsyncTask(activity);
+                    decodeAsyncTask.execute(bm);
+                }
+            }
+        }
+    }
+}
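
Launching the scanner and reading its result back follows from the extras handled in onCreate and handDecode above. A hedged sketch (the caller class, request code, and frame sizes are illustrative):

    package com.example.qrcode;

    import android.app.Activity;
    import android.content.Intent;

    import com.google.zxing.BarcodeFormat;

    import java.util.EnumSet;
    import java.util.HashMap;
    import java.util.Set;

    public class ScanLauncher {
        static final int REQUEST_SCAN = 1;

        static void launch(Activity from) {
            Intent intent = new Intent(from, ScannerActivity.class);
            intent.putExtra(Constant.EXTRA_SCANNER_FRAME_WIDTH, 600);
            intent.putExtra(Constant.EXTRA_SCANNER_FRAME_HEIGHT, 600);
            intent.putExtra(Constant.EXTRA_IS_ENABLE_SCAN_FROM_PIC, true);
            // Restrict decoding to QR codes; the map key must be BARCODE_FORMAT.
            HashMap<String, Set> formats = new HashMap<>();
            formats.put(ScannerActivity.BARCODE_FORMAT, EnumSet.of(BarcodeFormat.QR_CODE));
            intent.putExtra(Constant.EXTRA_SCAN_CODE_TYPE, formats);
            from.startActivityForResult(intent, REQUEST_SCAN);
        }

        // Call from the launcher's onActivityResult when resultCode == RESULT_OK.
        static String extractContent(Intent data) {
            String type = data.getStringExtra(Constant.EXTRA_RESULT_CODE_TYPE); // e.g. "QR_CODE"
            return data.getStringExtra(Constant.EXTRA_RESULT_CONTENT);
        }
    }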

+ 27 - 0
YZxing-lib/src/main/java/com/example/qrcode/ScannerFragment.java

@@ -0,0 +1,27 @@
+package com.example.qrcode;
+
+
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+
+import androidx.fragment.app.Fragment;
+
+
+/**
+ * A simple {@link Fragment} subclass.
+ */
+public class ScannerFragment extends Fragment {
+
+    public ScannerFragment() {
+        // Required empty public constructor
+    }
+
+    @Override
+    public View onCreateView(LayoutInflater inflater, ViewGroup container,
+                             Bundle savedInstanceState) {
+        // Inflate the layout for this fragment
+        return inflater.inflate(R.layout.fragment_scanner, container, false);
+    }
+}

+ 78 - 0
YZxing-lib/src/main/java/com/example/qrcode/SetActionBarActivity.java

@@ -0,0 +1,78 @@
+package com.example.qrcode;
+
+import android.graphics.Color;
+import android.graphics.drawable.ColorDrawable;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+
+import androidx.appcompat.app.AppCompatActivity;
+
+
+/**
+ * Created by Administrator on 2016/4/20.
+ */
+public abstract class SetActionBarActivity extends AppCompatActivity {
+
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        setStatusBarColor();
+        super.onCreate(savedInstanceState);
+        setActionBar();
+    }
+
+    protected void setActionBar() {
+        if (getSupportActionBar() != null) { // some activities have no action bar, so check first
+//            SkinUtils.Skin skin = SkinUtils.getSkin(this);
+            getSupportActionBar().setBackgroundDrawable(new ColorDrawable(Color.WHITE));
+            if (Build.VERSION.SDK_INT >= 21) { // on API 21+ remove the action bar's elevation shadow
+                getSupportActionBar().setElevation(0);
+            }
+        }
+    }
+
+    /**
+     * Immersive (transparent) status bar.
+     */
+    protected void setStatusBarColor() {
+//        SkinUtils.Skin skin = SkinUtils.getSkin(this);
+        setStatusBarLight(true);
+    }
+
+    protected void setStatusBarLight(boolean light) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            // On API 21+ the status bar background is made fully transparent.
+            /* On 21+, getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS)
+               renders half-transparent on some phones (e.g. vivo, Nexus 6P) and fully
+               transparent on others, so the color is set explicitly instead. */
+            Window window = getWindow();
+            window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                if (light) {
+                    window.getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
+                            | View.SYSTEM_UI_FLAG_LAYOUT_STABLE
+                            | View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR);
+                } else {
+                    window.getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
+                            | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
+                }
+                window.setStatusBarColor(Color.TRANSPARENT);
+            } else {
+                window.getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
+                        | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
+                if (light) {
+                    window.setStatusBarColor(Color.BLACK);
+                } else {
+                    window.setStatusBarColor(Color.TRANSPARENT);
+                }
+            }
+            window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
+        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+            // On API 19 (KitKat) the status bar background becomes a translucent gradient.
+            getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
+        }
+    }
+}

+ 102 - 0
YZxing-lib/src/main/java/com/example/qrcode/ShowResultActivity.java

@@ -0,0 +1,102 @@
+package com.example.qrcode;
+
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.os.Bundle;
+import android.util.Log;
+import android.util.Patterns;
+import android.view.View;
+import android.webkit.WebChromeClient;
+import android.webkit.WebSettings;
+import android.webkit.WebView;
+import android.webkit.WebViewClient;
+import android.widget.ProgressBar;
+import android.widget.TextView;
+
+import androidx.annotation.Nullable;
+import androidx.appcompat.app.AppCompatActivity;
+
+/**
+ * Created by yangyu on 2017/11/28.
+ */
+
+public class ShowResultActivity extends AppCompatActivity {
+    private static final String TAG = "ShowResultActivity";
+
+    private WebView webView;
+    private TextView tv;
+    private ProgressBar pb;
+
+    private String resultText;
+
+    @Override
+    protected void onCreate(@Nullable Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.layout_activity_show_result);
+        webView = (WebView) findViewById(R.id.web_content);
+        tv = (TextView) findViewById(R.id.tv);
+        pb = (ProgressBar) findViewById(R.id.progress);
+
+        WebSettings webSettings = webView.getSettings();
+        webSettings.setJavaScriptEnabled(true);
+        webSettings.setUseWideViewPort(true);
+        webSettings.setLoadWithOverviewMode(true);
+        webView.setWebChromeClient(webChromeClient);
+        webView.setWebViewClient(webViewClient);
+
+        Intent intent = getIntent();
+        if (intent != null) {
+            resultText = intent.getStringExtra(Constant.EXTRA_RESULT_TEXT_FROM_PIC);
+            if (Patterns.WEB_URL.matcher(resultText).matches()) {
+                // the result is a web URL; render it in the WebView
+                tv.setVisibility(View.GONE);
+                webView.setVisibility(View.VISIBLE);
+                webView.loadUrl(resultText);
+            } else {
+                // not a URL; show the raw text
+                tv.setVisibility(View.VISIBLE);
+                webView.setVisibility(View.GONE);
+                pb.setVisibility(View.GONE);
+                tv.setText(resultText);
+            }
+        }
+    }
+
+    private WebViewClient webViewClient = new WebViewClient() {
+        @Override
+        public boolean shouldOverrideUrlLoading(WebView view, String url) {
+            view.loadUrl(url);
+            return true;
+        }
+
+        @Override
+        public void onPageStarted(WebView view, String url, Bitmap favicon) {
+            super.onPageStarted(view, url, favicon);
+        }
+
+        @Override
+        public void onPageFinished(WebView view, String url) {
+            super.onPageFinished(view, url);
+        }
+    };
+
+    private WebChromeClient webChromeClient = new WebChromeClient() {
+        @Override
+        public void onProgressChanged(WebView view, int newProgress) {
+            super.onProgressChanged(view, newProgress);
+            if (newProgress == 100) {
+                pb.setVisibility(View.GONE);
+            } else {
+                pb.setVisibility(View.VISIBLE);
+                pb.setProgress(newProgress);
+            }
+        }
+
+        @Override
+        public void onReceivedTitle(WebView view, String title) {
+            super.onReceivedTitle(view, title);
+            Log.e(TAG, "onReceivedTitle: " + title);
+        }
+    };
+}
+
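
ShowResultActivity chooses between the WebView and the plain TextView based solely on the EXTRA_RESULT_TEXT_FROM_PIC string, so launching it takes a single extra. A sketch, to be run inside any activity in this package (decodedText stands for whatever string the decoder produced):

    Intent show = new Intent(this, ShowResultActivity.class);
    show.putExtra(Constant.EXTRA_RESULT_TEXT_FROM_PIC, decodedText);
    startActivity(show);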

+ 46 - 0
YZxing-lib/src/main/java/com/example/qrcode/callback/PreviewCallback.java

@@ -0,0 +1,46 @@
+package com.example.qrcode.callback;
+
+import android.graphics.Point;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.os.Message;
+import android.util.Log;
+
+import com.example.qrcode.camera.CameraConfigurationManager;
+
+/**
+ * Created by yangyu on 17/10/19.
+ */
+
+public class PreviewCallback implements Camera.PreviewCallback {
+
+    private static final String TAG = PreviewCallback.class.getSimpleName();
+
+    private final CameraConfigurationManager configManager;
+    private Handler previewHandler;
+    private int previewMessage;
+
+    public PreviewCallback(CameraConfigurationManager configManager) {
+        this.configManager = configManager;
+    }
+
+    public void setHandler(Handler previewHandler, int previewMessage) {
+        this.previewHandler = previewHandler;
+        this.previewMessage = previewMessage;
+    }
+
+    @Override
+    public void onPreviewFrame(byte[] data, Camera camera) {
+        Point cameraResolution = configManager.getCameraResolution();
+        Handler thePreviewHandler = previewHandler;
+        if (cameraResolution != null && thePreviewHandler != null) {
+            Message message = thePreviewHandler.obtainMessage(previewMessage, cameraResolution.x,
+                    cameraResolution.y, data);
+            message.sendToTarget();
+            previewHandler = null;
+        } else {
+            Log.d(TAG, "Got preview callback, but no handler or resolution available");
+        }
+    }
+
+}

+ 121 - 0
YZxing-lib/src/main/java/com/example/qrcode/camera/AutoFocusManager.java

@@ -0,0 +1,121 @@
+package com.example.qrcode.camera;
+
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.hardware.Camera;
+import android.os.AsyncTask;
+import android.preference.PreferenceManager;
+import android.util.Log;
+
+import com.example.qrcode.Constant;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.concurrent.RejectedExecutionException;
+
+/**
+ * Created by yangyu on 17/10/19.
+ */
+
+final class AutoFocusManager implements Camera.AutoFocusCallback {
+
+    private static final String TAG = AutoFocusManager.class.getSimpleName();
+
+    private static final long AUTO_FOCUS_INTERVAL_MS = 2000L;
+    private static final Collection<String> FOCUS_MODES_CALLING_AF;
+
+    static {
+        FOCUS_MODES_CALLING_AF = new ArrayList<>(2);
+        FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_AUTO);
+        FOCUS_MODES_CALLING_AF.add(Camera.Parameters.FOCUS_MODE_MACRO);
+    }
+
+    private boolean stopped;
+    private boolean focusing;
+    private final boolean useAutoFocus;
+    private final Camera camera;
+    private AsyncTask<?, ?, ?> outstandingTask;
+
+    AutoFocusManager(Context context, Camera camera) {
+        this.camera = camera;
+        SharedPreferences sharedPrefs = PreferenceManager.getDefaultSharedPreferences(context);
+        String currentFocusMode = camera.getParameters().getFocusMode();
+        useAutoFocus =
+                sharedPrefs.getBoolean(Constant.KEY_AUTO_FOCUS, true) &&
+                        FOCUS_MODES_CALLING_AF.contains(currentFocusMode);
+        Log.i(TAG, "Current focus mode '" + currentFocusMode + "'; use auto focus? " + useAutoFocus);
+        start();
+    }
+
+    @Override
+    public synchronized void onAutoFocus(boolean success, Camera theCamera) {
+        focusing = false;
+        autoFocusAgainLater();
+    }
+
+    private synchronized void autoFocusAgainLater() {
+        if (!stopped && outstandingTask == null) {
+            AutoFocusTask newTask = new AutoFocusTask();
+            try {
+                newTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
+                outstandingTask = newTask;
+            } catch (RejectedExecutionException ree) {
+                Log.w(TAG, "Could not request auto focus", ree);
+            }
+        }
+    }
+
+    synchronized void start() {
+        if (useAutoFocus) {
+            outstandingTask = null;
+            if (!stopped && !focusing) {
+                try {
+                    camera.autoFocus(this);
+                    focusing = true;
+                } catch (RuntimeException re) {
+                    // Have heard RuntimeException reported in Android 4.0.x+; continue?
+                    Log.w(TAG, "Unexpected exception while focusing", re);
+                    // Try again later to keep cycle going
+                    autoFocusAgainLater();
+                }
+            }
+        }
+    }
+
+    private synchronized void cancelOutstandingTask() {
+        if (outstandingTask != null) {
+            if (outstandingTask.getStatus() != AsyncTask.Status.FINISHED) {
+                outstandingTask.cancel(true);
+            }
+            outstandingTask = null;
+        }
+    }
+
+    synchronized void stop() {
+        stopped = true;
+        if (useAutoFocus) {
+            cancelOutstandingTask();
+            // Doesn't hurt to call this even if not focusing
+            try {
+                camera.cancelAutoFocus();
+            } catch (RuntimeException re) {
+                // Have heard RuntimeException reported in Android 4.0.x+; continue?
+                Log.w(TAG, "Unexpected exception while cancelling focusing", re);
+            }
+        }
+    }
+
+    private final class AutoFocusTask extends AsyncTask<Object, Object, Object> {
+        @Override
+        protected Object doInBackground(Object... voids) {
+            try {
+                Thread.sleep(AUTO_FOCUS_INTERVAL_MS);
+            } catch (InterruptedException e) {
+                // continue
+            }
+            start();
+            return null;
+        }
+    }
+
+}

+ 115 - 0
YZxing-lib/src/main/java/com/example/qrcode/camera/CameraConfigurationManager.java

@@ -0,0 +1,115 @@
+package com.example.qrcode.camera;
+
+/**
+ * Created by yangyu on 17/10/18.
+ */
+
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.graphics.Point;
+import android.hardware.Camera;
+import android.preference.PreferenceManager;
+import android.util.Log;
+import android.view.Display;
+import android.view.WindowManager;
+
+import com.example.qrcode.Constant;
+import com.example.qrcode.utils.CameraConfigurationUtils;
+
+/**
+ * A class which deals with reading, parsing, and setting the camera parameters which are used to
+ * configure the camera hardware.
+ */
+@SuppressWarnings("deprecation") // camera APIs
+public final class CameraConfigurationManager {
+
+    private static final String TAG = "CameraConfiguration";
+
+    private final Context context;
+    private Point screenResolution;
+    private Point cameraResolution;
+    private Point bestPreviewSize;
+
+    CameraConfigurationManager(Context context) {
+        this.context = context;
+    }
+
+    /**
+     * Reads, one time, values from the camera that are needed by the app.
+     */
+    void initFromCameraParameters(OpenCamera camera) {
+        Camera.Parameters parameters = camera.getCamera().getParameters();
+        WindowManager manager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+        Display display = manager.getDefaultDisplay();
+        Point theScreenResolution = new Point();
+        display.getSize(theScreenResolution);
+        screenResolution = theScreenResolution;
+        Log.i(TAG, "Screen resolution in current orientation: " + screenResolution);
+        cameraResolution = CameraConfigurationUtils.findBestPreviewSizeValue(parameters, screenResolution);
+        Log.i(TAG, "Camera resolution: " + cameraResolution);
+        bestPreviewSize = CameraConfigurationUtils.findBestPreviewSizeValue(parameters, screenResolution);
+        Log.i(TAG, "Best available preview size: " + bestPreviewSize);
+    }
+
+    void setDesiredCameraParameters(OpenCamera camera, boolean safeMode) {
+
+        Camera theCamera = camera.getCamera();
+        Camera.Parameters parameters = theCamera.getParameters();
+
+        if (parameters == null) {
+            Log.w(TAG, "Device error: no camera parameters are available. Proceeding without configuration.");
+            return;
+        }
+
+        Log.i(TAG, "Initial camera parameters: " + parameters.flatten());
+
+        if (safeMode) {
+            Log.w(TAG, "In camera config safe mode -- most settings will not be honored");
+        }
+
+        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
+        CameraConfigurationUtils.setFocus(
+                parameters,
+                prefs.getBoolean(Constant.KEY_AUTO_FOCUS, true),
+                prefs.getBoolean(Constant.KEY_DISABLE_CONTINUOUS_FOCUS, true),
+                safeMode);
+
+        parameters.setPreviewSize(bestPreviewSize.x, bestPreviewSize.y);
+
+        theCamera.setParameters(parameters);
+        // Rotate the preview 90 degrees so it displays upright in portrait.
+        theCamera.setDisplayOrientation(90);
+    }
+
+    public Point getCameraResolution() {
+        return cameraResolution;
+    }
+
+    Point getScreenResolution() {
+        return screenResolution;
+    }
+
+    boolean getTorchState(Camera camera) {
+        if (camera != null) {
+            Camera.Parameters parameters = camera.getParameters();
+            if (parameters != null) {
+                String flashMode = parameters.getFlashMode();
+                return flashMode != null &&
+                        (Camera.Parameters.FLASH_MODE_ON.equals(flashMode) ||
+                                Camera.Parameters.FLASH_MODE_TORCH.equals(flashMode));
+            }
+        }
+        return false;
+    }
+
+    void setTorch(Camera camera, boolean newSetting) {
+        Camera.Parameters parameters = camera.getParameters();
+        doSetTorch(parameters, newSetting, false);
+        camera.setParameters(parameters);
+    }
+
+    private void doSetTorch(Camera.Parameters parameters, boolean newSetting, boolean safeMode) {
+        CameraConfigurationUtils.setTorch(parameters, newSetting);
+    }
+
+}

+ 13 - 0
YZxing-lib/src/main/java/com/example/qrcode/camera/CameraFacing.java

@@ -0,0 +1,13 @@
+package com.example.qrcode.camera;
+
+/**
+ * Created by yangyu on 17/10/18.
+ */
+
+enum CameraFacing {
+
+    BACK,  // must be value 0!
+    FRONT, // must be value 1!
+
+}
+

+ 301 - 0
YZxing-lib/src/main/java/com/example/qrcode/camera/CameraManager.java

@@ -0,0 +1,301 @@
+package com.example.qrcode.camera;
+
+/**
+ * Created by yangyu on 17/10/18.
+ */
+
+import android.content.Context;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.os.Handler;
+import android.util.Log;
+import android.view.SurfaceHolder;
+
+import com.example.qrcode.callback.PreviewCallback;
+import com.google.zxing.PlanarYUVLuminanceSource;
+
+import java.io.IOException;
+
+/**
+ * This object wraps the Camera service object and expects to be the only one talking to it. The
+ * implementation encapsulates the steps needed to take preview-sized images, which are used for
+ * both preview and decoding.
+ *
+ * @author dswitkin@google.com (Daniel Switkin)
+ */
+@SuppressWarnings("deprecation") // camera APIs
+public final class CameraManager {
+
+    private static final String TAG = CameraManager.class.getSimpleName();
+
+    private static final int MIN_FRAME_WIDTH = 240;
+    private static final int MIN_FRAME_HEIGHT = 240;
+    private static final int MAX_FRAME_WIDTH = 1200; // = 5/8 * 1920
+    private static final int MAX_FRAME_HEIGHT = 675; // = 5/8 * 1080
+    private static final int DEFAULT_TOP_PADDING = 200;
+
+    private final Context context;
+    private final CameraConfigurationManager configManager;
+    private OpenCamera camera;
+    private AutoFocusManager autoFocusManager;
+    private Rect framingRect;
+    private boolean initialized;
+    private boolean previewing;
+    private int requestedCameraId = OpenCameraInterface.NO_REQUESTED_CAMERA;
+    private int requestedFramingRectWidth;
+    private int requestedFramingRectHeight;
+    private int requestedFramingRectTopPadding;
+    /**
+     * Preview frames are delivered here, which we pass on to the registered handler. Make sure to
+     * clear the handler so it will only receive one message.
+     */
+    private final PreviewCallback previewCallback;
+
+    public CameraManager(Context context) {
+        this.context = context;
+        this.configManager = new CameraConfigurationManager(context);
+        previewCallback = new PreviewCallback(configManager);
+    }
+
+    /**
+     * Opens the camera driver and initializes the hardware parameters.
+     *
+     * @param holder The surface object which the camera will draw preview frames into.
+     * @throws IOException Indicates the camera driver failed to open.
+     */
+    public synchronized void openDriver(SurfaceHolder holder) throws IOException {
+        OpenCamera theCamera = camera;
+        if (theCamera == null) {
+            theCamera = OpenCameraInterface.open(requestedCameraId);
+            if (theCamera == null) {
+                throw new IOException("Camera.open() failed to return object from driver");
+            }
+            camera = theCamera;
+        }
+
+        if (!initialized) {
+            initialized = true;
+            configManager.initFromCameraParameters(theCamera);
+            if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
+                setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight
+                        , requestedFramingRectTopPadding > 0 ? requestedFramingRectTopPadding : -1);
+                requestedFramingRectWidth = 0;
+                requestedFramingRectHeight = 0;
+            }
+        }
+
+        Camera cameraObject = theCamera.getCamera();
+        try {
+            configManager.setDesiredCameraParameters(theCamera, false);
+        } catch (RuntimeException re) {
+            // Some drivers reject the requested parameters; log and continue with defaults.
+            Log.w(TAG, "Camera rejected parameters", re);
+        }
+        cameraObject.setPreviewDisplay(holder);
+
+    }
+
+    public synchronized boolean isOpen() {
+        return camera != null;
+    }
+
+    /**
+     * Closes the camera driver if still in use.
+     */
+    public synchronized void closeDriver() {
+        if (camera != null) {
+            camera.getCamera().release();
+            camera = null;
+        }
+    }
+
+    public synchronized void clearFramingRect() {
+        // Make sure to clear these each time we close the camera, so that any scanning rect
+        // requested by intent is forgotten.
+        framingRect = null;
+    }
+
+    /**
+     * Asks the camera hardware to begin drawing preview frames to the screen.
+     */
+    public synchronized void startPreview() {
+        OpenCamera theCamera = camera;
+        if (theCamera != null && !previewing) {
+            theCamera.getCamera().startPreview();
+            previewing = true;
+            autoFocusManager = new AutoFocusManager(context, theCamera.getCamera());
+        }
+    }
+
+    /**
+     * Tells the camera to stop drawing preview frames.
+     */
+    public synchronized void stopPreview() {
+        if (autoFocusManager != null) {
+            autoFocusManager.stop();
+            autoFocusManager = null;
+        }
+        if (camera != null && previewing) {
+            camera.getCamera().stopPreview();
+            previewCallback.setHandler(null, 0);
+            previewing = false;
+        }
+    }
+
+    /**
+     * @param newSetting if {@code true}, light should be turned on if currently off. And vice versa.
+     */
+    public synchronized void setTorch(boolean newSetting) {
+        OpenCamera theCamera = camera;
+        if (theCamera != null && newSetting != configManager.getTorchState(theCamera.getCamera())) {
+            boolean wasAutoFocusManager = autoFocusManager != null;
+            if (wasAutoFocusManager) {
+                autoFocusManager.stop();
+                autoFocusManager = null;
+            }
+            configManager.setTorch(theCamera.getCamera(), newSetting);
+            if (wasAutoFocusManager) {
+                autoFocusManager = new AutoFocusManager(context, theCamera.getCamera());
+                autoFocusManager.start();
+            }
+        }
+    }
+
+    /**
+     * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
+     * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
+     * respectively.
+     *
+     * @param handler The handler to send the message to.
+     * @param message The what field of the message to be sent.
+     */
+    public synchronized void requestPreviewFrame(Handler handler, int message) {
+        OpenCamera theCamera = camera;
+        if (theCamera != null && previewing) {
+            previewCallback.setHandler(handler, message);
+            theCamera.getCamera().setOneShotPreviewCallback(previewCallback);
+        }
+    }
+
+    /**
+     * Calculates the framing rect which the UI should draw to show the user where to place the
+     * barcode. This target helps with alignment as well as forces the user to hold the device
+     * far enough away to ensure the image will be in focus.
+     *
+     * @return The rectangle to draw on screen in window coordinates.
+     */
+    public synchronized Rect getFramingRect() {
+        if (framingRect == null) {
+            if (camera == null) {
+                return null;
+            }
+            Point screenResolution = configManager.getScreenResolution();
+            if (screenResolution == null) {
+                // Called early, before init even finished
+                return null;
+            }
+
+            int width = findDesiredDimensionInRange(screenResolution.x, MIN_FRAME_WIDTH, MAX_FRAME_WIDTH);
+            int height = findDesiredDimensionInRange(screenResolution.y, MIN_FRAME_HEIGHT, MAX_FRAME_HEIGHT);
+
+            int leftOffset = (screenResolution.x - width) / 2;
+            int topOffset = DEFAULT_TOP_PADDING;
+            framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
+            Log.d(TAG, "Calculated framing rect: " + framingRect);
+        }
+        return framingRect;
+    }
+
+    private static int findDesiredDimensionInRange(int resolution, int hardMin, int hardMax) {
+        int dim = 5 * resolution / 8; // Target 5/8 of each dimension
+        if (dim < hardMin) {
+            return hardMin;
+        }
+        if (dim > hardMax) {
+            return hardMax;
+        }
+        return dim;
+    }
+
+    //    /**
+    //     * Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
+    //     * not UI / screen.
+    //     *
+    //     * @return {@link Rect} expressing barcode scan area in terms of the preview size
+    //     */
+    //    public synchronized Rect getFramingRectInPreview() {
+    //        if (framingRectInPreview == null) {
+    //            Rect framingRect = getFramingRect();
+    //            if (framingRect == null) {
+    //                return null;
+    //            }
+    //            Rect rect = new Rect(framingRect);
+    //            Point cameraResolution = configManager.getCameraResolution();
+    //            Point screenResolution = configManager.getScreenResolution();
+    //            if (cameraResolution == null || screenResolution == null) {
+    //                // Called early, before init even finished
+    //                return null;
+    //            }
+    //            rect.left = rect.left * cameraResolution.x / screenResolution.x;
+    //            rect.right = rect.right * cameraResolution.x / screenResolution.x;
+    //            rect.top = rect.top * cameraResolution.y / screenResolution.y;
+    //            rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
+    //            framingRectInPreview = rect;
+    //        }
+    //        return framingRectInPreview;
+    //    }
+
+    /**
+     * Sets the camera ID.
+     *
+     * @param cameraId camera ID of the camera to use. A negative value means "no preference".
+     */
+    public synchronized void setManualCameraId(int cameraId) {
+        requestedCameraId = cameraId;
+    }
+
+    /**
+     * Sets the size of the scanning frame.
+     *
+     * @param width  The width in pixels to scan.
+     * @param height The height in pixels to scan.
+     */
+    public synchronized void setManualFramingRect(int width, int height, int topPadding) {
+        if (initialized) {
+            Point screenResolution = configManager.getScreenResolution();
+            if (width > screenResolution.x) {
+                width = screenResolution.x;
+            }
+            if (height > screenResolution.y) {
+                height = screenResolution.y;
+            }
+            int leftOffset = (screenResolution.x - width) / 2;
+            int topOffset = topPadding < 0 ? DEFAULT_TOP_PADDING : topPadding;
+            framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
+        } else {
+            requestedFramingRectWidth = width;
+            requestedFramingRectHeight = height;
+            requestedFramingRectTopPadding = topPadding;
+        }
+    }
+
+    /**
+     * A factory method to build a {@link PlanarYUVLuminanceSource} covering the full
+     * preview frame. The preview format is assumed to be a YUV format whose luminance
+     * plane comes first (e.g. NV21, the Camera API default).
+     *
+     * @param data   A preview frame.
+     * @param width  The width of the image.
+     * @param height The height of the image.
+     * @return A PlanarYUVLuminanceSource instance.
+     */
+    public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
+        return new PlanarYUVLuminanceSource(data, width, height, 0, 0,
+                width, height, false);
+    }
+
+    public Camera.Size getPreviewSize() {
+        if (camera != null && camera.getCamera() != null) {
+            return camera.getCamera().getParameters().getPreviewSize();
+        }
+        return null;
+    }
+}
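For reference, a minimal caller-side sketch (not part of the commit) of how a host
Activity might pick a camera and size the scan frame before starting the preview.
The `cameraManager` field and the pixel values are illustrative assumptions;
NO_REQUESTED_CAMERA comes from OpenCameraInterface further down in this diff.

    // Sketch only: assumes the Activity holds a CameraManager in `cameraManager`.
    void configureScanner() {
        // A negative id means "no preference"; a rear-facing camera is preferred.
        cameraManager.setManualCameraId(OpenCameraInterface.NO_REQUESTED_CAMERA);
        // Request a 600 x 600 px scan frame, 120 px from the top of the screen.
        // Once the manager is initialized, oversized values are clamped to the
        // screen; a negative topPadding falls back to the default top padding.
        cameraManager.setManualFramingRect(600, 600, 120);
    }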

+ 40 - 0
YZxing-lib/src/main/java/com/example/qrcode/camera/OpenCamera.java

@@ -0,0 +1,40 @@
+package com.example.qrcode.camera;
+
+import android.hardware.Camera;
+
+/**
+ * Created by yangyu on 17/10/18.
+ */
+
+public final class OpenCamera {
+
+    private final int index;
+    private final Camera camera;
+    private final CameraFacing facing;
+    private final int orientation;
+
+    public OpenCamera(int index, Camera camera, CameraFacing facing, int orientation) {
+        this.index = index;
+        this.camera = camera;
+        this.facing = facing;
+        this.orientation = orientation;
+    }
+
+    public Camera getCamera() {
+        return camera;
+    }
+
+    public CameraFacing getFacing() {
+        return facing;
+    }
+
+    public int getOrientation() {
+        return orientation;
+    }
+
+    @Override
+    public String toString() {
+        return "Camera #" + index + " : " + facing + ',' + orientation;
+    }
+
+}

+ 88 - 0
YZxing-lib/src/main/java/com/example/qrcode/camera/OpenCameraInterface.java

@@ -0,0 +1,88 @@
+package com.example.qrcode.camera;
+
+/**
+ * Created by yangyu on 17/10/18.
+ */
+
+import android.hardware.Camera;
+import android.util.Log;
+
+/**
+ * Abstraction over the {@link Camera} API that helps open a camera and return its metadata.
+ */
+@SuppressWarnings("deprecation") // camera APIs
+public final class OpenCameraInterface {
+
+    private static final String TAG = OpenCameraInterface.class.getName();
+
+    /** For {@link #open(int)}, means no preference for which camera to open. */
+    public static final int NO_REQUESTED_CAMERA = -1;
+
+    private OpenCameraInterface() {
+    }
+
+    /**
+     * Opens the requested camera with {@link Camera#open(int)}, if one exists.
+     *
+     * @param cameraId camera ID of the camera to use. A negative value
+     *  or {@link #NO_REQUESTED_CAMERA} means "no preference", in which case a rear-facing
+     *  camera is returned if possible or else any camera
+     * @return handle to {@link OpenCamera} that was opened, or null if no camera could be opened
+     */
+    public static OpenCamera open(int cameraId) {
+
+        int numCameras = Camera.getNumberOfCameras();
+        if (numCameras == 0) {
+            Log.w(TAG, "No cameras!");
+            return null;
+        }
+
+        boolean explicitRequest = cameraId >= 0;
+
+        Camera.CameraInfo selectedCameraInfo = null;
+        int index;
+        if (explicitRequest) {
+            index = cameraId;
+            if (index < numCameras) {
+                // Only query metadata for a valid index; getCameraInfo() throws otherwise.
+                selectedCameraInfo = new Camera.CameraInfo();
+                Camera.getCameraInfo(index, selectedCameraInfo);
+            }
+        } else {
+            index = 0;
+            while (index < numCameras) {
+                Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
+                Camera.getCameraInfo(index, cameraInfo);
+                CameraFacing reportedFacing = CameraFacing.values()[cameraInfo.facing];
+                if (reportedFacing == CameraFacing.BACK) {
+                    selectedCameraInfo = cameraInfo;
+                    break;
+                }
+                index++;
+            }
+        }
+
+        Camera camera;
+        if (index < numCameras) {
+            Log.i(TAG, "Opening camera #" + index);
+            camera = Camera.open(index);
+        } else {
+            if (explicitRequest) {
+                Log.w(TAG, "Requested camera does not exist: " + cameraId);
+                camera = null;
+            } else {
+                Log.i(TAG, "No camera facing " + CameraFacing.BACK + "; returning camera #0");
+                index = 0; // Report the index actually opened, not numCameras.
+                camera = Camera.open(index);
+                selectedCameraInfo = new Camera.CameraInfo();
+                Camera.getCameraInfo(index, selectedCameraInfo);
+            }
+        }
+
+        if (camera == null) {
+            return null;
+        }
+        return new OpenCamera(index,
+                camera,
+                CameraFacing.values()[selectedCameraInfo.facing],
+                selectedCameraInfo.orientation);
+    }
+
+}
+
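A minimal usage sketch (not part of the commit) for OpenCameraInterface; the early
return and the TAG constant are assumptions of the caller, not of this class.

    // Open with no preference; a rear-facing camera is chosen if one exists.
    OpenCamera open = OpenCameraInterface.open(OpenCameraInterface.NO_REQUESTED_CAMERA);
    if (open == null) {
        return; // No usable camera, or the explicitly requested one does not exist.
    }
    Log.i(TAG, "Opened " + open + ", facing " + open.getFacing()
            + ", sensor orientation " + open.getOrientation());
    open.getCamera().release(); // Release the hardware when done.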

+ 151 - 0
YZxing-lib/src/main/java/com/example/qrcode/decode/DecodeHandler.java

@@ -0,0 +1,151 @@
+package com.example.qrcode.decode;
+
+import android.hardware.Camera;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+
+import com.example.qrcode.Constant;
+import com.example.qrcode.ScannerActivity;
+import com.google.zxing.BinaryBitmap;
+import com.google.zxing.DecodeHintType;
+import com.google.zxing.MultiFormatReader;
+import com.google.zxing.PlanarYUVLuminanceSource;
+import com.google.zxing.ReaderException;
+import com.google.zxing.Result;
+import com.google.zxing.common.HybridBinarizer;
+
+import java.util.Map;
+
+/**
+ * Created by yangyu on 17/10/19.
+ */
+
+final class DecodeHandler extends Handler {
+
+    private static final String TAG = DecodeHandler.class.getSimpleName();
+
+    private final ScannerActivity activity;
+    private final MultiFormatReader multiFormatReader;
+    private boolean running = true;
+    private byte[] mRotatedData;
+
+    DecodeHandler(ScannerActivity activity, Map<DecodeHintType, Object> hints) {
+        multiFormatReader = new MultiFormatReader();
+        multiFormatReader.setHints(hints);
+        this.activity = activity;
+    }
+
+    @Override
+    public void handleMessage(Message message) {
+        if (message == null || !running) {
+            return;
+        }
+        if (message.what == Constant.MESSAGE_SCANNER_DECODE) {
+            decode((byte[]) message.obj, message.arg1, message.arg2);
+
+        } else if (message.what == Constant.MESSAGE_SCANNER_QUIT) {
+            running = false;
+            Looper.myLooper().quit();
+
+        }
+    }
+
+    /**
+     * Decodes a preview frame and times how long it takes. For efficiency, the same
+     * reader object is reused from one decode to the next. Note that the whole frame
+     * is decoded: buildLuminanceSource() does not crop to the framing rect.
+     *
+     * @param data   The YUV preview frame.
+     * @param width  The width of the preview frame.
+     * @param height The height of the preview frame.
+     */
+    private void decode(byte[] data, int width, int height) {
+        Camera.Size size = activity.getCameraManager().getPreviewSize();
+        if (size == null) {
+            Log.e(TAG, "getPreviewSize()==null return");
+            return;
+        }
+        // Rotate the frame 90° clockwise: the camera delivers preview data in
+        // landscape orientation by default. Reuse the instance buffer rather than
+        // shadowing the mRotatedData field with a fresh allocation on every frame.
+        if (mRotatedData == null || mRotatedData.length < data.length) {
+            mRotatedData = new byte[data.length];
+        }
+        for (int y = 0; y < size.height; y++) {
+            for (int x = 0; x < size.width; x++) {
+                mRotatedData[x * size.height + size.height - y - 1] = data[x + y * size.width];
+            }
+        }
+
+        // Swap width and height to match the rotated frame.
+        int tmp = size.width;
+        size.width = size.height;
+        size.height = tmp;
+        long start = System.currentTimeMillis();
+        Result rawResult = null;
+        PlanarYUVLuminanceSource source = activity.getCameraManager().buildLuminanceSource(mRotatedData, size.width, size.height);
+        BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
+        try {
+            rawResult = multiFormatReader.decodeWithState(bitmap);
+        } catch (ReaderException re) {
+            // continue
+            Log.e(TAG, "decode: re:" + re.getMessage());
+        } finally {
+            multiFormatReader.reset();
+        }
+
+        // The commented-out block below is an earlier take on the same fix: swap the
+        // scan data's width and height, converting the landscape frame to portrait.
+/*
+        if (null == mRotatedData) {
+            mRotatedData = new byte[width * height];
+        } else {
+            if (mRotatedData.length < width * height) {
+                mRotatedData = new byte[width * height];
+            }
+        }
+        Arrays.fill(mRotatedData, (byte) 0);
+        for (int y = 0; y < height; y++) {
+            for (int x = 0; x < width; x++) {
+                if (x + y * width >= data.length) {
+                    break;
+                }
+                mRotatedData[x * height + height - y - 1] = data[x + y * width];
+            }
+        }
+        int tmp = width;
+        width = height;
+        height = tmp;
+        long start = System.currentTimeMillis();
+        Result rawResult = null;
+        PlanarYUVLuminanceSource source = activity.getCameraManager().buildLuminanceSource(mRotatedData, width, height);
+        if (source != null) {
+            BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
+            try {
+                rawResult = multiFormatReader.decodeWithState(bitmap);
+            } catch (ReaderException re) {
+                // continue
+                Log.e(TAG, "decode: re:" + re.getMessage());
+            } finally {
+                multiFormatReader.reset();
+            }
+        }
+*/
+
+        Handler handler = activity.getHandler();
+        if (rawResult != null) {
+            // Don't log the barcode contents for security.
+            long end = System.currentTimeMillis();
+            Log.d(TAG, "Found barcode in " + (end - start) + " ms");
+            if (handler != null) {
+                Message message = Message.obtain(handler, Constant.MESSAGE_SCANNER_DECODE_SUCCEEDED, rawResult);
+                Bundle bundle = new Bundle();
+                message.setData(bundle);
+                message.sendToTarget();
+            }
+        } else {
+            if (handler != null) {
+                Message message = Message.obtain(handler, Constant.MESSAGE_SCANNER_DECODE_FAIL);
+                message.sendToTarget();
+            }
+        }
+    }
+}
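A self-contained sketch (not part of the commit) of the 90° clockwise rotation that
decode() applies to the luminance plane, extracted as a pure function with a tiny
worked example; the class and method names are illustrative.

    final class RotateSketch {
        /** Rotates a width x height luminance plane 90° clockwise. */
        static byte[] rotateCw(byte[] src, int width, int height) {
            byte[] dst = new byte[width * height];
            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x++) {
                    // Pixel (x, y) of the landscape frame lands in row x,
                    // column (height - 1 - y) of the portrait frame,
                    // whose row stride is `height`.
                    dst[x * height + (height - 1 - y)] = src[y * width + x];
                }
            }
            return dst;
        }

        public static void main(String[] args) {
            // 3x2 frame:      rotated to 2x3:
            // 1 2 3           4 1
            // 4 5 6     ->    5 2
            //                 6 3
            byte[] rotated = rotateCw(new byte[]{1, 2, 3, 4, 5, 6}, 3, 2);
            System.out.println(java.util.Arrays.toString(rotated)); // [4, 1, 5, 2, 6, 3]
        }
    }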

+ 0 - 0
YZxing-lib/src/main/java/com/example/qrcode/decode/DecodeThread.java


Some files were not shown because too many files changed in this diff