From 178ffa120612a8b3d2f451ef7fe5ea4f56662d92 Mon Sep 17 00:00:00 2001
From: ouyangpeng
Date: Fri, 21 Jan 2022 11:30:42 +0800
Subject: [PATCH 1/3] 1. Optimize parts of the code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/src/main/cpp/JniImpl.cpp                  |   1 +
 .../openglcamera2/BaseRenderActivity.java     |   2 +-
 .../byteflow/openglcamera2/ImageActivity.java |   6 +-
 .../byteflow/openglcamera2/MainActivity.java  | 133 ++++--------------
 .../adapter/MyRecyclerViewAdapter.java        |  90 ++++++++++++
 .../camera/Camera2FrameCallback.java          |   1 +
 .../openglcamera2/camera/Camera2Wrapper.java  |  32 +++--
 .../openglcamera2/camera/CameraUtil.java      |  18 +--
 .../openglcamera2/frame/FrameUtil.java        |  15 +-
 .../gesture/MyGestureListener.java            |   4 +-
 .../openglcamera2/render/ByteFlowRender.java  |   2 +-
 .../render/GLByteFlowRender.java              |   9 +-
 12 files changed, 162 insertions(+), 151 deletions(-)
 create mode 100644 app/src/main/java/com/byteflow/openglcamera2/adapter/MyRecyclerViewAdapter.java

diff --git a/app/src/main/cpp/JniImpl.cpp b/app/src/main/cpp/JniImpl.cpp
index eb27c2d..bf6d737 100644
--- a/app/src/main/cpp/JniImpl.cpp
+++ b/app/src/main/cpp/JniImpl.cpp
@@ -61,6 +61,7 @@ JNIEXPORT void JNICALL native_UpdateFrame(JNIEnv *env, jobject instance, jint fo
     if(pContext)
         pContext->UpdateFrame(format, buf, width, height);
     delete[] buf;
+    env->DeleteLocalRef(bytes);
 }

 /*
diff --git a/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java b/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java
index dd978dd..54f04ce 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java
@@ -78,7 +78,7 @@ protected boolean hasPermissionsGranted(String[] permissions) {
     }

     public void updateTransformMatrix(String cameraId) {
-        if (Integer.valueOf(cameraId) == CameraCharacteristics.LENS_FACING_FRONT) {
+        if (Integer.parseInt(cameraId) == CameraCharacteristics.LENS_FACING_FRONT) {
             mByteFlowRender.setTransformMatrix(90, 0);
         } else {
             mByteFlowRender.setTransformMatrix(90, 1);
diff --git a/app/src/main/java/com/byteflow/openglcamera2/ImageActivity.java b/app/src/main/java/com/byteflow/openglcamera2/ImageActivity.java
index 15f0b83..6d4d7c9 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/ImageActivity.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/ImageActivity.java
@@ -37,7 +37,7 @@ protected void onCreate(@Nullable Bundle savedInstanceState) {
                 WindowManager.LayoutParams.FLAG_FULLSCREEN);
         setContentView(R.layout.activity_image);

-        mSurfaceViewRoot = (RelativeLayout) findViewById(R.id.surface_root);
+        mSurfaceViewRoot = findViewById(R.id.surface_root);
         RelativeLayout.LayoutParams p = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT,
                 RelativeLayout.LayoutParams.MATCH_PARENT);

@@ -158,7 +158,7 @@ public void onSwipe(MyGestureListener.SwipeDirection direction) {
         }
     }

-    public static final File getResultImgFile(final String ext) {
+    public static File getResultImgFile(final String ext) {
         final File dir = new File(Environment.getExternalStorageDirectory(), RESULT_IMG_DIR);
         Log.d(TAG, "path=" + dir.toString());
         dir.mkdirs();
@@ -168,7 +168,7 @@ public static final File getResultImgFile(final String ext) {
         return null;
     }

-    private static final String getDateTimeString() {
+    private static String getDateTimeString() {
         final GregorianCalendar now = new GregorianCalendar();
         return DateTime_FORMAT.format(now.getTime());
     }
diff --git a/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java b/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java
index 39d6ecf..b2627b8 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java
@@ -3,10 +3,7 @@
 import android.Manifest;
 import android.annotation.SuppressLint;
 import android.app.AlertDialog;
-import android.content.Context;
 import android.content.Intent;
-import android.graphics.Bitmap;
-import android.graphics.Color;
 import android.os.Build;
 import android.os.Bundle;
 import android.util.Log;
@@ -15,7 +12,6 @@
 import android.view.View;
 import android.view.Menu;
 import android.view.MenuItem;
-import android.view.ViewGroup;
 import android.view.ViewTreeObserver;
 import android.view.WindowManager;
 import android.widget.Button;
@@ -23,7 +19,6 @@
 import android.widget.RadioButton;
 import android.widget.RadioGroup;
 import android.widget.RelativeLayout;
-import android.widget.TextView;
 import android.widget.Toast;

 import androidx.annotation.NonNull;
@@ -32,6 +27,7 @@
 import androidx.recyclerview.widget.LinearLayoutManager;
 import androidx.recyclerview.widget.RecyclerView;

+import com.byteflow.openglcamera2.adapter.MyRecyclerViewAdapter;
 import com.byteflow.openglcamera2.camera.Camera2FrameCallback;
 import com.byteflow.openglcamera2.camera.Camera2Wrapper;
 import com.byteflow.openglcamera2.frame.ByteFlowFrame;
@@ -41,7 +37,6 @@

 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.List;

 import static android.opengl.GLSurfaceView.RENDERMODE_WHEN_DIRTY;
 import static com.byteflow.openglcamera2.render.ByteFlowRender.IMAGE_FORMAT_I420;
@@ -73,12 +68,10 @@ protected void onCreate(Bundle savedInstanceState) {
                 WindowManager.LayoutParams.FLAG_FULLSCREEN);
         setContentView(R.layout.activity_main);

-        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
+        FloatingActionButton fab = findViewById(R.id.fab);
         fab.setOnClickListener(new View.OnClickListener() {
             @Override
             public void onClick(View view) {
-//                Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
-//                        .setAction("Action", null).show();
                 if (mCamera2Wrapper != null) {
                     mCamera2Wrapper.capture();
                 }
@@ -150,10 +143,10 @@ public boolean onOptionsItemSelected(MenuItem item) {
                 String cameraId = mCamera2Wrapper.getCameraId();
                 String[] cameraIds = mCamera2Wrapper.getSupportCameraIds();
                 if (cameraIds != null) {
-                    for (int i = 0; i < cameraIds.length; i++) {
-                        if (!cameraIds[i].equals(cameraId)) {
-                            mCamera2Wrapper.updateCameraId(cameraIds[i]);
-                            updateTransformMatrix(cameraIds[i]);
+                    for (String s : cameraIds) {
+                        if (!s.equals(cameraId)) {
+                            mCamera2Wrapper.updateCameraId(s);
+                            updateTransformMatrix(s);
                             updateGLSurfaceViewSize(mCamera2Wrapper.getPreviewSize());
                             break;
                         }
@@ -185,9 +178,9 @@ public void onCaptureFrame(byte[] data, int width, int height) {
     }

     private void initViews() {
-        mSwitchCamBtn = (ImageButton) findViewById(R.id.switch_camera_btn);
-        mSwitchRatioBtn = (ImageButton) findViewById(R.id.switch_ratio_btn);
-        mSwitchFilterBtn = (ImageButton) findViewById(R.id.switch_filter_btn);
+        mSwitchCamBtn = findViewById(R.id.switch_camera_btn);
+        mSwitchRatioBtn = findViewById(R.id.switch_ratio_btn);
+        mSwitchFilterBtn = findViewById(R.id.switch_filter_btn);
         mSwitchCamBtn.bringToFront();
         mSwitchRatioBtn.bringToFront();
         mSwitchFilterBtn.bringToFront();
@@ -195,7 +188,7 @@ private void initViews() {
         mSwitchRatioBtn.setOnClickListener(this);
         mSwitchFilterBtn.setOnClickListener(this);

-        mSurfaceViewRoot = (RelativeLayout) findViewById(R.id.surface_root);
+        mSurfaceViewRoot = findViewById(R.id.surface_root);
         RelativeLayout.LayoutParams p = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT,
                 RelativeLayout.LayoutParams.MATCH_PARENT);
         mSurfaceViewRoot.addView(mGLSurfaceView, p);
@@ -203,12 +196,11 @@ private void initViews() {
         mByteFlowRender.loadShaderFromAssetsFile(mCurrentShaderIndex, getResources());

         mCamera2Wrapper = new Camera2Wrapper(this);
-        //mCamera2Wrapper.setDefaultPreviewSize(getScreenSize());

         ViewTreeObserver treeObserver = mSurfaceViewRoot.getViewTreeObserver();
         treeObserver.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
             @Override
-            public boolean onPreDraw() { 
+            public boolean onPreDraw() {
                 mSurfaceViewRoot.getViewTreeObserver().removeOnPreDrawListener(this);
                 mRootViewSize = new Size(mSurfaceViewRoot.getMeasuredWidth(), mSurfaceViewRoot.getMeasuredHeight());
                 updateGLSurfaceViewSize(mCamera2Wrapper.getPreviewSize());
@@ -245,7 +237,6 @@ private void showChangeSizeDialog() {
             }
         }

-
         final AlertDialog.Builder builder = new AlertDialog.Builder(this);
         LayoutInflater inflater = LayoutInflater.from(this);
         final View rootView = inflater.inflate(R.layout.resolution_selected_layout, null);
@@ -273,7 +264,7 @@ public void onItemClick(View view, int position) {
                 myPreviewSizeViewAdapter.notifyItemChanged(position);

                 String[] strs = previewSizeTitles.get(position).split("x");
-                Size updateSize = new Size(Integer.valueOf(strs[0]), Integer.valueOf(strs[1]));
+                Size updateSize = new Size(Integer.parseInt(strs[0]), Integer.parseInt(strs[1]));
                 Log.d(TAG, "onItemClick() called with: strs[0] = [" + strs[0] + "], strs[1] = [" + strs[1] + "]");
                 mCamera2Wrapper.updatePreviewSize(updateSize);
                 updateGLSurfaceViewSize(mCamera2Wrapper.getPreviewSize());
@@ -292,7 +283,7 @@ public void onItemClick(View view, int position) {
                 myCaptureSizeViewAdapter.notifyItemChanged(position);

                 String[] strs = captureSizeTitles.get(position).split("x");
-                Size updateSize = new Size(Integer.valueOf(strs[0]), Integer.valueOf(strs[1]));
+                Size updateSize = new Size(Integer.parseInt(strs[0]), Integer.parseInt(strs[1]));
                 Log.d(TAG, "onItemClick() called with: strs[0] = [" + strs[0] + "], strs[1] = [" + strs[1] + "]");
                 mCamera2Wrapper.updatePictureSize(updateSize);
                 updateGLSurfaceViewSize(mCamera2Wrapper.getPreviewSize());
@@ -368,12 +359,12 @@ public void onItemClick(View view, int position) {

                 mByteFlowRender.setParamsInt(PARAM_TYPE_SET_EXAMPLE, sampleType);

-                switch (sampleType) {
-                    case EXAMPLE_TYPE_KEY_CONVEYOR_BELT:
-                        break;
-                    default:
-                        break;
-                }
+//                switch (sampleType) {
+//                    case EXAMPLE_TYPE_KEY_CONVEYOR_BELT:
+//                        break;
+//                    default:
+//                        break;
+//                }

                 dialog.cancel();
             }
@@ -418,7 +409,7 @@ public void onSwipe(MyGestureListener.SwipeDirection direction) {
                     case ASCII_SHADER_INDEX:
                         loadRGBAImage(R.drawable.ascii_mapping, ASCII_SHADER_INDEX);
                         break;
-                    default: 
+                    default:
                 }

                 if (LUT_A_SHADER_INDEX <= mCurrentShaderIndex && mCurrentShaderIndEX <= LUT_D_SHADER_INDEX) {
@@ -474,10 +465,10 @@ public void onClick(View v) {
                 String cameraId = mCamera2Wrapper.getCameraId();
                 String[] cameraIds = mCamera2Wrapper.getSupportCameraIds();
                 if (cameraIds != null) {
-                    for (int i = 0; i < cameraIds.length; i++) {
-                        if (!cameraIds[i].equals(cameraId)) {
-                            mCamera2Wrapper.updateCameraId(cameraIds[i]);
-                            updateTransformMatrix(cameraIds[i]);
+                    for (String id : cameraIds) {
+                        if (!id.equals(cameraId)) {
+                            mCamera2Wrapper.updateCameraId(id);
+                            updateTransformMatrix(id);
                             updateGLSurfaceViewSize(mCamera2Wrapper.getPreviewSize());
                             break;
                         }
@@ -490,81 +481,7 @@ public void onClick(View v) {
             case R.id.switch_filter_btn:
                 showGLSampleDialog();
                 break;
-            default:
-        }
-    }
-
-    public static class MyRecyclerViewAdapter extends RecyclerView.Adapter<MyViewHolder> implements View.OnClickListener {
-        private List<String> mTitles;
-        private Context mContext;
-        private int mSelectIndex = 0;
-        private OnItemClickListener mOnItemClickListener = null;
-
-        public MyRecyclerViewAdapter(Context context, List<String> titles) {
-            mContext = context;
-            mTitles = titles;
-        }
-
-        public void setSelectIndex(int index) {
-            mSelectIndex = index;
-        }
-
-        public int getSelectIndex() {
-            return mSelectIndex;
-        }
-
-        public void addOnItemClickListener(OnItemClickListener onItemClickListener) {
-            mOnItemClickListener = onItemClickListener;
-        }
-
-        @NonNull
-        @Override
-        public MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
-            View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.resolution_item_layout, parent, false);
-            MyViewHolder myViewHolder = new MyViewHolder(view);
-            view.setOnClickListener(this);
-            return myViewHolder;
-        }
-
-        @Override
-        public void onBindViewHolder(@NonNull MyViewHolder holder, int position) {
-            holder.mTitle.setText(mTitles.get(position));
-            if (position == mSelectIndex) {
-                holder.mRadioButton.setChecked(true);
-                holder.mTitle.setTextColor(mContext.getResources().getColor(R.color.colorAccent));
-            } else {
-                holder.mRadioButton.setChecked(false);
-                holder.mTitle.setText(mTitles.get(position));
-                holder.mTitle.setTextColor(Color.GRAY);
-            }
-            holder.itemView.setTag(position);
-        }
-
-        @Override
-        public int getItemCount() {
-            return mTitles.size();
-        }
-
-        @Override
-        public void onClick(View v) {
-            if (mOnItemClickListener != null) {
-                mOnItemClickListener.onItemClick(v, (Integer) v.getTag());
-            }
-        }
-
-        public interface OnItemClickListener {
-            void onItemClick(View view, int position);
-        }
-
-        class MyViewHolder extends RecyclerView.ViewHolder {
-            RadioButton mRadioButton;
-            TextView mTitle;
-
-            public MyViewHolder(View itemView) {
-                super(itemView);
-                mRadioButton = itemView.findViewById(R.id.radio_btn);
-                mTitle = itemView.findViewById(R.id.item_title);
-            }
+            default:
         }
     }
 }
diff --git a/app/src/main/java/com/byteflow/openglcamera2/adapter/MyRecyclerViewAdapter.java b/app/src/main/java/com/byteflow/openglcamera2/adapter/MyRecyclerViewAdapter.java
new file mode 100644
index 0000000..4fb178a
--- /dev/null
+++ b/app/src/main/java/com/byteflow/openglcamera2/adapter/MyRecyclerViewAdapter.java
@@ -0,0 +1,90 @@
+package com.byteflow.openglcamera2.adapter;
+
+import android.content.Context;
+import android.graphics.Color;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.RadioButton;
+import android.widget.TextView;
+
+import androidx.annotation.NonNull;
+import androidx.recyclerview.widget.RecyclerView;
+
+import com.byteflow.openglcamera2.R;
+
+import java.util.List;
+
+public class MyRecyclerViewAdapter extends RecyclerView.Adapter<MyRecyclerViewAdapter.MyViewHolder> implements View.OnClickListener {
+    private final List<String> mTitles;
+    private final Context mContext;
+    private int mSelectIndex = 0;
+    private OnItemClickListener mOnItemClickListener = null;
+
+    public MyRecyclerViewAdapter(Context context, List<String> titles) {
+        mContext = context;
+        mTitles = titles;
+    }
+
+    public void setSelectIndex(int index) {
+        mSelectIndex = index;
+    }
+
+    public int getSelectIndex() {
+        return mSelectIndex;
+    }
+
+    public void addOnItemClickListener(OnItemClickListener onItemClickListener) {
+        mOnItemClickListener = onItemClickListener;
+    }
+
+    @NonNull
+    @Override
+    public MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
+        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.resolution_item_layout, parent, false);
+        MyViewHolder myViewHolder = new MyViewHolder(view);
+        view.setOnClickListener(this);
+        return myViewHolder;
+    }
+
+    @Override
+    public void onBindViewHolder(@NonNull MyViewHolder holder, int position) {
+        holder.mTitle.setText(mTitles.get(position));
+        if (position == mSelectIndex) {
+            holder.mRadioButton.setChecked(true);
+            holder.mTitle.setTextColor(mContext.getResources().getColor(R.color.colorAccent));
+        } else {
+            holder.mRadioButton.setChecked(false);
+            holder.mTitle.setText(mTitles.get(position));
+            holder.mTitle.setTextColor(Color.GRAY);
+        }
+        holder.itemView.setTag(position);
+    }
+
+    @Override
+    public int getItemCount() {
+        return mTitles.size();
+    }
+
+    @Override
+    public void onClick(View v) {
+        if (mOnItemClickListener != null) {
+            mOnItemClickListener.onItemClick(v, (Integer) v.getTag());
+        }
+    }
+
+    public interface OnItemClickListener {
+        void onItemClick(View view, int position);
+    }
+
+    static class MyViewHolder extends RecyclerView.ViewHolder {
+        RadioButton mRadioButton;
+        TextView mTitle;
+
+        public MyViewHolder(View itemView) {
+            super(itemView);
+            mRadioButton = itemView.findViewById(R.id.radio_btn);
+            mTitle = itemView.findViewById(R.id.item_title);
+        }
+    }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2FrameCallback.java b/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2FrameCallback.java
index c8505ce..19d9304 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2FrameCallback.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2FrameCallback.java
@@ -2,5 +2,6 @@
 public interface Camera2FrameCallback {
     void onPreviewFrame(byte[] data, int width, int height);
+
     void onCaptureFrame(byte[] data, int width, int height);
 }
diff --git a/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2Wrapper.java b/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2Wrapper.java
index 8ba4982..8fa3e7f 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2Wrapper.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/camera/Camera2Wrapper.java
@@ -27,6 +27,7 @@
 import java.util.Arrays;
 import java.util.List;
+import java.util.Objects;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;

@@ -35,8 +36,8 @@ public class Camera2Wrapper {
     private static final int DEFAULT_CAMERA_ID = 0;
     private final float THRESHOLD = 0.001f;

-    private Camera2FrameCallback mCamera2FrameCallback;
-    private Context mContext;
+    private final Camera2FrameCallback mCamera2FrameCallback;
+    private final Context mContext;

     private CameraManager mCameraManager;
     private CameraCaptureSession mCameraCaptureSession;
@@ -47,7 +48,7 @@ public class Camera2Wrapper {
     private ImageReader mPreviewImageReader, mCaptureImageReader;
     private Integer mSensorOrientation;
-    private Semaphore mCameraLock = new Semaphore(1);
+    private final Semaphore mCameraLock = new Semaphore(1);
     private Size mDefaultPreviewSize = new Size(1280, 720);
     private Size mDefaultCaptureSize = new Size(1280, 720);

@@ -59,7 +60,7 @@ public class Camera2Wrapper {
     private Handler mBackgroundHandler;
     private HandlerThread mBackgroundThread;

-    private ImageReader.OnImageAvailableListener mOnPreviewImageAvailableListener = new ImageReader.OnImageAvailableListener() {
+    private final ImageReader.OnImageAvailableListener mOnPreviewImageAvailableListener = new ImageReader.OnImageAvailableListener() {
         @Override
         public void onImageAvailable(ImageReader reader) {
             Image image = reader.acquireLatestImage();
@@ -72,7 +73,7 @@ public void onImageAvailable(ImageReader reader) {
         }
     };

-    private ImageReader.OnImageAvailableListener mOnCaptureImageAvailableListener = new ImageReader.OnImageAvailableListener() {
+    private final ImageReader.OnImageAvailableListener mOnCaptureImageAvailableListener = new ImageReader.OnImageAvailableListener() {
         @Override
         public void onImageAvailable(ImageReader reader) {
             Image image = reader.acquireLatestImage();
@@ -101,8 +102,7 @@ private void initCamera2Wrapper() {
         mCameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
         try {
             mSupportCameraIds = mCameraManager.getCameraIdList();
-            if (checkCameraIdSupport(String.valueOf(DEFAULT_CAMERA_ID))) {
-            } else {
+            if (!checkCameraIdSupport(String.valueOf(DEFAULT_CAMERA_ID))) {
                 throw new AndroidRuntimeException("Don't support the camera id: " + DEFAULT_CAMERA_ID);
             }
             mCameraId = String.valueOf(DEFAULT_CAMERA_ID);
@@ -120,7 +120,9 @@ private void getCameraInfo(String cameraId) {
         } catch (CameraAccessException e) {
             e.printStackTrace();
         }
-
+        if(characteristics == null){
+            return;
+        }
         StreamConfigurationMap streamConfigs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
         if (streamConfigs != null) {
             mSupportPreviewSize = Arrays.asList(streamConfigs.getOutputSizes(SurfaceTexture.class));
@@ -145,7 +147,7 @@ private void getCameraInfo(String cameraId) {

             if (supportDefaultSize) {
                 mPreviewSize = mDefaultPreviewSize;
-            } else if(sameRatioSize != null) {
+            } else if (sameRatioSize != null) {
                 mPreviewSize = sameRatioSize;
             }

@@ -168,7 +170,7 @@ private void getCameraInfo(String cameraId) {
             }
             if (supportDefaultSize) {
                 mPictureSize = mDefaultCaptureSize;
-            } else if(sameRatioSize != null) {
+            } else if (sameRatioSize != null) {
                 mPictureSize = sameRatioSize;
             }
         }
@@ -179,9 +181,10 @@ private void getCameraInfo(String cameraId) {

     private boolean checkCameraIdSupport(String cameraId) {
         boolean isSupported = false;
-        for (String id: mSupportCameraIds) {
+        for (String id : mSupportCameraIds) {
             if (cameraId.equals(id)) {
                 isSupported = true;
+                break;
             }
         }
         return isSupported;
@@ -338,7 +341,8 @@ public void onError(@NonNull CameraDevice cameraDevice, int error) {

     private void createCaptureSession() {
         try {
-            if (null == mCameraDevice || null == mPreviewSurface || null == mCaptureImageReader) return;
+            if (null == mCameraDevice || null == mPreviewSurface || null == mCaptureImageReader)
+                return;
             mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface, mCaptureImageReader.getSurface()),
                     mSessionStateCallback, mBackgroundHandler);

@@ -347,7 +351,7 @@ private void createCaptureSession() {
         }
     }

-    private CameraCaptureSession.StateCallback mSessionStateCallback = new CameraCaptureSession.StateCallback() {
+    private final CameraCaptureSession.StateCallback mSessionStateCallback = new CameraCaptureSession.StateCallback() {
         @Override
         public void onConfigured(@NonNull CameraCaptureSession session) {
             mCameraCaptureSession = session;
@@ -376,7 +380,7 @@ private CaptureRequest createPreviewRequest() {
             builder.addTarget(mPreviewSurface);
             return builder.build();
         } catch (CameraAccessException e) {
-            Log.e(TAG, e.getMessage());
+            Log.e(TAG, Objects.requireNonNull(e.getMessage()));
             return null;
         }
     }
diff --git a/app/src/main/java/com/byteflow/openglcamera2/camera/CameraUtil.java b/app/src/main/java/com/byteflow/openglcamera2/camera/CameraUtil.java
index eaac798..a604dc6 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/camera/CameraUtil.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/camera/CameraUtil.java
@@ -11,7 +11,7 @@ public class CameraUtil {

     public static Size getFitInScreenSize(int previewWidth, int previewHeight, int screenWidth, int screenHeight) {
         Point res = new Point(0, 0);
-        float ratioPreview = previewWidth *1f / previewHeight;
+        float ratioPreview = previewWidth * 1f / previewHeight;
         float ratioScreen = 0.0f;

         //landscape
@@ -19,20 +19,20 @@ public static Size getFitInScreenSize(int previewWidth, int previewHeight, int s
             ratioScreen = screenWidth * 1f / screenHeight;
             if (ratioPreview >= ratioScreen) {
                 res.x = screenWidth;
-                res.y = (int)(res.x * previewHeight * 1f / previewWidth);
-            }else {
+                res.y = (int) (res.x * previewHeight * 1f / previewWidth);
+            } else {
                 res.y = screenHeight;
-                res.x = (int)(res.y * previewWidth * 1f / previewHeight);
+                res.x = (int) (res.y * previewWidth * 1f / previewHeight);
             }
-        //portrait
-        }else {
+            //portrait
+        } else {
             ratioScreen = screenHeight * 1f / screenWidth;
             if (ratioPreview >= ratioScreen) {
                 res.y = screenHeight;
-                res.x = (int)(res.y * previewHeight * 1f / previewWidth);
-            }else {
+                res.x = (int) (res.y * previewHeight * 1f / previewWidth);
+            } else {
                 res.x = screenWidth;
-                res.y = (int)(res.x * previewWidth * 1f / previewHeight);
+                res.y = (int) (res.x * previewWidth * 1f / previewHeight);
             }
         }
         return new Size(res.x, res.y);
diff --git a/app/src/main/java/com/byteflow/openglcamera2/frame/FrameUtil.java b/app/src/main/java/com/byteflow/openglcamera2/frame/FrameUtil.java
index 67faed6..c4fe7bd 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/frame/FrameUtil.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/frame/FrameUtil.java
@@ -9,6 +9,7 @@
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.util.Objects;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

@@ -36,12 +37,12 @@ public static ByteFlowFrame decodeFrame(String filePath) {
         filePath.split("[\\d]+[xX]{1}[\\d]+");
         try {
             in = new FileInputStream(file);
-            byte[] data=new byte[in.available()];
+            byte[] data = new byte[in.available()];
             in.read(data);
             in.close();
-            return new ByteFlowFrame(data, getImageSize(filePath).getWidth(), getImageSize(filePath).getHeight());
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
+            return new ByteFlowFrame(data,
+                    Objects.requireNonNull(getImageSize(filePath)).getWidth(),
+                    Objects.requireNonNull(getImageSize(filePath)).getHeight());
         } catch (IOException e) {
             e.printStackTrace();
         }
@@ -58,7 +59,7 @@ public static String encodeFrame(ByteFlowFrame flowFrame) {
         String filePath = getBaseDirPath() + File.separator + stringBuilder.toString();
         File file = new File(filePath);
         try {
-            FileOutputStream outputStream =new FileOutputStream(file);
+            FileOutputStream outputStream = new FileOutputStream(file);
             outputStream.write(flowFrame.getData());
             outputStream.close();
             return filePath;
@@ -71,11 +72,11 @@ public static String encodeFrame(ByteFlowFrame flowFrame) {

     public static Size getImageSize(String filePath) {
         Pattern pattern = Pattern.compile("[\\d]+[xX]{1}[\\d]+");
-        Matcher matcher =pattern.matcher(filePath);
+        Matcher matcher = pattern.matcher(filePath);
         if (matcher.find()) {
             Log.d(TAG, "getImageSize() called with: filePath = [" + matcher.group() + "]");
             String[] strs = matcher.group().split("[xX]");
-            return new Size(Integer.valueOf(strs[0]), Integer.valueOf(strs[1]));
+            return new Size(Integer.parseInt(strs[0]), Integer.parseInt(strs[1]));
         }
         return null;
     }
diff --git a/app/src/main/java/com/byteflow/openglcamera2/gesture/MyGestureListener.java b/app/src/main/java/com/byteflow/openglcamera2/gesture/MyGestureListener.java
index bf6ab94..90cbfdd 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/gesture/MyGestureListener.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/gesture/MyGestureListener.java
@@ -13,8 +13,8 @@ public enum SwipeDirection {
     private static final int SWIPE_MIN_DISTANCE = 100;
     private static final int SWIPE_THRESHOLD_VELOCITY = 100;

-    private GestureDetector mGestureDetector;
-    private SimpleGestureListener mGestureListener;
+    private final GestureDetector mGestureDetector;
+    private final SimpleGestureListener mGestureListener;

     public MyGestureListener(Context context, SimpleGestureListener listener) {
         mGestureDetector = new GestureDetector(context, this);
diff --git a/app/src/main/java/com/byteflow/openglcamera2/render/ByteFlowRender.java b/app/src/main/java/com/byteflow/openglcamera2/render/ByteFlowRender.java
index 10c69a9..2e5ecac 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/render/ByteFlowRender.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/render/ByteFlowRender.java
@@ -10,7 +10,7 @@ public abstract class ByteFlowRender {
     public static final int IMAGE_FORMAT_I420 = 0x04;

     public static final int PARAM_TYPE_SET_SHADER_INDEX = 201;
-    public static final int PARAM_TYPE_SET_EXAMPLE = 202; 
+    public static final int PARAM_TYPE_SET_EXAMPLE = 202;
     public static final int EXAMPLE_TYPE = 300;
     public static final int EXAMPLE_TYPE_KEY_CONVEYOR_BELT = EXAMPLE_TYPE;
diff --git a/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java b/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java
index b1a1839..e0159a3 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java
@@ -15,6 +15,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
+import java.util.Arrays;

 import javax.microedition.khronos.egl.EGLConfig;
 import javax.microedition.khronos.opengles.GL10;
@@ -78,10 +79,9 @@ public void loadShaderFromAssetsFile(int shaderIndex, Resources r) {
             while ((ch = in.read()) != -1) {
                 baos.write(ch);
             }
-            byte[] buff = baos.toByteArray();
             baos.close();
             in.close();
-            result = new String(buff, "UTF-8");
+            result = baos.toString("UTF-8");
             result = result.replaceAll("\\r\\n", "\n");
         } catch (Exception e) {
             e.printStackTrace();
@@ -134,8 +134,7 @@ private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h) {
         Matrix matrix = new Matrix();
         matrix.setRotate(180);
         matrix.postScale(-1, 1);
-        Bitmap newBM = Bitmap.createBitmap(bitmap, 0, 0, w, h, matrix, false);
-        return newBM;
+        return Bitmap.createBitmap(bitmap, 0, 0, w, h, matrix, false);
     }

     public void readPixels(Size size, String imagePath)
@@ -159,8 +158,6 @@ private void saveToLocal(Bitmap bitmap, String imgPath) {
                 if(mCallback != null)
                     mCallback.onReadPixelsSaveToLocal(file.getAbsolutePath());
             }
             bitmap.recycle();
-        } catch (FileNotFoundException e) {
-            e.printStackTrace();
         } catch (IOException e) {
             e.printStackTrace();
         }
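[Editor's note] The JniImpl.cpp hunk in patch 1 adds env->DeleteLocalRef(bytes) after the native frame buffer is consumed. Releasing the local reference eagerly keeps the JNI local-reference table small when frames arrive continuously from the preview loop. A minimal sketch of that pattern in isolation, with hypothetical package and function names that are not part of this repository:

    // Copy the Java byte[] out of the VM, hand it to native code, then drop
    // both the native copy and the local reference as soon as possible.
    #include <jni.h>

    extern "C" JNIEXPORT void JNICALL
    Java_com_example_demo_NativeBridge_updateFrame(JNIEnv *env, jobject /*thiz*/,
                                                   jint format, jbyteArray bytes,
                                                   jint width, jint height) {
        jsize len = env->GetArrayLength(bytes);
        jbyte *buf = new jbyte[len];
        env->GetByteArrayRegion(bytes, 0, len, buf);  // copy pixels out of the VM heap
        // ... pass (format, buf, width, height) to the renderer here ...
        delete[] buf;               // free the native copy once consumed
        env->DeleteLocalRef(bytes); // mirrors the fix: release the local ref early
    }
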
From d6b0aa1233333c9520ee13596ffc2065978c0bef Mon Sep 17 00:00:00 2001
From: ouyangpeng
Date: Fri, 21 Jan 2022 11:51:18 +0800
Subject: [PATCH 2/3] 1. Upgrade Gradle 2. Upgrade CMake 3. Fix the .so build
 problem caused by upgrading to Gradle 4.0 and later
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/CMakeLists.txt                             | 66 ----------------
 app/build.gradle                               |  3 +-
 app/src/main/cpp/CMakeLists.txt                | 71 ++++++++++++++++++
 .../arm64-v8a/libopencv_java3.so               | Bin
 .../armeabi-v7a/libopencv_java3.so             | Bin
 build.gradle                                   |  2 +-
 gradle/wrapper/gradle-wrapper.properties       |  2 +-
 7 files changed, 75 insertions(+), 69 deletions(-)
 delete mode 100644 app/CMakeLists.txt
 create mode 100644 app/src/main/cpp/CMakeLists.txt
 rename app/src/main/{jniLibs => thirdPartyLibs}/arm64-v8a/libopencv_java3.so (100%)
 rename app/src/main/{jniLibs => thirdPartyLibs}/armeabi-v7a/libopencv_java3.so (100%)

diff --git a/app/CMakeLists.txt b/app/CMakeLists.txt
deleted file mode 100644
index 142b16e..0000000
--- a/app/CMakeLists.txt
+++ /dev/null
@@ -1,66 +0,0 @@
-# For more information about using CMake with Android Studio, read the
-# documentation: https://d.android.com/studio/projects/add-native-code.html
-
-# Sets the minimum version of CMake required to build the native library.
-cmake_minimum_required(VERSION 3.4.1)
-
-# Creates and names a library, sets it as either STATIC
-# or SHARED, and provides the relative paths to its source code.
-# You can define multiple libraries, and CMake builds them for you.
-# Gradle automatically packages shared libraries with your APK.
-
-include_directories(
-        src/main/cpp/util
-        src/main/cpp/render
-        src/main/cpp/glm
-        src/main/cpp/context
-        src/main/cpp/example
-        src/main/cpp/thirdparty
-)
-
-add_library( # Sets the name of the library.
-        byteflow_render
-
-        # Sets the library as a shared library.
-        SHARED
-
-        # Provides a relative path to your source file(s).
-        src/main/cpp/JniImpl.cpp
-        src/main/cpp/render/GLByteFlowRender.cpp
-        src/main/cpp/render/GLUtils.cpp
-        src/main/cpp/context/RenderContext.cpp
-        src/main/cpp/example/ConveyorBeltExample_1.cpp
-        src/main/cpp/example/BluelineChallengeExample.cpp
-        )
-
-# Searches for a specified prebuilt library and stores the path as a
-# variable. Because CMake includes system libraries in the search path by
-# default, you only need to specify the name of the public NDK library
-# you want to add. CMake verifies that the library exists before
-# completing its build.
-
-find_library( # Sets the name of the path variable.
-        log-lib
-
-        # Specifies the name of the NDK library that
-        # you want CMake to locate.
-        log )
-
-
-add_library(opencv_java3 SHARED IMPORTED )
-set_target_properties(opencv_java3 PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libopencv_java3.so)
-
-# Specifies libraries CMake should link to your target library. You
-# can link multiple libraries, such as libraries you define in this
-# build script, prebuilt third-party libraries, or system libraries.
-
-target_link_libraries( # Specifies the target library.
-        byteflow_render
-
-        # Links the target library to the log library
-        # included in the NDK.
-        ${log-lib}
-        GLESv3
-        android
-        opencv_java3
-        )
\ No newline at end of file
diff --git a/app/build.gradle b/app/build.gradle
index 3ab965a..66c1c9e 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -32,7 +32,8 @@ android {
     }
     externalNativeBuild {
         cmake {
-            path file('CMakeLists.txt')
+            path "src/main/cpp/CMakeLists.txt"
+            version "3.10.2"
         }
     }
 }
diff --git a/app/src/main/cpp/CMakeLists.txt b/app/src/main/cpp/CMakeLists.txt
new file mode 100644
index 0000000..ce1805a
--- /dev/null
+++ b/app/src/main/cpp/CMakeLists.txt
@@ -0,0 +1,71 @@
+# For more information about using CMake with Android Studio, read the
+# documentation: https://d.android.com/studio/projects/add-native-code.html
+
+# Sets the minimum version of CMake required to build the native library.
+cmake_minimum_required(VERSION 3.10.2)
+
+# Creates and names a library, sets it as either STATIC
+# or SHARED, and provides the relative paths to its source code.
+# You can define multiple libraries, and CMake builds them for you.
+# Gradle automatically packages shared libraries with your APK.
+
+include_directories(
+        util
+        render
+        glm
+        context
+        example
+        thirdparty
+)
+
+add_library( # Sets the name of the library.
+        byteflow_render
+
+        # Sets the library as a shared library.
+        SHARED
+
+        # Provides a relative path to your source file(s).
+        JniImpl.cpp
+        render/GLByteFlowRender.cpp
+        render/GLUtils.cpp
+        context/RenderContext.cpp
+        example/ConveyorBeltExample_1.cpp
+        example/BluelineChallengeExample.cpp
+        )
+
+# Searches for a specified prebuilt library and stores the path as a
+# variable. Because CMake includes system libraries in the search path by
+# default, you only need to specify the name of the public NDK library
+# you want to add. CMake verifies that the library exists before
+# completing its build.
+
+find_library( # Sets the name of the path variable.
+        log-lib
+
+        # Specifies the name of the NDK library that
+        # you want CMake to locate.
+        log)
+
+# Set the thirdPartyLibs directory; the ${thirdPartyLibs} variable is referenced below
+set(thirdPartyLibs "${CMAKE_SOURCE_DIR}/../thirdPartyLibs")
+
+add_library(opencv_java3 SHARED IMPORTED)
+set_target_properties(opencv_java3
+        PROPERTIES
+        IMPORTED_LOCATION
+        ${thirdPartyLibs}/${CMAKE_ANDROID_ARCH_ABI}/libopencv_java3.so)
+
+# Specifies libraries CMake should link to your target library. You
+# can link multiple libraries, such as libraries you define in this
+# build script, prebuilt third-party libraries, or system libraries.
+
+target_link_libraries( # Specifies the target library.
+        byteflow_render
+
+        # Links the target library to the log library
+        # included in the NDK.
+        ${log-lib}
+        GLESv3
+        android
+        opencv_java3
+        )
\ No newline at end of file
diff --git a/app/src/main/jniLibs/arm64-v8a/libopencv_java3.so b/app/src/main/thirdPartyLibs/arm64-v8a/libopencv_java3.so
similarity index 100%
rename from app/src/main/jniLibs/arm64-v8a/libopencv_java3.so
rename to app/src/main/thirdPartyLibs/arm64-v8a/libopencv_java3.so
diff --git a/app/src/main/jniLibs/armeabi-v7a/libopencv_java3.so b/app/src/main/thirdPartyLibs/armeabi-v7a/libopencv_java3.so
similarity index 100%
rename from app/src/main/jniLibs/armeabi-v7a/libopencv_java3.so
rename to app/src/main/thirdPartyLibs/armeabi-v7a/libopencv_java3.so
diff --git a/build.gradle b/build.gradle
index a5bb815..801467d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -7,7 +7,7 @@ buildscript {
     }

     dependencies {
-        classpath 'com.android.tools.build:gradle:3.5.3'
+        classpath 'com.android.tools.build:gradle:4.1.3'

         // NOTE: Do not place your application dependencies here; they belong
         // in the individual module build.gradle files
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index e99f528..c1c1101 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip
\ No newline at end of file

From aa74cf99063aae3c4000d86568b0091d2c9ee622 Mon Sep 17 00:00:00 2001
From: ouyangpeng
Date: Fri, 21 Jan 2022 18:01:35 +0800
Subject: [PATCH 3/3] 1. Optimize the code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/src/main/assets/shaders/fshader_0.glsl    |  35 +-
 app/src/main/assets/shaders/fshader_1.glsl    |  31 +-
 app/src/main/assets/shaders/fshader_10.glsl   |  69 +--
 app/src/main/assets/shaders/fshader_11.glsl   |  36 +-
 app/src/main/assets/shaders/fshader_12.glsl   |  31 +-
 app/src/main/assets/shaders/fshader_13.glsl   |  81 ++--
 app/src/main/assets/shaders/fshader_14.glsl   |  34 +-
 app/src/main/assets/shaders/fshader_15.glsl   |  30 +-
 app/src/main/assets/shaders/fshader_16.glsl   |  42 +-
 app/src/main/assets/shaders/fshader_17.glsl   |  44 +-
 app/src/main/assets/shaders/fshader_18.glsl   |  29 +-
 app/src/main/assets/shaders/fshader_19.glsl   |  39 +-
 app/src/main/assets/shaders/fshader_2.glsl    |  43 +-
 app/src/main/assets/shaders/fshader_23.glsl   |  44 +-
 app/src/main/assets/shaders/fshader_24.glsl   |  69 +--
 app/src/main/assets/shaders/fshader_25.glsl   |  29 +-
 app/src/main/assets/shaders/fshader_26.glsl   |  31 +-
 app/src/main/assets/shaders/fshader_27.glsl   |  47 +-
 app/src/main/assets/shaders/fshader_28.glsl   |  40 +-
 app/src/main/assets/shaders/fshader_29.glsl   |  30 +-
 app/src/main/assets/shaders/fshader_3.glsl    |  35 +-
 app/src/main/assets/shaders/fshader_30.glsl   |  66 +--
 app/src/main/assets/shaders/fshader_31.glsl   |  33 +-
 app/src/main/assets/shaders/fshader_4.glsl    |  31 +-
 app/src/main/assets/shaders/fshader_5.glsl    |  34 +-
 app/src/main/assets/shaders/fshader_6.glsl    |  34 +-
 app/src/main/assets/shaders/fshader_7.glsl    |  40 +-
 app/src/main/assets/shaders/fshader_8.glsl    |  41 +-
 app/src/main/assets/shaders/fshader_9.glsl    |  46 +-
 app/src/main/cpp/context/RenderContext.cpp    | 433 +++++++++---------
 .../cpp/example/BluelineChallengeExample.cpp  |   6 +-
 .../cpp/example/BluelineChallengeExample.h    |   2 +-
app/src/main/cpp/render/ByteFlowDef.h | 269 +++++------ app/src/main/cpp/render/GLByteFlowRender.cpp | 30 +- app/src/main/cpp/render/GLUtils.cpp | 8 +- .../openglcamera2/BaseRenderActivity.java | 2 +- .../byteflow/openglcamera2/MainActivity.java | 79 ++-- .../render/GLByteFlowRender.java | 1 + 38 files changed, 1129 insertions(+), 895 deletions(-) diff --git a/app/src/main/assets/shaders/fshader_0.glsl b/app/src/main/assets/shaders/fshader_0.glsl index 1923312..2da5126 100644 --- a/app/src/main/assets/shaders/fshader_0.glsl +++ b/app/src/main/assets/shaders/fshader_0.glsl @@ -4,15 +4,30 @@ varying vec2 v_texcoord; uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; + +vec4 YuvToRgb(vec2 uv) { + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0 //第三列 + ) * yuv; + return vec4(rgb, 1.0); +} + + void main() { - float y, u, v, r, g, b; - y = texture2D(s_textureY, v_texcoord).r; - u = texture2D(s_textureU, v_texcoord).r; - v = texture2D(s_textureV, v_texcoord).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - gl_FragColor = vec4(r, g, b, 1.0); + gl_FragColor = YuvToRgb(v_texcoord); } \ No newline at end of file diff --git a/app/src/main/assets/shaders/fshader_1.glsl b/app/src/main/assets/shaders/fshader_1.glsl index 7f9270d..05d2db2 100644 --- a/app/src/main/assets/shaders/fshader_1.glsl +++ b/app/src/main/assets/shaders/fshader_1.glsl @@ -4,18 +4,29 @@ varying vec2 v_texcoord; uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } + void main() { vec4 sample0, sample1, sample2, sample3; float blurStep = 0.5; diff --git a/app/src/main/assets/shaders/fshader_10.glsl b/app/src/main/assets/shaders/fshader_10.glsl index 2874605..875ef30 100644 --- a/app/src/main/assets/shaders/fshader_10.glsl +++ b/app/src/main/assets/shaders/fshader_10.glsl @@ -14,18 +14,29 @@ float satLevels[kSatLevCount]; float valLevels[kValLevCount]; float edge_thres = 0.2; float edge_thres2 = 5.0; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 
0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } + vec3 RGBtoHSV(float r, float g, float b) { float minv, maxv, delta; vec3 res; @@ -41,32 +52,33 @@ vec3 RGBtoHSV(float r, float g, float b) { return res; } if (r == maxv) - res.x = ( g - b ) / delta; + res.x = (g - b) / delta; else if (g == maxv) - res.x = 2.0 + ( b - r ) / delta; + res.x = 2.0 + (b - r) / delta; else - res.x = 4.0 + ( r - g ) / delta; + res.x = 4.0 + (r - g) / delta; res.x = res.x * 60.0; - if(res.x < 0.0) + if (res.x < 0.0) res.x = res.x + 360.0; return res; } -vec3 HSVtoRGB(float h, float s, float v ) { + +vec3 HSVtoRGB(float h, float s, float v) { int i; float f, p, q, t; vec3 res; - if(s == 0.0) { + if (s == 0.0) { res.x = v; res.y = v; res.z = v; return res; } h /= 60.0; - i = int(floor( h )); + i = int(floor(h)); f = h - float(i); - p = v * ( 1.0 - s ); - q = v * ( 1.0 - s * f ); - t = v * ( 1.0 - s * ( 1.0 - f ) ); + p = v * (1.0 - s); + q = v * (1.0 - s * f); + t = v * (1.0 - s * (1.0 - f)); if (i == 0) { res.x = v; res.y = t; @@ -94,12 +106,13 @@ vec3 HSVtoRGB(float h, float s, float v ) { } return res; } + float nearestLevel(float col, int mode) { int levCount; if (mode==0) levCount = kHueLevCount; if (mode==1) levCount = kSatLevCount; if (mode==2) levCount = kValLevCount; - for (int i=0; i= hueLevels[i] && col <= hueLevels[i+1]) { return hueLevels[i+1]; @@ -118,12 +131,15 @@ float nearestLevel(float col, int mode) { } return 0.0; } + float avgIntensity(vec4 pix) { return (pix.r + pix.g + pix.b)/3.; } + vec4 getPixel(vec2 coords, float dx, float dy) { return YuvToRgb(coords + vec2(dx, dy)); } + float IsEdge(in vec2 coords) { float dxtex = 1.0 / float(texSize.x); float dytex = 1.0 / float(texSize.y); @@ -131,14 +147,15 @@ float IsEdge(in vec2 coords) { int k = -1; float delta; for (int i=-1; i<2; i++) { - for(int j=-1; j<2; j++) { + for (int j=-1; j<2; j++) { k++; - pix[k] = avgIntensity(getPixel(coords,float(i)*dxtex, float(j)*dytex)); + pix[k] = avgIntensity(getPixel(coords, float(i)*dxtex, float(j)*dytex)); } } delta = (abs(pix[1]-pix[7]) + abs(pix[5]-pix[3]) + abs(pix[0]-pix[8])+ abs(pix[2]-pix[6]))/4.; - return clamp(edge_thres2*delta,0.0,1.0); + return clamp(edge_thres2*delta, 0.0, 1.0); } + void main() { hueLevels[0] = 0.0; hueLevels[1] = 140.0; @@ -159,11 +176,11 @@ void main() { valLevels[3] = 1.0; vec2 uv = v_texcoord; vec3 color = YuvToRgb(uv).rgb; - vec3 vHSV = RGBtoHSV(color.r, color.g, color.b); + vec3 vHSV = RGBtoHSV(color.r, color.g, color.b); vHSV.x = nearestLevel(vHSV.x, 0); vHSV.y = nearestLevel(vHSV.y, 1); vHSV.z = nearestLevel(vHSV.z, 2); float edg = IsEdge(uv); - vec3 vRGB = (edg >= edge_thres) ? vec3(0.0,0.0,0.0) : HSVtoRGB(vHSV.x,vHSV.y,vHSV.z); + vec3 vRGB = (edg >= edge_thres) ? 
vec3(0.0, 0.0, 0.0) : HSVtoRGB(vHSV.x, vHSV.y, vHSV.z); gl_FragColor = vec4(vRGB.x, vRGB.y, vRGB.z, 1.0); } diff --git a/app/src/main/assets/shaders/fshader_11.glsl b/app/src/main/assets/shaders/fshader_11.glsl index 09cecc5..eadaa08 100644 --- a/app/src/main/assets/shaders/fshader_11.glsl +++ b/app/src/main/assets/shaders/fshader_11.glsl @@ -5,17 +5,31 @@ varying vec2 v_texcoord; uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; + +vec4 YuvToRgb(vec2 uv) { + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); +} + void main() { - float y, u, v, r, g, b; - y = texture2D(s_textureY, v_texcoord).r; - u = texture2D(s_textureU, v_texcoord).r; - v = texture2D(s_textureV, v_texcoord).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - vec3 color = vec3(r, g, b); + vec3 color = YuvToRgb(v_texcoord).rgb; vec2 uv = v_texcoord.xy; vec3 colors[3]; colors[0] = vec3(0.,0.,1.); @@ -23,6 +37,6 @@ void main() { colors[2] = vec3(1.,0.,0.); float lum = (color.r + color.g + color.b)/3.; int idx = (lum < 0.5) ? 0 : 1; - vec3 rgb = mix(colors[idx],colors[idx+1],(lum-float(idx)*0.5)/0.5); + vec3 rgb = mix(colors[idx],colors[idx + 1],(lum - float(idx) * 0.5) / 0.5); gl_FragColor = vec4(rgb, 1.0); } \ No newline at end of file diff --git a/app/src/main/assets/shaders/fshader_12.glsl b/app/src/main/assets/shaders/fshader_12.glsl index 8efe808..6663348 100644 --- a/app/src/main/assets/shaders/fshader_12.glsl +++ b/app/src/main/assets/shaders/fshader_12.glsl @@ -6,18 +6,29 @@ uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; uniform vec2 texSize; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } + void main() { vec4 color; color.rgb = vec3(0.5); diff --git a/app/src/main/assets/shaders/fshader_13.glsl b/app/src/main/assets/shaders/fshader_13.glsl index a5f9f7b..71c0237 100644 --- a/app/src/main/assets/shaders/fshader_13.glsl +++ b/app/src/main/assets/shaders/fshader_13.glsl @@ -5,45 +5,56 @@ uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; uniform vec2 texSize; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = 
texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } + void main() { - float newY, newX; - if(v_texcoord.y <= 1.0/3.0) - { - newY = v_texcoord.y + 1.0/3.0; - } - else if(1.0/3.0 <= v_texcoord.y && v_texcoord.y <= 2.0/3.0) - { - newY = v_texcoord.y; - } - else - { - newY = v_texcoord.y - 1.0/3.0; - } + // 九分屏 九分屏是4分屏的演变 + // 纹理坐标x、y变化规则: + // 当 x 在[0, 1/3]范围时,x = x*3 + // 当 x 在[1/3, 2/3]范围时,x = (x-1/3)*3 + // 当 x 在[2/3, 1]范围时,x = (x-2/3)*3 - if(v_texcoord.x <= 1.0/3.0) - { - newX = v_texcoord.x + 1.0/3.0; - } - else if(1.0/3.0 <= v_texcoord.x && v_texcoord.x <= 2.0/3.0) - { - newX = v_texcoord.x; + // 当 y 在[0, 1/3]范围时,y= y*3 + // 当 y 在[1/3, 2/3]范围时,y = (y-1/3)*3 + // 当 y在[2/3, 1]范围时,y = (y-2/3)*3 + + // 链接:https://juejin.cn/post/6859934701932118024 + // https://blog.csdn.net/lin1109221208/article/details/107900718 + lowp vec2 uv = v_texcoord.xy; + if (uv.x < 1.0 / 3.0) { + uv.x = uv.x * 3.0; + } else if (uv.x < 2.0 / 3.0) { + uv.x = (uv.x - 1.0 / 3.0) * 3.0; + } else { + uv.x = (uv.x - 2.0 / 3.0) * 3.0; } - else - { - newX = v_texcoord.x - 1.0/3.0; + if (uv.y <= 1.0 / 3.0) { + uv.y = uv.y * 3.0; + } else if (uv.y < 2.0 / 3.0) { + uv.y = (uv.y - 1.0 / 3.0) * 3.0; + } else { + uv.y = (uv.y - 2.0 / 3.0) * 3.0; } - - gl_FragColor = YuvToRgb(vec2(newX, newY)); + gl_FragColor = YuvToRgb(uv); } \ No newline at end of file diff --git a/app/src/main/assets/shaders/fshader_14.glsl b/app/src/main/assets/shaders/fshader_14.glsl index 3d09761..bad976d 100644 --- a/app/src/main/assets/shaders/fshader_14.glsl +++ b/app/src/main/assets/shaders/fshader_14.glsl @@ -6,20 +6,30 @@ uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; uniform float u_offset; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } -void main() -{ + +void main(){ vec4 originColor = YuvToRgb(v_texcoord); vec4 offsetColor0 = YuvToRgb(vec2(v_texcoord.x + u_offset, v_texcoord.y + u_offset)); vec4 offsetColor1 = YuvToRgb(vec2(v_texcoord.x - u_offset, v_texcoord.y - u_offset)); diff --git a/app/src/main/assets/shaders/fshader_15.glsl 
b/app/src/main/assets/shaders/fshader_15.glsl index 7c3c7bc..a54eba5 100644 --- a/app/src/main/assets/shaders/fshader_15.glsl +++ b/app/src/main/assets/shaders/fshader_15.glsl @@ -5,17 +5,27 @@ uniform lowp sampler2D s_textureY; uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; uniform float u_offset; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } void main() { diff --git a/app/src/main/assets/shaders/fshader_16.glsl b/app/src/main/assets/shaders/fshader_16.glsl index d94349c..0e1e4db 100644 --- a/app/src/main/assets/shaders/fshader_16.glsl +++ b/app/src/main/assets/shaders/fshader_16.glsl @@ -7,28 +7,34 @@ uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; uniform float u_offset; uniform vec2 texSize; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } -void main() -{ +void main() { vec2 imgTex = v_texcoord * texSize; - float r = (u_offset + 0.208 ) * texSize.x; - if(distance(imgTex, vec2(texSize.x / 2.0, texSize.y / 2.0)) < r) - { + float r = (u_offset + 0.208) * texSize.x; + if (distance(imgTex, vec2(texSize.x / 2.0, texSize.y / 2.0)) < r){ gl_FragColor = YuvToRgb(v_texcoord); - } - else - { + } else { gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); } } \ No newline at end of file diff --git a/app/src/main/assets/shaders/fshader_17.glsl b/app/src/main/assets/shaders/fshader_17.glsl index 3d2ba38..7c7e558 100644 --- a/app/src/main/assets/shaders/fshader_17.glsl +++ b/app/src/main/assets/shaders/fshader_17.glsl @@ -7,25 +7,36 @@ uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; uniform float u_offset; uniform vec2 texSize; + vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B 
= Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } + const float PI = 3.141592653; + void main() { vec2 imgTex = v_texcoord * texSize; float r = 0.3 * texSize.x; - if(distance(imgTex, vec2(texSize.x / 2.0, texSize.y / 2.0)) < r) - { + if (distance(imgTex, vec2(texSize.x / 2.0, texSize.y / 2.0)) < r){ vec2 tranTex = v_texcoord - 0.5; vec2 imgTranTex = tranTex * texSize; float len = length(imgTranTex); @@ -33,10 +44,7 @@ void main() angle = acos(imgTranTex.x / len); - if(tranTex.y < 0.0) - { - angle *= -1.0; - } + if (tranTex.y < 0.0) { angle *= -1.0; } angle -= u_offset; @@ -46,9 +54,7 @@ void main() vec2 newTexCoors = imgTranTex / texSize + 0.5; gl_FragColor = YuvToRgb(newTexCoors); - } - else - { + } else { gl_FragColor = YuvToRgb(v_texcoord); } } \ No newline at end of file diff --git a/app/src/main/assets/shaders/fshader_18.glsl b/app/src/main/assets/shaders/fshader_18.glsl index fd068f8..41c079e 100644 --- a/app/src/main/assets/shaders/fshader_18.glsl +++ b/app/src/main/assets/shaders/fshader_18.glsl @@ -10,16 +10,25 @@ uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } float nrand(in float x,in float y){ diff --git a/app/src/main/assets/shaders/fshader_19.glsl b/app/src/main/assets/shaders/fshader_19.glsl index cd23249..9975ad4 100644 --- a/app/src/main/assets/shaders/fshader_19.glsl +++ b/app/src/main/assets/shaders/fshader_19.glsl @@ -10,16 +10,25 @@ uniform lowp sampler2D s_textureU; uniform lowp sampler2D s_textureV; vec4 YuvToRgb(vec2 uv) { - float y, u, v, r, g, b; - y = texture2D(s_textureY, uv).r; - u = texture2D(s_textureU, uv).r; - v = texture2D(s_textureV, uv).r; - u = u - 0.5; - v = v - 0.5; - r = y + 1.403 * v; - g = y - 0.344 * u - 0.714 * v; - b = y + 1.770 * u; - return vec4(r, g, b, 1.0); + // YUV转RGB + // R = Y + 1.402 * (V - 128) + // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128) + // B = Y + 1.772 * (U - 128) + + vec3 yuv; + // 只赋值x 则会出现黑白照片效果 + yuv.x = texture2D(s_textureY, uv).r; + // 因为YUV转RGB用的是Y、U-128和V-128,texture函数返回向量值的范围是0-1.0,128表示0.5, 所以要减去0.5 + yuv.y = texture2D(s_textureU, uv).r - 0.5; + yuv.z = texture2D(s_textureV, uv).r - 0.5; + + // YUV 和 RGB 的转换矩阵 + highp vec3 rgb = mat3( + 1.0, 1.0, 1.0, //第一列 + 0.0, -0.34414, 1.772, //第二列 + 1.403, -0.71414, 0.0//第三列 + ) * yuv; + return vec4(rgb, 1.0); } vec4 LutFilter(vec2 texCoord) @@ -57,14 +66,10 @@ vec4 
diff --git a/app/src/main/assets/shaders/fshader_18.glsl b/app/src/main/assets/shaders/fshader_18.glsl
index fd068f8..41c079e 100644
--- a/app/src/main/assets/shaders/fshader_18.glsl
+++ b/app/src/main/assets/shaders/fshader_18.glsl
@@ -10,16 +10,25 @@ uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;

 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }

 float nrand(in float x,in float y){
diff --git a/app/src/main/assets/shaders/fshader_19.glsl b/app/src/main/assets/shaders/fshader_19.glsl
index cd23249..9975ad4 100644
--- a/app/src/main/assets/shaders/fshader_19.glsl
+++ b/app/src/main/assets/shaders/fshader_19.glsl
@@ -10,16 +10,25 @@ uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;

 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }

 vec4 LutFilter(vec2 texCoord)
@@ -57,14 +66,10 @@ vec4 LutFilter(vec2 texCoord)
     return mix(textureColor, vec4(newColor.rgb, textureColor.w), 1.0);
 }

-void main()
-{
-    if(v_texcoord.y > 0.5)
-    {
+void main() {
+    if (v_texcoord.y > 0.5) {
         gl_FragColor = LutFilter(v_texcoord);
-    }
-    else
-    {
+    } else {
         gl_FragColor = YuvToRgb(v_texcoord);
     }
 }
\ No newline at end of file
diff --git a/app/src/main/assets/shaders/fshader_2.glsl b/app/src/main/assets/shaders/fshader_2.glsl
index 583df09..2d9d4ab 100644
--- a/app/src/main/assets/shaders/fshader_2.glsl
+++ b/app/src/main/assets/shaders/fshader_2.glsl
@@ -6,30 +6,41 @@ uniform lowp sampler2D s_textureY;
 uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;
 uniform vec2 texSize;
+
 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }
+
 void main() {
     vec2 pos = v_texcoord.xy;
     vec2 onePixel = vec2(1, 1) / texSize;
     vec4 color = vec4(0);
     mat3 edgeDetectionKernel = mat3(
-        -1, -1, -1,
-        -1, 8, -1,
-        -1, -1, -1
+    -1, -1, -1,
+    -1, 8, -1,
+    -1, -1, -1
     );
-    for(int i = 0; i < 3; i++) {
-        for(int j = 0; j < 3; j++) {
-            vec2 samplePos = pos + vec2(i - 1 , j - 1) * onePixel;
+    for (int i = 0; i < 3; i++) {
+        for (int j = 0; j < 3; j++) {
+            vec2 samplePos = pos + vec2(i - 1, j - 1) * onePixel;
             vec4 sampleColor = YuvToRgb(samplePos);
             sampleColor *= edgeDetectionKernel[i][j];
             color += sampleColor;
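Aside (not part of the patch): fshader_2 above is a plain 3x3 convolution with a zero-sum edge-detection kernel, so flat regions come out near zero and edges produce large values. A C++ sketch of the same loop over a grayscale image; the border clamping stands in for GL_CLAMP_TO_EDGE-style sampling and is an assumption, not code from this repo.

#include <algorithm>
#include <vector>

// Apply the 3x3 edge kernel at pixel (x, y) of a w-by-h grayscale image.
float ConvolveAt(const std::vector<float> &img, int w, int h, int x, int y) {
    static const float kernel[3][3] = {
        {-1, -1, -1},
        {-1,  8, -1},
        {-1, -1, -1},
    };
    float sum = 0.0f;
    for (int i = 0; i < 3; i++) {
        for (int j = 0; j < 3; j++) {
            int sx = std::min(std::max(x + i - 1, 0), w - 1); // clamp to the border
            int sy = std::min(std::max(y + j - 1, 0), h - 1);
            sum += img[sy * w + sx] * kernel[i][j];
        }
    }
    return sum;
}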
diff --git a/app/src/main/assets/shaders/fshader_23.glsl b/app/src/main/assets/shaders/fshader_23.glsl
index 7f6cb45..f722e1b 100644
--- a/app/src/main/assets/shaders/fshader_23.glsl
+++ b/app/src/main/assets/shaders/fshader_23.glsl
@@ -7,18 +7,29 @@ uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;
 uniform float u_offset;
 uniform vec2 texSize;
+
 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }
+
 void main()
 {
     vec2 imgTexCoord = v_texcoord * texSize;
@@ -29,15 +40,12 @@ void main()
     float offset = u_offset * maxOffset;

-    if(offset <= x
-       && x <= sideLength - offset
-       && offset <= y
-       && y <= sideLength - offset)
-    {
+    if (offset <= x
+        && x <= sideLength - offset
+        && offset <= y
+        && y <= sideLength - offset) {
         gl_FragColor = YuvToRgb(v_texcoord);
-    }
-    else
-    {
+    } else {
         gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
     }
 }
\ No newline at end of file
diff --git a/app/src/main/assets/shaders/fshader_24.glsl b/app/src/main/assets/shaders/fshader_24.glsl
index f3fe52e..901660c 100644
--- a/app/src/main/assets/shaders/fshader_24.glsl
+++ b/app/src/main/assets/shaders/fshader_24.glsl
@@ -5,38 +5,53 @@ varying vec2 v_texcoord;
 uniform lowp sampler2D s_textureY;
 uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;
+
 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }
+
+// Quad split screen
+// Divide the screen into four quadrants, each showing a scaled-down copy of the texture:
+// when x is in [0, 0.5], x = x * 2
+// when x is in [0.5, 1], x = (x - 0.5) * 2
+
+// when y is in [0, 0.5], y = y * 2
+// when y is in [0.5, 1], y = (y - 0.5) * 2
+
+// References: https://juejin.cn/post/6859934701932118024
+// https://blog.csdn.net/lin1109221208/article/details/107900718
+
 void main()
 {
-    vec2 newTexCoord = v_texcoord;
-    if(newTexCoord.x < 0.5)
-    {
-        newTexCoord.x = newTexCoord.x * 2.0;
-    }
-    else
-    {
-        newTexCoord.x = (newTexCoord.x - 0.5) * 2.0;
+    lowp vec2 uv = v_texcoord.xy;
+    if (uv.x <= 0.5) {
+        uv.x = uv.x * 2.0;
+    } else {
+        uv.x = (uv.x - 0.5) * 2.0;
     }
-    if(newTexCoord.y < 0.5)
-    {
-        newTexCoord.y = newTexCoord.y * 2.0;
+    if (uv.y <= 0.5) {
+        uv.y = uv.y * 2.0;
+    } else {
+        uv.y = (uv.y - 0.5) * 2.0;
     }
-    else
-    {
-        newTexCoord.y = (newTexCoord.y - 0.5) * 2.0;
-    }
-
-    gl_FragColor = YuvToRgb(newTexCoord);
+    gl_FragColor = YuvToRgb(uv);
 }
\ No newline at end of file
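Aside (not part of the patch): the quad-split mapping that fshader_24 applies, as a standalone C++ sketch. Each quadrant of the output maps back onto the full [0,1]x[0,1] texture, so the image appears four times at half size.

struct Uv { float x, y; };

Uv QuadSplit(Uv uv) {
    uv.x = (uv.x <= 0.5f) ? uv.x * 2.0f : (uv.x - 0.5f) * 2.0f;
    uv.y = (uv.y <= 0.5f) ? uv.y * 2.0f : (uv.y - 0.5f) * 2.0f;
    return uv;
}
// e.g. QuadSplit({0.25f, 0.75f}) == {0.5f, 0.5f}: the center of every output
// quadrant samples the center of the source texture.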
diff --git a/app/src/main/assets/shaders/fshader_25.glsl b/app/src/main/assets/shaders/fshader_25.glsl
index 4509cc9..3df7321 100644
--- a/app/src/main/assets/shaders/fshader_25.glsl
+++ b/app/src/main/assets/shaders/fshader_25.glsl
@@ -9,16 +9,25 @@ uniform vec2 texSize;
 const float OPACITY = 1.0;

 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }

 vec4 SkinSoften(vec2 uv)
diff --git a/app/src/main/assets/shaders/fshader_26.glsl b/app/src/main/assets/shaders/fshader_26.glsl
index c8cd429..291718b 100644
--- a/app/src/main/assets/shaders/fshader_26.glsl
+++ b/app/src/main/assets/shaders/fshader_26.glsl
@@ -9,19 +9,30 @@ uniform float u_offset;
 uniform vec2 texSize;

 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }
+
 const float MAX_ALPHA = 0.5;
 const float MAX_SCALE = 0.8;
+
 void main()
 {
     float alpha = MAX_ALPHA * (1.0 - u_offset);
diff --git a/app/src/main/assets/shaders/fshader_27.glsl b/app/src/main/assets/shaders/fshader_27.glsl
index 62308dc..549d2ab 100644
--- a/app/src/main/assets/shaders/fshader_27.glsl
+++ b/app/src/main/assets/shaders/fshader_27.glsl
@@ -14,26 +14,37 @@ float rand(float n) {
 }

 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }

 const int MAX_COUNT = 5;
+
 void main()
 {
     float MAX_LEN = (texSize.x < texSize.y ? texSize.y : texSize.x) / 10.0;
     vec2 points[MAX_COUNT];
     points[0].x = rand(u_time);
     points[0].y = rand(points[0].x);
-    for(int i=1; i radius) {
         gl_FragColor = vec4(0.25);
     } else {
-        gl_FragColor = vec4(r, g, b, 1.0);
+        gl_FragColor = YuvToRgb(quad);
     }
 }
\ No newline at end of file
diff --git a/app/src/main/assets/shaders/fshader_7.glsl b/app/src/main/assets/shaders/fshader_7.glsl
index 1cebb34..0b40f64 100644
--- a/app/src/main/assets/shaders/fshader_7.glsl
+++ b/app/src/main/assets/shaders/fshader_7.glsl
@@ -6,25 +6,39 @@ uniform lowp sampler2D s_textureY;
 uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;
 uniform vec2 texSize;
+
 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }
+
 void main() {
     vec2 tileNum = vec2(40.0, 20.0);
     vec2 uv = v_texcoord;
     vec2 uv2 = floor(uv * tileNum) / tileNum;
     uv -= uv2;
     uv *= tileNum;
-    vec3 color = YuvToRgb(uv2 + vec2(step(1.0 - uv.y, uv.x) / (2.0 * tileNum.x),
-                                     step(uv.x, uv.y) / (2.0 * tileNum.y))).rgb;
-    gl_FragColor = vec4(color, 1.0);
+
+    vec2 coord = vec2(
+    uv2 + vec2(step(1.0 - uv.y, uv.x) / (2.0 * tileNum.x),
+               step(uv.x, uv.y) / (2.0 * tileNum.y))
+    );
+    gl_FragColor = YuvToRgb(coord);
 }
\ No newline at end of file
diff --git a/app/src/main/assets/shaders/fshader_8.glsl b/app/src/main/assets/shaders/fshader_8.glsl
index 6e42b0c..9c98b63 100644
--- a/app/src/main/assets/shaders/fshader_8.glsl
+++ b/app/src/main/assets/shaders/fshader_8.glsl
@@ -6,22 +6,39 @@ uniform lowp sampler2D s_textureY;
 uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;
 uniform vec2 texSize;
+
+vec4 YuvToRgb(vec2 uv) {
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
+}
+
+
 void main() {
     vec2 pixelSize = vec2(texSize.x/100.0, texSize.y/100.0);
     vec2 uv = v_texcoord.xy;
     float dx = pixelSize.x*(1./texSize.x);
     float dy = pixelSize.y*(1./texSize.y);
-    vec2 coord = vec2(dx*floor(uv.x/dx),
-                      dy*floor(uv.y/dy));
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, coord).r;
-    u = texture2D(s_textureU, coord).r;
-    v = texture2D(s_textureV, coord).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    gl_FragColor = vec4(r, g, b, 1.0);
+    vec2 coord = vec2(
+    dx*floor(uv.x/dx),
+    dy*floor(uv.y/dy)
+    );
+    gl_FragColor = YuvToRgb(coord);
 }
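Aside (not part of the patch): the mosaic effect in fshader_8 quantizes the texture coordinate to a grid before sampling, so every fragment inside a cell reads the same texel. Note that dx = (texSize.x / 100.0) * (1.0 / texSize.x) collapses to 0.01, i.e. a fixed 100 x 100 grid; this C++ sketch bakes that simplification in.

#include <cmath>

struct TexCoord { float x, y; };

TexCoord Pixelate(TexCoord uv) {
    const float dx = 0.01f, dy = 0.01f;    // 100 x 100 cells over the texture
    return {dx * std::floor(uv.x / dx),    // snap to the cell's corner
            dy * std::floor(uv.y / dy)};
}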
diff --git a/app/src/main/assets/shaders/fshader_9.glsl b/app/src/main/assets/shaders/fshader_9.glsl
index 1ea59c6..ab45740 100644
--- a/app/src/main/assets/shaders/fshader_9.glsl
+++ b/app/src/main/assets/shaders/fshader_9.glsl
@@ -6,18 +6,29 @@ uniform lowp sampler2D s_textureY;
 uniform lowp sampler2D s_textureU;
 uniform lowp sampler2D s_textureV;
 uniform vec2 texSize;
+
 vec4 YuvToRgb(vec2 uv) {
-    float y, u, v, r, g, b;
-    y = texture2D(s_textureY, uv).r;
-    u = texture2D(s_textureU, uv).r;
-    v = texture2D(s_textureV, uv).r;
-    u = u - 0.5;
-    v = v - 0.5;
-    r = y + 1.403 * v;
-    g = y - 0.344 * u - 0.714 * v;
-    b = y + 1.770 * u;
-    return vec4(r, g, b, 1.0);
+    // YUV to RGB:
+    // R = Y + 1.403 * (V - 128)
+    // G = Y - 0.34414 * (U - 128) - 0.71414 * (V - 128)
+    // B = Y + 1.772 * (U - 128)
+
+    vec3 yuv;
+    // Assigning only the Y component would give a black-and-white image
+    yuv.x = texture2D(s_textureY, uv).r;
+    // The formulas expect Y, U-128 and V-128; texture2D returns values in [0.0, 1.0], where 128 maps to 0.5, hence the -0.5
+    yuv.y = texture2D(s_textureU, uv).r - 0.5;
+    yuv.z = texture2D(s_textureV, uv).r - 0.5;
+
+    // YUV-to-RGB conversion matrix (GLSL mat3 is column-major)
+    highp vec3 rgb = mat3(
+    1.0, 1.0, 1.0,        // first column
+    0.0, -0.34414, 1.772, // second column
+    1.403, -0.71414, 0.0  // third column
+    ) * yuv;
+    return vec4(rgb, 1.0);
 }
+
 vec4 CrossStitching(vec2 uv) {
     float stitchSize = texSize.x / 35.0;
     int invert = 0;
@@ -28,20 +39,15 @@ vec4 CrossStitching(vec2 uv) {
     tlPos *= size;
     int remX = int(mod(cPos.x, size));
     int remY = int(mod(cPos.y, size));
-    if (remX == 0 && remY == 0)
-        tlPos = cPos;
+    if (remX == 0 && remY == 0) tlPos = cPos;
     vec2 blPos = tlPos;
     blPos.y += (size - 1.0);
     if ((remX == remY) || (((int(cPos.x) - int(blPos.x)) == (int(blPos.y) - int(cPos.y))))) {
-        if (invert == 1)
-            color = vec4(0.2, 0.15, 0.05, 1.0);
-        else
-            color = YuvToRgb(tlPos * vec2(1.0 / texSize.x, 1.0 / texSize.y)) * 1.4;
+        if (invert == 1) color = vec4(0.2, 0.15, 0.05, 1.0);
+        else color = YuvToRgb(tlPos * vec2(1.0 / texSize.x, 1.0 / texSize.y)) * 1.4;
     } else {
-        if (invert == 1)
-            color = YuvToRgb(tlPos * vec2(1.0 / texSize.x, 1.0 / texSize.y)) * 1.4;
-        else
-            color = vec4(0.0, 0.0, 0.0, 1.0);
+        if (invert == 1) color = YuvToRgb(tlPos * vec2(1.0 / texSize.x, 1.0 / texSize.y)) * 1.4;
+        else color = vec4(0.0, 0.0, 0.0, 1.0);
     }
     return color;
 }
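Aside (not part of the patch): StoreRenderContext/GetRenderContext below use the common JNI trick of parking a native pointer in a Java long field (here mNativeContextHandle, JNI type signature "J"). A reduced C++ sketch of that pattern, with illustrative names:

#include <jni.h>

class NativeContext { /* native state */ };

void StoreHandle(JNIEnv *env, jobject obj, NativeContext *ctx) {
    jclass cls = env->GetObjectClass(obj);
    jfieldID fid = env->GetFieldID(cls, "mNativeContextHandle", "J");
    // A jlong is always wide enough to hold a pointer on Android ABIs.
    if (fid != nullptr) env->SetLongField(obj, fid, reinterpret_cast<jlong>(ctx));
}

NativeContext *LoadHandle(JNIEnv *env, jobject obj) {
    jclass cls = env->GetObjectClass(obj);
    jfieldID fid = env->GetFieldID(cls, "mNativeContextHandle", "J");
    return fid ? reinterpret_cast<NativeContext *>(env->GetLongField(obj, fid)) : nullptr;
}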
diff --git a/app/src/main/cpp/context/RenderContext.cpp b/app/src/main/cpp/context/RenderContext.cpp
index 6544d6b..eb3959a 100644
--- a/app/src/main/cpp/context/RenderContext.cpp
+++ b/app/src/main/cpp/context/RenderContext.cpp
@@ -13,287 +13,264 @@
 #include
 #include "RenderContext.h"

-jfieldID ByteFlowRenderContext::s_ContextHandle = 0L;
+jfieldID ByteFlowRenderContext::s_ContextHandle = nullptr;

 ByteFlowRenderContext::ByteFlowRenderContext(int renderType) :
-        m_pByteFlowRender(nullptr), m_pCurGlFilter(nullptr), m_pBeforeGlFilter(nullptr), m_bIsExampleMode(
-        false)
-{
-    switch (renderType)
-    {
-        case GL_RENDER_TYPE:
-            m_pByteFlowRender = new GLByteFlowRender();
-            break;
-        case CL_RENDER_TYPE:
-            break;
-        default:
-            m_pByteFlowRender = new GLByteFlowRender();
-    }
+        m_pByteFlowRender(nullptr), m_pCurGlFilter(nullptr), m_pBeforeGlFilter(nullptr),
+        m_bIsExampleMode(
+                false) {
+    switch (renderType) {
+        case GL_RENDER_TYPE:
+            m_pByteFlowRender = new GLByteFlowRender();
+            break;
+        case CL_RENDER_TYPE:
+            break;
+        default:
+            m_pByteFlowRender = new GLByteFlowRender();
+    }
 }

-ByteFlowRenderContext::~ByteFlowRenderContext()
-{
-    if (m_pByteFlowRender != NULL)
-    {
-        delete m_pByteFlowRender;
-        m_pByteFlowRender = NULL;
-    }
+ByteFlowRenderContext::~ByteFlowRenderContext() {
+    if (m_pByteFlowRender != nullptr) {
+        delete m_pByteFlowRender;
+        m_pByteFlowRender = nullptr;
+    }
 }

-void ByteFlowRenderContext::CreateRenderContext(JNIEnv *env, jobject instance, jint renderType)
-{
-    LOGCATE("ByteFlowRenderContext::CreateRenderContext renderType = %d", renderType);
-    ByteFlowRenderContext *pContext = new ByteFlowRenderContext(renderType);
-    StoreRenderContext(env, instance, pContext);
+void ByteFlowRenderContext::CreateRenderContext(JNIEnv *env, jobject instance, jint renderType) {
+    LOGCATE("ByteFlowRenderContext::CreateRenderContext renderType = %d", renderType);
+    ByteFlowRenderContext *pContext = new ByteFlowRenderContext(renderType);
+    StoreRenderContext(env, instance, pContext);
 }

-void ByteFlowRenderContext::StoreRenderContext(JNIEnv *env, jobject instance, ByteFlowRenderContext *pContext)
-{
-    LOGCATE("ByteFlowRenderContext::StoreRenderContext");
-    jclass cls = env->GetObjectClass(instance);
-    if (cls == NULL)
-    {
-        LOGCATE("ByteFlowRenderContext::StoreRenderContext cls == NULL");
-        return;
-    }
-
-    s_ContextHandle = env->GetFieldID(cls, "mNativeContextHandle", "J");
-    if (s_ContextHandle == NULL)
-    {
-        LOGCATE("ByteFlowRenderContext::StoreRenderContext s_ContextHandle == NULL");
-        return;
-    }
-
-    env->SetLongField(instance, s_ContextHandle, reinterpret_cast<jlong>(pContext));
+void ByteFlowRenderContext::StoreRenderContext(JNIEnv *env, jobject instance,
+                                               ByteFlowRenderContext *pContext) {
+    LOGCATE("ByteFlowRenderContext::StoreRenderContext");
+    jclass cls = env->GetObjectClass(instance);
+    if (cls == nullptr) {
+        LOGCATE("ByteFlowRenderContext::StoreRenderContext cls == nullptr");
+        return;
+    }
+
+    s_ContextHandle = env->GetFieldID(cls, "mNativeContextHandle", "J");
+    if (s_ContextHandle == nullptr) {
+        LOGCATE("ByteFlowRenderContext::StoreRenderContext s_ContextHandle == nullptr");
+        return;
+    }
+    env->SetLongField(instance, s_ContextHandle, reinterpret_cast<jlong>(pContext));
 }

-void ByteFlowRenderContext::DeleteRenderContext(JNIEnv *env, jobject instance)
-{
-    LOGCATE("ByteFlowRenderContext::DeleteRenderContext");
-    if (s_ContextHandle == NULL)
-    {
-        LOGCATE("ByteFlowRenderContext::DeleteRenderContext Could not find render context.");
-        return;
-    }
-
-    ByteFlowRenderContext *pContext = reinterpret_cast<ByteFlowRenderContext *>(env->GetLongField(
-            instance, s_ContextHandle));
-    if (pContext)
-    {
-        delete pContext;
-    }
-    env->SetLongField(instance, s_ContextHandle, 0L);
+void ByteFlowRenderContext::DeleteRenderContext(JNIEnv *env, jobject instance) {
+    LOGCATE("ByteFlowRenderContext::DeleteRenderContext");
+    if (s_ContextHandle == nullptr) {
+        LOGCATE("ByteFlowRenderContext::DeleteRenderContext Could not find render context.");
+        return;
+    }
+
+    auto *pContext = reinterpret_cast<ByteFlowRenderContext *>(env->GetLongField(
+            instance, s_ContextHandle));
+    if (pContext) {
+        delete pContext;
+    }
+    env->SetLongField(instance, s_ContextHandle, 0L);
 }

-ByteFlowRenderContext *ByteFlowRenderContext::GetRenderContext(JNIEnv *env, jobject instance)
-{
-    LOGCATE("ByteFlowRenderContext::GetRenderContext");
+ByteFlowRenderContext *ByteFlowRenderContext::GetRenderContext(JNIEnv *env, jobject instance) {
+    LOGCATE("ByteFlowRenderContext::GetRenderContext");

-    if (s_ContextHandle == NULL)
-    {
-        LOGCATE("ByteFlowRenderContext::GetRenderContext Could not find render context.");
-        return NULL;
-    }
+    if (s_ContextHandle == nullptr) {
+        LOGCATE("ByteFlowRenderContext::GetRenderContext Could not find render context.");
+        return nullptr;
+    }

-    ByteFlowRenderContext *pContext = reinterpret_cast<ByteFlowRenderContext *>(env->GetLongField(
-            instance, s_ContextHandle));
-    return pContext;
+    auto *pContext =
reinterpret_cast(env->GetLongField( + instance, s_ContextHandle)); + return pContext; } -int ByteFlowRenderContext::Init(int initType) -{ - return m_pByteFlowRender->Init(initType); +int ByteFlowRenderContext::Init(int initType) { + return m_pByteFlowRender->Init(initType); } -int ByteFlowRenderContext::UnInit() -{ - if(m_pCurGlFilter) { - delete m_pCurGlFilter; - m_pCurGlFilter = nullptr; - } - - if(m_pBeforeGlFilter) { - delete m_pBeforeGlFilter; - m_pBeforeGlFilter = nullptr; - } - return m_pByteFlowRender->UnInit(); +int ByteFlowRenderContext::UnInit() { + if (m_pCurGlFilter) { + delete m_pCurGlFilter; + m_pCurGlFilter = nullptr; + } + + if (m_pBeforeGlFilter) { + delete m_pBeforeGlFilter; + m_pBeforeGlFilter = nullptr; + } + return m_pByteFlowRender->UnInit(); } -void ByteFlowRenderContext::UpdateFrame(int format, uint8_t *pBuffer, int width, int height) -{ - LOGCATE("ByteFlowRenderContext::UpdateFrame format=%d, width=%d, height=%d, pData=%p", - format, width, height, pBuffer); - NativeImage nativeImage; - nativeImage.format = format; - nativeImage.width = width; - nativeImage.height = height; - nativeImage.ppPlane[0] = pBuffer; - - switch (format) - { - case IMAGE_FORMAT_NV12: - case IMAGE_FORMAT_NV21: - nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height; - break; - case IMAGE_FORMAT_I420: - nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height; - nativeImage.ppPlane[2] = nativeImage.ppPlane[1] + width * height / 4; - break; - default: - break; - } - - if(m_bIsExampleMode && m_pCurGlFilter) { - m_pCurGlFilter->LoadImage(&nativeImage); - } else { - m_pByteFlowRender->UpdateFrame(&nativeImage); - } +void ByteFlowRenderContext::UpdateFrame(int format, uint8_t *pBuffer, int width, int height) { + LOGCATE("ByteFlowRenderContext::UpdateFrame format=%d, width=%d, height=%d, pData=%p", + format, width, height, pBuffer); + NativeImage nativeImage; + nativeImage.format = format; + nativeImage.width = width; + nativeImage.height = height; + nativeImage.ppPlane[0] = pBuffer; + + switch (format) { + case IMAGE_FORMAT_NV12: + case IMAGE_FORMAT_NV21: + nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height; + break; + case IMAGE_FORMAT_I420: + nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height; + nativeImage.ppPlane[2] = nativeImage.ppPlane[1] + width * height / 4; + break; + default: + break; + } + + if (m_bIsExampleMode && m_pCurGlFilter) { + m_pCurGlFilter->LoadImage(&nativeImage); + } else { + m_pByteFlowRender->UpdateFrame(&nativeImage); + } } -void ByteFlowRenderContext::SetTransformMatrix(float translateX, float translateY, float scaleX, float scaleY, int degree, int mirror) -{ - m_pByteFlowRender->SetTransformMatrix(translateX, translateY, scaleX, scaleY, degree, mirror); +void ByteFlowRenderContext::SetTransformMatrix(float translateX, float translateY, float scaleX, + float scaleY, int degree, int mirror) { + m_pByteFlowRender->SetTransformMatrix(translateX, translateY, scaleX, scaleY, degree, mirror); - m_TransformMatrix.translateX = translateX; - m_TransformMatrix.translateY = translateY; - m_TransformMatrix.scaleX = scaleX; - m_TransformMatrix.scaleY = scaleY; - m_TransformMatrix.degree = degree; - m_TransformMatrix.mirror = mirror; + m_TransformMatrix.translateX = translateX; + m_TransformMatrix.translateY = translateY; + m_TransformMatrix.scaleX = scaleX; + m_TransformMatrix.scaleY = scaleY; + m_TransformMatrix.degree = degree; + m_TransformMatrix.mirror = mirror; } -void ByteFlowRenderContext::SetParamsInt(int paramType, int param) 
-{ - LOGCATE("ByteFlowRenderContext::SetParamsInt paramType = %d, param = %d", paramType, param); - switch (paramType) - { +void ByteFlowRenderContext::SetParamsInt(int paramType, int param) { + LOGCATE("ByteFlowRenderContext::SetParamsInt paramType = %d, param = %d", paramType, param); + switch (paramType) { // case PARAM_TYPE_SET_SHADER_INDEX: // m_bIsExampleMode = false; // m_pByteFlowRender->SetShaderIndex(param); // break; - case PARAM_TYPE_SET_EXAMPLE: - CreateExample(param); - break; - default: - break; - } + case PARAM_TYPE_SET_EXAMPLE: + CreateExample(param); + break; + default: + break; + } } -int ByteFlowRenderContext::GetParamsInt(int paramType) -{ - LOGCATE("ByteFlowRenderContext::GetParamsInt paramType = %d", paramType); - switch (paramType) - { - case PARAM_TYPE_SET_SHADER_INDEX: - return m_pByteFlowRender->GetShaderIndex(); - default: - break; - } - return -1; +int ByteFlowRenderContext::GetParamsInt(int paramType) { + LOGCATE("ByteFlowRenderContext::GetParamsInt paramType = %d", paramType); + switch (paramType) { + case PARAM_TYPE_SET_SHADER_INDEX: + return m_pByteFlowRender->GetShaderIndex(); + default: + break; + } + return -1; } -void ByteFlowRenderContext::OnSurfaceCreated() -{ - m_pByteFlowRender->OnSurfaceCreated(); +void ByteFlowRenderContext::OnSurfaceCreated() { + m_pByteFlowRender->OnSurfaceCreated(); } -void ByteFlowRenderContext::OnSurfaceChanged(int width, int height) -{ - m_ViewPort = vec2(width, height); - m_pByteFlowRender->OnSurfaceChanged(width, height); +void ByteFlowRenderContext::OnSurfaceChanged(int width, int height) { + m_ViewPort = vec2(width, height); + m_pByteFlowRender->OnSurfaceChanged(width, height); } -void ByteFlowRenderContext::OnDrawFrame() -{ - if(m_bIsExampleMode) { - if(m_pBeforeGlFilter) { - m_pBeforeGlFilter->Destroy(); - delete m_pBeforeGlFilter; - m_pBeforeGlFilter = nullptr; - } - - if(m_pCurGlFilter) { - m_pCurGlFilter->Init(); - m_pCurGlFilter->SetTransformMatrix(m_TransformMatrix); - m_pCurGlFilter->Draw(m_ViewPort.x, m_ViewPort.y); - } - - } - else { - m_pByteFlowRender->OnDrawFrame(); - } +void ByteFlowRenderContext::OnDrawFrame() { + if (m_bIsExampleMode) { + if (m_pBeforeGlFilter) { + m_pBeforeGlFilter->Destroy(); + delete m_pBeforeGlFilter; + m_pBeforeGlFilter = nullptr; + } + + if (m_pCurGlFilter) { + m_pCurGlFilter->Init(); + m_pCurGlFilter->SetTransformMatrix(m_TransformMatrix); + m_pCurGlFilter->Draw(m_ViewPort.x, m_ViewPort.y); + } + + } else { + m_pByteFlowRender->OnDrawFrame(); + } } -void ByteFlowRenderContext::LoadLutImageData(int index, int format, int width, int height, uint8_t *pData) -{ - LOGCATE("ByteFlowRenderContext::LoadFilterImageData index=%d, format=%d, width=%d, height=%d, pData=%p", - index, format, width, height, pData); - NativeImage nativeImage; - nativeImage.format = format; - nativeImage.width = width; - nativeImage.height = height; - nativeImage.ppPlane[0] = pData; - - switch (format) - { - case IMAGE_FORMAT_NV12: - case IMAGE_FORMAT_NV21: - nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height; - break; - case IMAGE_FORMAT_I420: - nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height; - nativeImage.ppPlane[2] = nativeImage.ppPlane[1] + width * height / 4; - break; - default: - break; - } - - m_pByteFlowRender->LoadFilterImageData(index, &nativeImage); +void ByteFlowRenderContext::LoadLutImageData(int index, int format, int width, int height, + uint8_t *pData) { + LOGCATE("ByteFlowRenderContext::LoadFilterImageData index=%d, format=%d, width=%d, height=%d, pData=%p", + 
index, format, width, height, pData);
+    NativeImage nativeImage;
+    nativeImage.format = format;
+    nativeImage.width = width;
+    nativeImage.height = height;
+    nativeImage.ppPlane[0] = pData;
+
+    switch (format) {
+        case IMAGE_FORMAT_NV12:
+        case IMAGE_FORMAT_NV21:
+            nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height;
+            break;
+        case IMAGE_FORMAT_I420:
+            nativeImage.ppPlane[1] = nativeImage.ppPlane[0] + width * height;
+            nativeImage.ppPlane[2] = nativeImage.ppPlane[1] + width * height / 4;
+            break;
+        default:
+            break;
+    }
+
+    m_pByteFlowRender->LoadFilterImageData(index, &nativeImage);
 }

-void ByteFlowRenderContext::LoadFragShaderScript(int shaderIndex, char *pShaderStr, int strLen)
-{
-    LOGCATE("ByteFlowRenderContext::LoadFragShaderScript shaderIndex = %d, pShaderStr = %s, strLen = %d", shaderIndex, pShaderStr, strLen);
+void ByteFlowRenderContext::LoadFragShaderScript(int shaderIndex, char *pShaderStr, int strLen) {
+    LOGCATE("ByteFlowRenderContext::LoadFragShaderScript shaderIndex = %d, strLen = %d",
+            shaderIndex, strLen);
+
+    LOGCATE("ByteFlowRenderContext::LoadFragShaderScript pShaderStr = %s", pShaderStr);

-    if(m_bIsExampleMode) {
+    if (m_bIsExampleMode) {
         m_bIsExampleMode = false;
-        if(m_pBeforeGlFilter) {
-            m_pBeforeGlFilter->Destroy();
-            delete m_pBeforeGlFilter;
-            m_pBeforeGlFilter = nullptr;
-        }
-
-        if(m_pCurGlFilter) {
-            m_pCurGlFilter->Destroy();
-            delete m_pCurGlFilter;
-            m_pCurGlFilter = nullptr;
-        }
+        if (m_pBeforeGlFilter) {
+            m_pBeforeGlFilter->Destroy();
+            delete m_pBeforeGlFilter;
+            m_pBeforeGlFilter = nullptr;
+        }
+
+        if (m_pCurGlFilter) {
+            m_pCurGlFilter->Destroy();
+            delete m_pCurGlFilter;
+            m_pCurGlFilter = nullptr;
+        }
     }

-    m_pByteFlowRender->LoadFragShaderScript(shaderIndex, pShaderStr, strLen);
+    m_pByteFlowRender->LoadFragShaderScript(shaderIndex, pShaderStr, strLen);
 }

 void ByteFlowRenderContext::CreateExample(int exampleIndex) {
-    LOGCATE("ByteFlowRenderContext:CreateExample exampleIndex=%d", exampleIndex);
-    m_pBeforeGlFilter = m_pCurGlFilter;
-    switch (exampleIndex) {
-        case SAMPLE_TYPE_KEY_CONVEYOR_BELT:
-            m_pCurGlFilter = new ConveyorBeltExample_1();
-            break;
-        case SAMPLE_TYPE_KEY_BLUE_LINE_CHALLENGE:
-            m_pCurGlFilter = new BluelineChallengeExample();
-            break;
-        default:
-            m_pCurGlFilter = nullptr;
-            break;
-    }
-
-    if(m_pCurGlFilter != nullptr) m_bIsExampleMode = true;
+    LOGCATE("ByteFlowRenderContext:CreateExample exampleIndex=%d", exampleIndex);
+    m_pBeforeGlFilter = m_pCurGlFilter;
+    switch (exampleIndex) {
+        case SAMPLE_TYPE_KEY_CONVEYOR_BELT:
+            m_pCurGlFilter = new ConveyorBeltExample_1();
+            break;
+        case SAMPLE_TYPE_KEY_BLUE_LINE_CHALLENGE:
+            m_pCurGlFilter = new BluelineChallengeExample();
+            break;
+        default:
+            m_pCurGlFilter = nullptr;
+            break;
+    }
+
+    if (m_pCurGlFilter != nullptr) m_bIsExampleMode = true;
 }
diff --git a/app/src/main/cpp/example/BluelineChallengeExample.cpp b/app/src/main/cpp/example/BluelineChallengeExample.cpp
index 3224077..fa04406 100644
--- a/app/src/main/cpp/example/BluelineChallengeExample.cpp
+++ b/app/src/main/cpp/example/BluelineChallengeExample.cpp
@@ -120,12 +120,12 @@ void BluelineChallengeExample::Init()
     glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[0]);
     glEnableVertexAttribArray(0);
-    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (const void *)0);
+    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), nullptr);
     glBindBuffer(GL_ARRAY_BUFFER, GL_NONE);

     glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[1]);
     glEnableVertexAttribArray(1);
-    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE,
2 * sizeof(GLfloat), (const void *)0); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), nullptr); glBindBuffer(GL_ARRAY_BUFFER, GL_NONE); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[2]); @@ -223,7 +223,7 @@ void BluelineChallengeExample::Draw(int width, int height) glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, m_TextureId); GLUtils::setInt(m_ProgramObj, "u_texture", 0); - glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, (const void *)0); + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, nullptr); glBindVertexArray(GL_NONE); m_frameIndex ++; diff --git a/app/src/main/cpp/example/BluelineChallengeExample.h b/app/src/main/cpp/example/BluelineChallengeExample.h index d959239..d5d4766 100644 --- a/app/src/main/cpp/example/BluelineChallengeExample.h +++ b/app/src/main/cpp/example/BluelineChallengeExample.h @@ -41,7 +41,7 @@ class BluelineChallengeExample : public GLExampleBase virtual void SetTransformMatrix(TransformMatrix &transformMatrix); - void UpdateMVPMatrix(glm::mat4 &mat4Matrix, TransformMatrix &transformMatrix); + static void UpdateMVPMatrix(glm::mat4 &mat4Matrix, TransformMatrix &transformMatrix); private: GLuint m_TextureId; diff --git a/app/src/main/cpp/render/ByteFlowDef.h b/app/src/main/cpp/render/ByteFlowDef.h index 48b6fe3..8f9b0a2 100644 --- a/app/src/main/cpp/render/ByteFlowDef.h +++ b/app/src/main/cpp/render/ByteFlowDef.h @@ -17,161 +17,134 @@ #define BF_ERROR -1 //for YUV420p_I420 -typedef struct ByteFlowFrame -{ - size_t width; - size_t height; - size_t yPitch; - size_t uPitch; - size_t vPitch; - uint8_t *pYPlane; - uint8_t *pUPlane; - uint8_t *pVPlane; - - ByteFlowFrame() - { - width = 0; - height= 0; - yPitch= 0; - uPitch= 0; - vPitch= 0; - - pYPlane = NULL; - pUPlane = NULL; - pVPlane = NULL; - } +typedef struct ByteFlowFrame { + size_t width; + size_t height; + size_t yPitch; + size_t uPitch; + size_t vPitch; + uint8_t *pYPlane; + uint8_t *pUPlane; + uint8_t *pVPlane; + + ByteFlowFrame() { + width = 0; + height = 0; + yPitch = 0; + uPitch = 0; + vPitch = 0; + + pYPlane = NULL; + pUPlane = NULL; + pVPlane = NULL; + } }; -typedef struct TransformMatrix -{ - int degree; - int mirror; - float translateX; - float translateY; - float scaleX; - float scaleY; - - TransformMatrix(): - translateX(0), - translateY(0), - scaleX(1.0), - scaleY(1.0), - degree(0), - mirror(0) - { - - } - void Reset() - { - translateX = 0; - translateY = 0; - scaleX = 1.0; - scaleY = 1.0; - degree = 0; - mirror = 0; - - } +typedef struct TransformMatrix { + int degree; + int mirror; + float translateX; + float translateY; + float scaleX; + float scaleY; + + TransformMatrix() : + translateX(0), + translateY(0), + scaleX(1.0), + scaleY(1.0), + degree(0), + mirror(0) {} + + void Reset() { + translateX = 0; + translateY = 0; + scaleX = 1.0; + scaleY = 1.0; + degree = 0; + mirror = 0; + } }; -class ByteFlowFrameUtil -{ +class ByteFlowFrameUtil { public: - static void AllocFrame(ByteFlowFrame *pFrame) - { - if (pFrame != NULL) - { - if (pFrame->yPitch == 0 || pFrame->uPitch == 0 || pFrame->vPitch == 0) - { - pFrame->yPitch = pFrame->width; - pFrame->uPitch = pFrame->width / 2; - pFrame->vPitch = pFrame->width / 2; - } - - size_t mem_size = static_cast(pFrame->yPitch * pFrame->height + - pFrame->uPitch * (pFrame->height >> 1) + - pFrame->vPitch * (pFrame->height >> 1)); - pFrame->pYPlane = static_cast(malloc(mem_size)); - pFrame->pUPlane = pFrame->pYPlane + pFrame->yPitch * pFrame->height; - pFrame->pVPlane = pFrame->pUPlane + pFrame->uPitch * (pFrame->height >> 1); 
- } - - } - - static void FreeFrame(ByteFlowFrame *pFrame) - { - if (pFrame != NULL && pFrame->pYPlane != NULL) - { - free(pFrame->pYPlane); - pFrame->pYPlane = NULL; - } - } - - static bool CopyFrame(ByteFlowFrame *pSrcFrame, ByteFlowFrame *pDstFrame) - { - if (pSrcFrame == NULL || pSrcFrame->pYPlane == NULL || pDstFrame == NULL || - pDstFrame->pYPlane == NULL) - { - return false; - } - - size_t ySize = pSrcFrame->yPitch * pSrcFrame->height; - size_t uSize = pSrcFrame->uPitch * (pSrcFrame->height >> 1); - size_t vSize = pSrcFrame->vPitch * (pSrcFrame->height >> 1); - if (pSrcFrame->width == pSrcFrame->yPitch) - { - - memcpy(pDstFrame->pYPlane, pSrcFrame->pYPlane, ySize); - } - else - { - uint8_t *pSrcY = pSrcFrame->pYPlane; - uint8_t *pDstY = pDstFrame->pYPlane; - for (int i = 0; i < pSrcFrame->height; ++i) - { - memcpy(pDstY, pSrcY, pDstFrame->width); - pSrcY += pSrcFrame->yPitch; - pDstY += pDstFrame->width; - } - } - - if (pSrcFrame->width / 2 == pSrcFrame->uPitch) - { - - memcpy(pDstFrame->pUPlane, pSrcFrame->pUPlane, uSize); - } - else - { - uint8_t *pSrcU = pSrcFrame->pUPlane; - uint8_t *pDstU = pDstFrame->pUPlane; - for (int i = 0; i < pSrcFrame->height / 2; ++i) - { - memcpy(pDstU, pSrcU, pDstFrame->width / 2); - pSrcU += pSrcFrame->uPitch; - pDstU += pDstFrame->width / 2; - } - } - - if (pSrcFrame->width / 2 == pSrcFrame->vPitch) - { - - memcpy(pDstFrame->pVPlane, pSrcFrame->pVPlane, vSize); - } - else - { - uint8_t *pSrcV = pSrcFrame->pVPlane; - uint8_t *pDstV = pDstFrame->pVPlane; - for (int i = 0; i < pSrcFrame->height / 2; ++i) - { - memcpy(pDstV, pSrcV, pDstFrame->width / 2); - pSrcV += pSrcFrame->vPitch; - pDstV += pDstFrame->width / 2; - } - } - - return true; - } + static void AllocFrame(ByteFlowFrame *pFrame) { + if (pFrame != NULL) { + if (pFrame->yPitch == 0 || pFrame->uPitch == 0 || pFrame->vPitch == 0) { + pFrame->yPitch = pFrame->width; + pFrame->uPitch = pFrame->width / 2; + pFrame->vPitch = pFrame->width / 2; + } + + size_t mem_size = static_cast(pFrame->yPitch * pFrame->height + + pFrame->uPitch * (pFrame->height >> 1) + + pFrame->vPitch * (pFrame->height >> 1)); + pFrame->pYPlane = static_cast(malloc(mem_size)); + pFrame->pUPlane = pFrame->pYPlane + pFrame->yPitch * pFrame->height; + pFrame->pVPlane = pFrame->pUPlane + pFrame->uPitch * (pFrame->height >> 1); + } + + } + + static void FreeFrame(ByteFlowFrame *pFrame) { + if (pFrame != NULL && pFrame->pYPlane != NULL) { + free(pFrame->pYPlane); + pFrame->pYPlane = NULL; + } + } + + static bool CopyFrame(ByteFlowFrame *pSrcFrame, ByteFlowFrame *pDstFrame) { + if (pSrcFrame == NULL || pSrcFrame->pYPlane == NULL || pDstFrame == NULL || + pDstFrame->pYPlane == NULL) { + return false; + } + + size_t ySize = pSrcFrame->yPitch * pSrcFrame->height; + size_t uSize = pSrcFrame->uPitch * (pSrcFrame->height >> 1); + size_t vSize = pSrcFrame->vPitch * (pSrcFrame->height >> 1); + if (pSrcFrame->width == pSrcFrame->yPitch) { + + memcpy(pDstFrame->pYPlane, pSrcFrame->pYPlane, ySize); + } else { + uint8_t *pSrcY = pSrcFrame->pYPlane; + uint8_t *pDstY = pDstFrame->pYPlane; + for (int i = 0; i < pSrcFrame->height; ++i) { + memcpy(pDstY, pSrcY, pDstFrame->width); + pSrcY += pSrcFrame->yPitch; + pDstY += pDstFrame->width; + } + } + + if (pSrcFrame->width / 2 == pSrcFrame->uPitch) { + + memcpy(pDstFrame->pUPlane, pSrcFrame->pUPlane, uSize); + } else { + uint8_t *pSrcU = pSrcFrame->pUPlane; + uint8_t *pDstU = pDstFrame->pUPlane; + for (int i = 0; i < pSrcFrame->height / 2; ++i) { + memcpy(pDstU, pSrcU, pDstFrame->width / 2); + pSrcU += 
pSrcFrame->uPitch; + pDstU += pDstFrame->width / 2; + } + } + + if (pSrcFrame->width / 2 == pSrcFrame->vPitch) { + + memcpy(pDstFrame->pVPlane, pSrcFrame->pVPlane, vSize); + } else { + uint8_t *pSrcV = pSrcFrame->pVPlane; + uint8_t *pDstV = pDstFrame->pVPlane; + for (int i = 0; i < pSrcFrame->height / 2; ++i) { + memcpy(pDstV, pSrcV, pDstFrame->width / 2); + pSrcV += pSrcFrame->vPitch; + pDstV += pDstFrame->width / 2; + } + } + + return true; + } }; diff --git a/app/src/main/cpp/render/GLByteFlowRender.cpp b/app/src/main/cpp/render/GLByteFlowRender.cpp index 2e18f60..25cd1d7 100644 --- a/app/src/main/cpp/render/GLByteFlowRender.cpp +++ b/app/src/main/cpp/render/GLByteFlowRender.cpp @@ -62,7 +62,6 @@ GLByteFlowRender::GLByteFlowRender() : m_IsUpdateExtTexture = false; m_pFragShaderBuf = nullptr; memset(&m_ExtRgbaImage, 0, sizeof(NativeImage)); - } GLByteFlowRender::~GLByteFlowRender() { @@ -94,7 +93,7 @@ void GLByteFlowRender::UpdateFrame(NativeImage *pImage) { LOGCATE("GLByteFlowRender::UpdateFrame"); if (pImage == nullptr) return; if (pImage->width != m_RenderFrame.width || pImage->height != m_RenderFrame.height) { - if (m_RenderFrame.ppPlane[0] != NULL) { + if (m_RenderFrame.ppPlane[0] != nullptr) { NativeImageUtil::FreeNativeImage(&m_RenderFrame); } memset(&m_RenderFrame, 0, sizeof(NativeImage)); @@ -116,7 +115,6 @@ GLByteFlowRender::SetTransformMatrix(float translateX, float translateY, float s m_TransformMatrix.degree = degree; m_TransformMatrix.mirror = mirror; m_IsProgramChanged = true; - } void GLByteFlowRender::SetShaderIndex(int shaderIndex) { @@ -133,8 +131,8 @@ int GLByteFlowRender::GetShaderIndex() { bool GLByteFlowRender::CreateTextures() { LOGCATE("GLByteFlowRender::CreateTextures"); - GLsizei yWidth = static_cast(m_RenderFrame.width); - GLsizei yHeight = static_cast(m_RenderFrame.height); + auto yWidth = m_RenderFrame.width; + auto yHeight = m_RenderFrame.height; glActiveTexture(GL_TEXTURE0); glGenTextures(1, &m_YTextureId); @@ -144,15 +142,15 @@ bool GLByteFlowRender::CreateTextures() { glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yWidth, yHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, - NULL); + nullptr); if (!m_YTextureId) { GLUtils::CheckGLError("GLByteFlowRender::CreateTextures Create Y texture"); return false; } - GLsizei uWidth = static_cast(m_RenderFrame.width / 2); - GLsizei uHeight = yHeight / 2; + auto uWidth = m_RenderFrame.width / 2; + auto uHeight = yHeight / 2; glActiveTexture(GL_TEXTURE1); glGenTextures(1, &m_UTextureId); @@ -162,15 +160,15 @@ bool GLByteFlowRender::CreateTextures() { glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, uWidth, uHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, - NULL); + nullptr); if (!m_UTextureId) { GLUtils::CheckGLError("GLByteFlowRender::CreateTextures Create U texture"); return false; } - GLsizei vWidth = static_cast(m_RenderFrame.width / 2); - GLsizei vHeight = (GLsizei) yHeight / 2; + auto vWidth = m_RenderFrame.width / 2; + auto vHeight = yHeight / 2; glActiveTexture(GL_TEXTURE2); glGenTextures(1, &m_VTextureId); @@ -180,19 +178,18 @@ bool GLByteFlowRender::CreateTextures() { glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, vWidth, vHeight, 0, 
GL_LUMINANCE, GL_UNSIGNED_BYTE, - NULL); + nullptr); if (!m_VTextureId) { GLUtils::CheckGLError("GLByteFlowRender::CreateTextures Create V texture"); return false; } - return true; } bool GLByteFlowRender::UpdateTextures() { LOGCATE("GLByteFlowRender::UpdateTextures"); - if (m_RenderFrame.ppPlane[0] == NULL) { + if (m_RenderFrame.ppPlane[0] == nullptr) { return false; } @@ -510,9 +507,10 @@ void GLByteFlowRender::SetShaderProgramDynamicAttrib(int shaderIndex) { glUniform1f(m_OffsetHandle, offset); } GLUtils::setInt(m_Program, "s_textureMapping", 3); - GLUtils::setVec2(m_Program, "asciiTexSize", m_ExtRgbaImage.width, m_ExtRgbaImage.height); + GLUtils::setVec2(m_Program, "asciiTexSize", m_ExtRgbaImage.width, + m_ExtRgbaImage.height); } - break; + break; case ASCII_SHADER_MATTE_BLUR: { GLUtils::setFloat(m_Program, "blurSamplerScale", 4.0); GLUtils::setFloat(m_Program, "factor", 0.1); diff --git a/app/src/main/cpp/render/GLUtils.cpp b/app/src/main/cpp/render/GLUtils.cpp index 0fc0a18..1ec9255 100644 --- a/app/src/main/cpp/render/GLUtils.cpp +++ b/app/src/main/cpp/render/GLUtils.cpp @@ -8,7 +8,7 @@ #include "GLUtils.h" #include "LogUtil.h" -#include +#include #include #include @@ -19,7 +19,7 @@ GLuint GLUtils::LoadShader(GLenum shaderType, const char *pSource) shader = glCreateShader(shaderType); if (shader) { - glShaderSource(shader, 1, &pSource, NULL); + glShaderSource(shader, 1, &pSource, nullptr); glCompileShader(shader); GLint compiled = 0; glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); @@ -32,7 +32,7 @@ GLuint GLUtils::LoadShader(GLenum shaderType, const char *pSource) char* buf = (char*) malloc((size_t)infoLen); if (buf) { - glGetShaderInfoLog(shader, infoLen, NULL, buf); + glGetShaderInfoLog(shader, infoLen, nullptr, buf); LOGCATE("GLUtils::LoadShader Could not compile shader %d:\n%s\n", shaderType, buf); free(buf); } @@ -80,7 +80,7 @@ GLuint GLUtils::CreateProgram(const char *pVertexShaderSource, const char *pFrag char* buf = (char*) malloc((size_t)bufLength); if (buf) { - glGetProgramInfoLog(program, bufLength, NULL, buf); + glGetProgramInfoLog(program, bufLength, nullptr, buf); LOGCATE("GLUtils::CreateProgram Could not link program:\n%s\n", buf); free(buf); } diff --git a/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java b/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java index 54f04ce..8edac1c 100644 --- a/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java +++ b/app/src/main/java/com/byteflow/openglcamera2/BaseRenderActivity.java @@ -38,7 +38,7 @@ public abstract class BaseRenderActivity extends AppCompatActivity implements My protected GLSurfaceView mGLSurfaceView; protected MyGestureListener mGestureDetector; //protected int mCurrentShaderIndex = SHADER_NUM - 1; - protected int mCurrentShaderIndex = 23; + protected int mCurrentShaderIndex = 31; protected Size mRootViewSize, mScreenSize; @Override diff --git a/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java b/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java index b2627b8..f6e873f 100644 --- a/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java +++ b/app/src/main/java/com/byteflow/openglcamera2/MainActivity.java @@ -393,31 +393,7 @@ public void onSwipe(MyGestureListener.SwipeDirection direction) { case SWIPE_RIGHT: mCurrentShaderIndex++; mCurrentShaderIndex = mCurrentShaderIndex % SHADER_NUM; - switch (mCurrentShaderIndex) { - case LUT_A_SHADER_INDEX: - loadRGBAImage(R.drawable.lut_a, 0); - break; - case LUT_B_SHADER_INDEX: - 
loadRGBAImage(R.drawable.lut_b, 0);
-                        break;
-                    case LUT_C_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.lut_c, 0);
-                        break;
-                    case LUT_D_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.lut_d, 0);
-                        break;
-                    case ASCII_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.ascii_mapping, ASCII_SHADER_INDEX);
-                        break;
-                    default:
-                }
-
-                if (LUT_A_SHADER_INDEX <= mCurrentShaderIndex && mCurrentShaderIndex <= LUT_D_SHADER_INDEX) {
-                    mByteFlowRender.loadShaderFromAssetsFile(LUT_A_SHADER_INDEX, getResources());
-                } else {
-                    mByteFlowRender.loadShaderFromAssetsFile(mCurrentShaderIndex, getResources());
-                }
-
+                loadShader(mCurrentShaderIndex);
                 //mByteFlowRender.setParamsInt(PARAM_TYPE_SET_SHADER_INDEX, mCurrentShaderIndex);
                 break;
             case SWIPE_LEFT:
@@ -425,31 +401,7 @@ public void onSwipe(MyGestureListener.SwipeDirection direction) {
                 if (mCurrentShaderIndex < 0) {
                     mCurrentShaderIndex += SHADER_NUM;
                 }
-                switch (mCurrentShaderIndex) {
-                    case LUT_A_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.lut_a, 0);
-                        break;
-                    case LUT_B_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.lut_b, 0);
-                        break;
-                    case LUT_C_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.lut_c, 0);
-                        break;
-                    case LUT_D_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.lut_d, 0);
-                        break;
-                    case ASCII_SHADER_INDEX:
-                        loadRGBAImage(R.drawable.ascii_mapping, ASCII_SHADER_INDEX);
-                        break;
-                    default:
-                }
-
-                if (LUT_A_SHADER_INDEX <= mCurrentShaderIndex && mCurrentShaderIndex <= LUT_D_SHADER_INDEX) {
-                    mByteFlowRender.loadShaderFromAssetsFile(LUT_A_SHADER_INDEX, getResources());
-                } else {
-                    mByteFlowRender.loadShaderFromAssetsFile(mCurrentShaderIndex, getResources());
-                }
-
+                loadShader(mCurrentShaderIndex);
                 //mByteFlowRender.setParamsInt(PARAM_TYPE_SET_SHADER_INDEX, mCurrentShaderIndex);
                 break;
             default:
@@ -458,6 +410,33 @@ public void onSwipe(MyGestureListener.SwipeDirection direction) {
     }

+    private void loadShader(int shaderIndex) {
+        switch (shaderIndex) {
+            case LUT_A_SHADER_INDEX:
+                loadRGBAImage(R.drawable.lut_a, 0);
+                break;
+            case LUT_B_SHADER_INDEX:
+                loadRGBAImage(R.drawable.lut_b, 0);
+                break;
+            case LUT_C_SHADER_INDEX:
+                loadRGBAImage(R.drawable.lut_c, 0);
+                break;
+            case LUT_D_SHADER_INDEX:
+                loadRGBAImage(R.drawable.lut_d, 0);
+                break;
+            case ASCII_SHADER_INDEX:
+                loadRGBAImage(R.drawable.ascii_mapping, ASCII_SHADER_INDEX);
+                break;
+            default:
+        }
+
+        if (LUT_A_SHADER_INDEX <= shaderIndex && shaderIndex <= LUT_D_SHADER_INDEX) {
+            mByteFlowRender.loadShaderFromAssetsFile(LUT_A_SHADER_INDEX, getResources());
+        } else {
+            mByteFlowRender.loadShaderFromAssetsFile(shaderIndex, getResources());
+        }
+    }
+
     @Override
     public void onClick(View v) {
         switch (v.getId()) {
diff --git a/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java b/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java
index e0159a3..2a22f00 100644
--- a/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java
+++ b/app/src/main/java/com/byteflow/openglcamera2/render/GLByteFlowRender.java
@@ -71,6 +71,7 @@ public void loadLutImage(int index, int format, int width, int height, byte[] by
     }

     public void loadShaderFromAssetsFile(int shaderIndex, Resources r) {
+        Log.d(TAG, "loadShaderFromAssetsFile shaderIndex = " + shaderIndex);
         String result = null;
         try {
             InputStream in = r.getAssets().open("shaders/fshader_" + shaderIndex + ".glsl");