My current problem is that I'm looking for a way to capture a frame/screenshot during a WebRTC connection on Android. I know there are already some solutions for this, but none of them has worked for me.
In my current approach I followed this Gist.
The problem is that it returns a black bitmap. I'm attaching my approach below, but it is basically the same as the gist. If anyone has any idea how to solve this, thanks in advance.
Activity
SingleFrameCapturer.BitmapListener gotFrameListener = new
SingleFrameCapturer.BitmapListener() {
@Override
public void gotBitmap(Bitmap theBitmap) {
Log.e(TAG, "got bitmap!");
ImageView imageView = findViewById(R.id.object_preview);
imageView.setImageBitmap(theBitmap);
imageView.setVisibility(View.VISIBLE);
}
};
MediaStream stream = contextManager.getStream();
SingleFrameCapturer.toBitmap(this, stream, gotFrameListener);
}
}
SingleFrameCapturer
import android.graphics.Bitmap;
import android.util.Base64;
import android.util.Log;
import java.nio.ByteBuffer;
import org.webrtc.VideoTrack;
import org.webrtc.MediaStream;
import org.webrtc.EglBase;
import org.webrtc.RendererCommon;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLDisplay;
public class SingleFrameCapturer {
public interface BitmapListener {
public void gotBitmap(Bitmap theBitmap);
}
private static boolean firstTimeOnly = true;
// The PixelBuffer code below is based on
// https://github.com/CyberAgent/android-gpuimage/blob/master/library/src/jp/co/cyberagent/android/gpuimage/PixelBuffer.java
//
class PixelBuffer implements org.webrtc.VideoRenderer.Callbacks {
final static String TAG = "PixelBuffer";
final static boolean LIST_CONFIGS = false;
int mWidth, mHeight;
EGL10 mEGL;
EGLDisplay mEGLDisplay;
boolean gotFrame = false;
String mThreadOwner;
BitmapListener listener;
android.app.Activity activity;
public PixelBuffer(android.app.Activity activity, BitmapListener listener) {
this.listener = listener;
this.activity = activity;
}
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
@Override
public void renderFrame(final org.webrtc.VideoRenderer.I420Frame i420Frame) {
Log.d(TAG, "entered renderFrame");
//
// we only want to grab a single frame but our method may get called
// a few times before we're done.
//
if (gotFrame || i420Frame.width == 0 || i420Frame.height == 0) {
Log.d(TAG, "Already got frame so taking honourable exit");
org.webrtc.VideoRenderer.renderFrameDone(i420Frame);
return;
}
activity.runOnUiThread(new Runnable() {
public void run() {
int width = i420Frame.width;
int height = i420Frame.height;
Log.d(TAG, "about to call initWithSize");
initWithSize(width, height);
Bitmap bitmap = toBitmap(i420Frame);
org.webrtc.VideoRenderer.renderFrameDone(i420Frame);
gotFrame = true;
listener.gotBitmap(bitmap);
destroy();
}
});
}
private int buildARGB(int r, int g, int b) {
return (0xff << 24) |(r << 16) | (g << 8) | b;
}
private Bitmap toBitmap(org.webrtc.VideoRenderer.I420Frame frame) {
if (frame.yuvFrame) {
//EglBase eglBase = EglBase.create();
EglBase eglBase = StreamActivity.rootEglBase;
if(firstTimeOnly) {
eglBase.createDummyPbufferSurface();
firstTimeOnly = false;
}
eglBase.makeCurrent();
TextureToRGB textureToRGB = new TextureToRGB();
int numPixels = mWidth *mHeight;
final int bytesPerPixel = 4;
ByteBuffer framebuffer = ByteBuffer.allocateDirect(numPixels*bytesPerPixel);
final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();
final float[] rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
false, frameAspectRatio, (float) mWidth / mHeight);
final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
textureToRGB.convert(framebuffer, mWidth, mHeight, frame.textureId, texMatrix);
byte [] frameBytes = framebuffer.array();
int [] dataARGB = new int[numPixels];
for(int i = 0, j = 0; j < numPixels; i+=bytesPerPixel, j++) {
//
// data order in frameBytes is red, green, blue, alpha, red, green, ....
//
dataARGB[j] = buildARGB(frameBytes[i] & 0xff,frameBytes[i+1] &0xff,frameBytes[i+2] &0xff);
}
Bitmap bitmap = Bitmap.createBitmap(dataARGB, mWidth, mHeight, Bitmap.Config.ARGB_8888);
return bitmap;
}
else {
return null;
}
}
private void initWithSize(final int width, final int height) {
mWidth = width;
mHeight = height;
// Record thread owner of OpenGL context
mThreadOwner = Thread.currentThread().getName();
}
public void destroy() {
}
private int getConfigAttrib(final EGLConfig config, final int attribute) {
int[] value = new int[1];
return mEGL.eglGetConfigAttrib(mEGLDisplay, config,
attribute, value) ? value[0] : 0;
}
}
final private static String TAG = "SingleFrameCapturer";
org.webrtc.VideoRenderer renderer;
private SingleFrameCapturer(final android.app.Activity activity, MediaStream mediaStream, final BitmapListener gotFrameListener) {
if( mediaStream.videoTracks.size() == 0) {
Log.e(TAG, "No video track to capture from");
return;
}
final VideoTrack videoTrack = mediaStream.videoTracks.get(0);
final PixelBuffer vg = new PixelBuffer(activity, new BitmapListener() {
@Override
public void gotBitmap(final Bitmap bitmap) {
activity.runOnUiThread(new Runnable(){
public void run() {
videoTrack.removeRenderer(renderer);
try {
gotFrameListener.gotBitmap(bitmap);
} catch( Exception e1) {
Log.e(TAG, "Exception in gotBitmap callback:" + e1.getMessage());
e1.printStackTrace(System.err);
}
}
});
}
});
renderer = new org.webrtc.VideoRenderer(vg);
videoTrack.addRenderer(renderer);
}
/**
* Captures a single frame from the media stream to a Bitmap.
* @param activity the activity whose UI thread the callback will be posted to.
* @param mediaStream the input media stream.
* @param gotFrameListener a callback which will receive the Bitmap.
*/
public static void toBitmap(android.app.Activity activity, MediaStream mediaStream, final BitmapListener gotFrameListener) {
new SingleFrameCapturer(activity, mediaStream, gotFrameListener);
}
/**
* Captures a frame from the supplied media stream and writes it as a JPEG to the supplied outputStream.
* @param activity the activity whose UI thread the callbacks will be posted to.
* @param mediaStream the source media stream.
* @param quality the JPEG quality, 0 to 100.
* @param outputStream the output stream the JPEG data will be written to.
* @param done a runnable that will be invoked once the outputStream has been written to.
*/
public static void toOutputStream(android.app.Activity activity, MediaStream mediaStream, final int quality, final java.io.OutputStream outputStream, final Runnable done) {
BitmapListener gotFrameListener = new BitmapListener() {
@Override
public void gotBitmap(Bitmap theBitmap) {
theBitmap.compress(Bitmap.CompressFormat.JPEG, quality, outputStream);
try {
done.run();
} catch( Exception e1) {
Log.e(TAG, "Exception in toOutputStream done callback:" + e1.getMessage());
e1.printStackTrace(System.err);
}
}
};
toBitmap(activity, mediaStream, gotFrameListener);
}
/**
* Captures a frame from the supplied media stream and appends it as a JPEG data URL to a StringBuilder.
* @param activity the activity whose UI thread the callbacks will be posted to.
* @param mediaStream the source media stream.
* @param quality the JPEG quality, 0 to 100.
* @param output a StringBuilder which will receive the data URL.
* @param done a runnable that will be invoked once the data URL has been built.
*/
public static void toDataUrl(android.app.Activity activity, MediaStream mediaStream, final int quality, final StringBuilder output, final Runnable done) {
final java.io.ByteArrayOutputStream outputStream = new java.io.ByteArrayOutputStream();
Runnable convertToUrl = new Runnable() {
@Override
public void run() {
output.append("data:image/jpeg;base64,");
output.append(Base64.encodeToString(outputStream.toByteArray(), Base64.DEFAULT));
try {
done.run();
} catch( Exception e1) {
Log.e(TAG, "Exception in toDataUrl done callback:" + e1.getMessage());
e1.printStackTrace(System.err);
}
}
};
toOutputStream(activity, mediaStream, quality, outputStream, convertToUrl);
}
}
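For completeness, the other entry points of the class above could be driven the same way as toBitmap; here is a short sketch (stream is the MediaStream from the Activity snippet, and the quality value 80 is arbitrary):
// Capture one frame as a JPEG data URL and log it when the conversion is done.
final StringBuilder dataUrl = new StringBuilder();
SingleFrameCapturer.toDataUrl(this, stream, 80, dataUrl, new Runnable() {
    @Override
    public void run() {
        Log.d(TAG, "frame as data URL: " + dataUrl);
    }
});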
TextureToRGB
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import org.webrtc.*;
/**
* Class for converting OES textures to RGBA. It should be constructed on a thread with
* an active EGL context, and only be used from that thread. It is used by SingleFrameCapturer.
*/
public class TextureToRGB {
// Vertex coordinates in Normalized Device Coordinates, i.e.
// (-1, -1) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
private static final String VERTEX_SHADER =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oesTex;\n"
+ "\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(oesTex, interp_tc);\n"
+ "}\n";
// clang-format on
private final GlTextureFrameBuffer textureFrameBuffer;
private final GlShader shader;
private final int texMatrixLoc;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private boolean released = false;
/**
* This class should be constructed on a thread that has an active EGL context.
*/
public TextureToRGB() {
threadChecker.checkIsOnValidThread();
textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
shader.useProgram();
texMatrixLoc = shader.getUniformLocation("texMatrix");
GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
// If the width is not a multiple of 4 pixels, the texture
// will be scaled up slightly and clipped at the right border.
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
}
public void convert(ByteBuffer buf, int width, int height, int srcTextureId,
float[] transformMatrix) {
threadChecker.checkIsOnValidThread();
if (released) {
throw new IllegalStateException("TextureToRGB.convert called on released object");
}
// The RGBA output needs 4 bytes per pixel.
int size = width * height * 4;
if (buf.capacity() < size) {
throw new IllegalArgumentException("TextureToRGB.convert called with too small buffer");
}
// Produce a frame buffer starting at top-left corner, not
// bottom-left.
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
final int frameBufferWidth = width;
final int frameBufferHeight =height;
textureFrameBuffer.setSize(frameBufferWidth, frameBufferHeight);
// Bind our framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
GLES20.glViewport(0, 0, width, height);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glReadPixels(
0, 0, frameBufferWidth, frameBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
// Unbind texture. Reportedly needed on some devices to get
// the texture updated from the camera.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GlUtil.checkNoGLES2Error("TextureToRGB.convert");
}
public void release() {
threadChecker.checkIsOnValidThread();
released = true;
shader.release();
textureFrameBuffer.release();
}
}
Best answer
I just found a solution to your problem. Here is how you can take a screenshot of a WebRTC call on Android using a SurfaceViewRenderer:
Basically, you have to create a custom class that implements EglRenderer.FrameListener and register it with <your_surface_view_renderer>.addFrameListener(EglRenderer.FrameListener listener, float scale).
Then, in your class's onFrame method, you will get a Bitmap for each frame. Don't forget to call removeFrameListener afterwards.
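For reference, here is a minimal sketch of that approach. It assumes a reasonably recent org.webrtc build in which SurfaceViewRenderer exposes addFrameListener(EglRenderer.FrameListener, float) and removeFrameListener(EglRenderer.FrameListener); the class, interface and field names below are placeholders, not part of the answer.
import android.graphics.Bitmap;
import android.os.Handler;
import android.os.Looper;
import org.webrtc.EglRenderer;
import org.webrtc.SurfaceViewRenderer;

public class RendererFrameCapturer {
    public interface OnCapture {
        void onCapture(Bitmap bitmap);
    }

    /** Grabs the next frame rendered by the given SurfaceViewRenderer. */
    public static void captureOnce(final SurfaceViewRenderer renderer, final OnCapture callback) {
        final Handler mainHandler = new Handler(Looper.getMainLooper());
        EglRenderer.FrameListener listener = new EglRenderer.FrameListener() {
            @Override
            public void onFrame(final Bitmap bitmap) {
                // Called on the render thread; hand the bitmap over to the UI thread.
                mainHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        callback.onCapture(bitmap);
                    }
                });
            }
        };
        // scale = 1.0f keeps the original frame resolution.
        renderer.addFrameListener(listener, 1.0f);
    }
}
In the Activity you could then do something like this (remoteRenderer is a hypothetical SurfaceViewRenderer field; object_preview is the ImageView from the question):
RendererFrameCapturer.captureOnce(remoteRenderer, new RendererFrameCapturer.OnCapture() {
    @Override
    public void onCapture(Bitmap bitmap) {
        ImageView imageView = findViewById(R.id.object_preview);
        imageView.setImageBitmap(bitmap);
        imageView.setVisibility(View.VISIBLE);
    }
});
One caveat: in the org.webrtc builds I have seen, a listener registered via addFrameListener is delivered a single frame and then dropped by EglRenderer, and removeFrameListener must not be called from inside onFrame (which runs on the render thread); call removeFrameListener from another thread only to cancel a capture that has not fired yet. Check the behaviour of your library version.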
Regarding java - WebRTC, capturing the screen, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/50415439/