Several ways to take a screenshot
There are three main ways to capture the screen on Android:
1. Capture a specific View through view.getDrawingCache(), which reads the View's drawing cache.
This works before Android 5.0 as well and requires no permission. It can capture anything inside your own app, though some WebView content may fail to come through.
Note that this approach only sees the current app's content: it cannot capture the status bar at all (even the clock is missing; the status-bar area comes out blank).
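As a point of reference, here is a minimal sketch of the drawing-cache approach (note that these APIs were later deprecated in API 28); it assumes you pass in the view you want captured, e.g. the activity's decor view:
// Capture the contents of a View via its drawing cache (deprecated since API 28).
Bitmap captureView(View rootView) {
    rootView.setDrawingCacheEnabled(true);
    rootView.buildDrawingCache();
    Bitmap cache = rootView.getDrawingCache();
    // Copy before disabling the cache: the cached bitmap is owned by the view.
    Bitmap screenshot = (cache == null) ? null : Bitmap.createBitmap(cache);
    rootView.setDrawingCacheEnabled(false);
    return screenshot;
}
For a whole-window shot inside your own app, pass getWindow().getDecorView().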
2. Capture through the low-level Linux framebuffer.
On Linux, image data reaches the display device through the FrameBuffer, so reading that buffer yields a screenshot. The DDMS tool takes its screenshots this way. The FrameBuffer's device file is /dev/graphics/fb0.
This method requires root, but because it reads the raw display data, in principle it can capture anything shown on the screen.
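A rough sketch of the idea follows. It assumes a rooted device, a known screen size, and an RGBA_8888 framebuffer layout; real devices vary considerably (BGRA and RGB_565 layouts are common, and the file may hold multiple buffers), so treat this as illustrative only:
// Illustrative only: read one RGBA_8888 frame from the framebuffer via a root shell.
Bitmap grabFrameBuffer(int width, int height) throws IOException {
    Process su = Runtime.getRuntime().exec(new String[] { "su", "-c", "cat /dev/graphics/fb0" });
    InputStream in = su.getInputStream();
    byte[] raw = new byte[width * height * 4]; // 4 bytes per pixel, assumed RGBA_8888
    int read = 0;
    while (read < raw.length) {
        int n = in.read(raw, read, raw.length - read);
        if (n < 0) break;
        read += n;
    }
    in.close();
    Bitmap fb = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    fb.copyPixelsFromBuffer(ByteBuffer.wrap(raw));
    return fb;
}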
3. Capture through the MediaProjection API introduced in Android 5.0.
Officially this API is for screen recording and audio capture; see the ScreenCapture sample that ships with the SDK. An analysis of the sample is at http://www.cnblogs.com/tgyf/p/4675082.html . The system's own screenshot feature on Android 5.0+ also uses this API; a walkthrough of that code path is at http://blog.csdn.net/kong92917/article/details/50495740 .
This method needs no root and the API is public, but it is only available on Lollipop (5.0) and later.
About MediaProjection
Anyone who has implemented screenshots on Android knows that before 5.0 you needed system root access to capture the screen. Since 5.0, Android exposes the android.media.projection API, which lets third-party apps capture the screen without root. The official documentation describes the API as serving screen capture and audio recording; here we only discuss screen capture. Because it works by projection, what gets captured is not the physical screen itself but a mirrored "virtual display". For an ordinary screenshot that distinction is irrelevant: the mirrored image is identical to what is on screen, so the approach is entirely adequate.
MediaProjection is managed and obtained through MediaProjectionManager; it can capture the current screen or record it as video.
Steps for using MediaProjection:
- First obtain a MediaProjectionManager. As with other managers, call Context.getSystemService() with MEDIA_PROJECTION_SERVICE.
- Call MediaProjectionManager.createScreenCaptureIntent(). This pops up a dialog asking the user to authorize screen capture.
- Receive the authorization result in onActivityResult().
- If authorized, obtain a MediaProjection via MediaProjectionManager.getMediaProjection(int resultCode, Intent resultData), then create a VirtualDisplay with MediaProjection.createVirtualDisplay(String name, int width, int height, int dpi, int flags, Surface surface, VirtualDisplay.Callback callback, Handler handler). The surface argument is what actually receives the content for capture or recording.
Activity
public class Activity2 extends ListActivity {
private MediaProjectionManager mMpMngr;
private static final int REQUEST_MEDIA_PROJECTION = 10086;
boolean isCapture;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
String[] array = { "Capture", "Record" };
setListAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, new ArrayList<String>(Arrays.asList(array))));
// 1. Obtain the MediaProjectionManager, like other managers, via Context.getSystemService().
mMpMngr = (MediaProjectionManager) getApplicationContext().getSystemService(Context.MEDIA_PROJECTION_SERVICE);
}
@Override
protected void onListItemClick(ListView l, View v, int position, long id) {
switch (position) {
case 0: // "Capture"
isCapture = true;
stopService(new Intent(getApplicationContext(), RecordService.class));
startIntent();
break;
case 1: // "Record"
isCapture = false;
stopService(new Intent(getApplicationContext(), CaptureService.class));
startIntent();
break;
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
// 3. Receive the authorization result in onActivityResult().
if (requestCode == REQUEST_MEDIA_PROJECTION && resultCode == RESULT_OK) {
MyApplication.mResultCode = resultCode;
MyApplication.mResultIntent = data;
MyApplication.mMpmngr = mMpMngr;
startIntent();
}
}
private void startIntent() {
if (MyApplication.mResultIntent != null && MyApplication.mResultCode != 0) { // already authorized
if (isCapture) startService(new Intent(getApplicationContext(), CaptureService.class)); // start capturing
else startService(new Intent(getApplicationContext(), RecordService.class)); // start recording
} else {
// 2. createScreenCaptureIntent() pops up a dialog asking the user to authorize screen capture.
startActivityForResult(mMpMngr.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION); // not yet authorized
}
}
}
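The activity above stashes the authorization result in static fields on a custom Application subclass so that the services can reach it. That class is not shown in the original; a minimal sketch (it must also be registered with android:name in the manifest) would be:
public class MyApplication extends Application {
    // Shared between the activity (which obtains the grant) and the services (which use it).
    public static MediaProjectionManager mMpmngr;
    public static int mResultCode;
    public static Intent mResultIntent;
}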
RecordService
/**
* Screen-recording service.
*/
public class RecordService extends Service {
private static final String TAG = "bqt";
private static final String mVideoPath = Environment.getExternalStorageDirectory().getPath() + "/";
private MediaProjection mMpj;
private VirtualDisplay mVirtualDisplay;
private int windowWidth;
private int windowHeight;
private int screenDensity;
private Surface mSurface;
private MediaCodec mMediaCodec;
private MediaMuxer mMuxer;
private LinearLayout mCaptureLl;
private WindowManager wm;
private boolean isRecordOn;
private AtomicBoolean mIsQuit = new AtomicBoolean(false);
private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
private boolean mMuxerStarted = false;
private int mVideoTrackIndex = -1;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
createEnvironment();
// configureMedia() is invoked from recordStart(); calling it here as well would leak a codec instance.
createFloatView();
}
private void configureMedia() {
// MediaFormat describes the video format. "video/avc" is H.264 (Advanced Video Coding).
// windowWidth and windowHeight are the output dimensions; they must not exceed the captured size, or the codec will crash.
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", windowWidth, windowHeight);
// Bit rate: higher usually means clearer video but a bigger file. 2000000 is 2 Mbps.
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
// Frame rate: higher looks smoother. 30 is a common default; below 24 the video stutters noticeably.
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
// COLOR_FormatSurface means the codec's input comes from a Surface (graphic-buffer metadata).
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
// Key-frame (I-frame) interval in seconds. A large value (e.g. 10) means previews/seeks within that
// window all show the same frame; use 1 if you need fine-grained previews. 2 is a reasonable middle ground.
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
try {
// Create an AVC encoder instance.
mMediaCodec = MediaCodec.createEncoderByType("video/avc");
} catch (IOException e) {
// Fail fast: configure() below would otherwise NPE on a null codec.
throw new RuntimeException("failed to create AVC encoder", e);
}
// Configure the codec with the format defined above; surface and crypto stay null for an encoder.
mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// The key step: MediaCodec does not take a file or stream as input. Instead it hands us an
// input Surface; whatever the virtual display renders into that Surface is what gets encoded.
mSurface = mMediaCodec.createInputSurface();
mMediaCodec.start();
}
private void createEnvironment() {
wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics metric = new DisplayMetrics();
wm.getDefaultDisplay().getMetrics(metric);
windowWidth = metric.widthPixels;
windowHeight = metric.heightPixels;
screenDensity = metric.densityDpi;
}
@SuppressLint("InflateParams")
private void createFloatView() {
final WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.TYPE_PHONE,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.RGBA_8888);
params.x = windowWidth;
params.y = windowHeight / 2;
params.gravity = Gravity.LEFT | Gravity.TOP;
params.width = WindowManager.LayoutParams.WRAP_CONTENT;
params.height = WindowManager.LayoutParams.WRAP_CONTENT;
LayoutInflater inflater = LayoutInflater.from(getApplicationContext());
mCaptureLl = (LinearLayout) inflater.inflate(R.layout.float_record, null);
final ImageView mCaptureIv = (ImageView) mCaptureLl.findViewById(R.id.iv_record);
wm.addView(mCaptureLl, params);
mCaptureIv.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
isRecordOn = !isRecordOn;
if (isRecordOn) {
mCaptureIv.setImageResource(R.drawable.ic_recording);
Toast.makeText(RecordService.this.getApplicationContext(), "Recording started", Toast.LENGTH_SHORT).show();
recordStart();
} else {
mCaptureIv.setImageResource(R.drawable.ic_record);
Toast.makeText(RecordService.this.getApplicationContext(), "Recording stopped", Toast.LENGTH_SHORT).show();
recordStop();
}
}
});
mCaptureIv.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
params.x = (int) (motionEvent.getRawX() - mCaptureIv.getMeasuredWidth() / 2);
params.y = (int) (motionEvent.getRawY() - mCaptureIv.getMeasuredHeight() / 2 - 20);
wm.updateViewLayout(mCaptureLl, params);
return false;
}
});
}
private void recordStop() {
mIsQuit.set(true);
}
private void recordStart() {
configureMedia();
startVirtual();
new Thread() {
@Override
public void run() {
Log.e(TAG, "start startRecord");
startRecord();
}
}.start();
}
private void startRecord() {
try {
mMuxer = new MediaMuxer(mVideoPath + System.currentTimeMillis() + ".mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
recordVirtualDisplay();
} catch (IOException e) {
e.printStackTrace();
} finally {
release();
}
}
private void startVirtual() {
if (mMpj == null) mMpj = MyApplication.mMpmngr.getMediaProjection(MyApplication.mResultCode, MyApplication.mResultIntent);
mVirtualDisplay = mMpj.createVirtualDisplay("record_screen", windowWidth, windowHeight, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mSurface, null, null);
}
private void recordVirtualDisplay() {
while (!mIsQuit.get()) {
int index = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000);
Log.i(TAG, "dequeue output buffer index=" + index);
if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // output format is now known; arrives before any buffers
resetOutputFormat();
} else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) { // no output available within the timeout
Log.d(TAG, "retrieving buffers time out!");
try {
// wait 10ms
Thread.sleep(10);
} catch (InterruptedException e) {
}
} else if (index >= 0) { // a valid output buffer
if (!mMuxerStarted) {
throw new IllegalStateException("MediaMuxer dose not call addTrack(format) ");
}
encodeToVideoTrack(index);
mMediaCodec.releaseOutputBuffer(index, false);
}
}
}
private void encodeToVideoTrack(int index) {
ByteBuffer encodedData = mMediaCodec.getOutputBuffer(index);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { // codec config data (e.g. SPS/PPS), not media data
// The codec config data was pulled out and fed to the muxer when we got the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size == 0) {
Log.d(TAG, "info.size == 0, drop it.");
encodedData = null;
} else {
Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size + ", presentationTimeUs=" + mBufferInfo.presentationTimeUs + ", offset="
+ mBufferInfo.offset);
}
if (encodedData != null) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mVideoTrackIndex, encodedData, mBufferInfo); // write the encoded sample
Log.i(TAG, "sent " + mBufferInfo.size + " bytes to muxer...");
}
}
private void resetOutputFormat() {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new IllegalStateException("output format already changed!");
}
MediaFormat newFormat = mMediaCodec.getOutputFormat();
Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
mVideoTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
}
private void release() {
mIsQuit.set(false);
Log.i(TAG, " release() ");
if (mMediaCodec != null) {
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
}
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
mVirtualDisplay = null;
}
if (mMuxer != null) {
if (mMuxerStarted) mMuxer.stop(); // stop() throws if the muxer was never started
mMuxer.release();
mMuxer = null;
}
mMuxerStarted = false;
}
@Override
public void onDestroy() {
super.onDestroy();
release();
if (mMpj != null) {
mMpj.stop();
}
if (mCaptureLl != null) {
wm.removeView(mCaptureLl);
}
}
}
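Both services attach a floating control with WindowManager using TYPE_PHONE, which requires the SYSTEM_ALERT_WINDOW permission. On Android 6.0+ the user has to grant it explicitly; a sketch of the check, assumed to run in the launching activity (REQUEST_OVERLAY_PERMISSION is a hypothetical request code):
// Ask the user to allow drawing over other apps before starting either service.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !Settings.canDrawOverlays(this)) {
    Intent intent = new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION,
            Uri.parse("package:" + getPackageName()));
    startActivityForResult(intent, REQUEST_OVERLAY_PERMISSION); // hypothetical request code
}
Note also that TYPE_PHONE is deprecated on Android 8.0+, where TYPE_APPLICATION_OVERLAY should be used instead.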
CaptureService
/**
* Screen-capture service.
* @author 白乾涛
*/
public class CaptureService extends Service {
private static final String TAG = "bqt";
private static final String mImagePath = Environment.getExternalStorageDirectory().getPath() + "/screenshots/";
private MediaProjection mMpj;
private ImageView mCaptureIv;
private LinearLayout mCaptureLl;
private ImageReader mImageReader;
private String mImageName;
private int screenDensity;
private int windowWidth;
private int windowHeight;
private VirtualDisplay mVirtualDisplay;
private WindowManager wm;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
createEnvironment();
createFloatView();
}
private void createEnvironment() {
wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
DisplayMetrics metric = new DisplayMetrics();
wm.getDefaultDisplay().getMetrics(metric);
windowWidth = metric.widthPixels;
windowHeight = metric.heightPixels;
screenDensity = metric.densityDpi;
mImageReader = ImageReader.newInstance(windowWidth, windowHeight, PixelFormat.RGBA_8888, 2); // RGBA_8888 == 0x1; buffer up to 2 images
}
@SuppressLint("InflateParams")
private void createFloatView() {
final WindowManager.LayoutParams params = new WindowManager.LayoutParams(WindowManager.LayoutParams.TYPE_PHONE,
WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, PixelFormat.RGBA_8888);
params.x = 0;
params.y = windowHeight / 2;
params.gravity = Gravity.LEFT | Gravity.TOP;
params.width = WindowManager.LayoutParams.WRAP_CONTENT;
params.height = WindowManager.LayoutParams.WRAP_CONTENT;
LayoutInflater inflater = LayoutInflater.from(getApplicationContext());
mCaptureLl = (LinearLayout) inflater.inflate(R.layout.float_capture, null);
mCaptureIv = (ImageView) mCaptureLl.findViewById(R.id.iv_capture);
wm.addView(mCaptureLl, params);
mCaptureIv.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// Hide the floating button first so it does not appear in the screenshot,
// then stagger the virtual-display setup, the capture, and the teardown.
mCaptureIv.setVisibility(View.INVISIBLE);
Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
Log.e(TAG, "start startVirtual");
startVirtual();
}
}, 500);
handler.postDelayed(new Runnable() {
@Override
public void run() {
Log.e(TAG, "start startCapture");
startCapture();
}
}, 1000);
handler.postDelayed(new Runnable() {
@Override
public void run() {
Log.e(TAG, "start stopVirtual");
mCaptureIv.setVisibility(View.VISIBLE);
stopVirtual();
}
}, 1500);
}
});
mCaptureIv.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
params.x = (int) (motionEvent.getRawX() - mCaptureIv.getMeasuredWidth() / 2);
params.y = (int) (motionEvent.getRawY() - mCaptureIv.getMeasuredHeight() / 2 - 20);
wm.updateViewLayout(mCaptureLl, params);
return false;
}
});
}
private void stopVirtual() {
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
mVirtualDisplay = null;
}
}
private void startCapture() {
mImageName = System.currentTimeMillis() + ".png";
Log.e(TAG, "image name is : " + mImageName);
Image image = mImageReader.acquireLatestImage();
if (image == null) { // the virtual display may not have produced a frame yet
Log.e(TAG, "acquireLatestImage returned null");
return;
}
int width = image.getWidth();
int height = image.getHeight();
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * width;
// The row stride may be wider than the visible width, so build an over-wide bitmap first, then crop it.
Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
image.close();
if (bitmap != null) {
Log.e(TAG, "bitmap create success ");
try {
File fileFolder = new File(mImagePath);
if (!fileFolder.exists()) fileFolder.mkdirs();
File file = new File(mImagePath, mImageName);
if (!file.exists()) {
Log.e(TAG, "file create success ");
file.createNewFile();
}
FileOutputStream out = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
out.flush();
out.close();
Log.e(TAG, "file save success ");
Toast.makeText(this.getApplicationContext(), "截图成功", Toast.LENGTH_SHORT).show();
} catch (IOException e) {
Log.e(TAG, e.toString());
e.printStackTrace();
}
}
}
private void startVirtual() {
if (mMpj == null) mMpj = MyApplication.mMpmngr.getMediaProjection(MyApplication.mResultCode, MyApplication.mResultIntent);
mVirtualDisplay = mMpj.createVirtualDisplay("capture_screen", windowWidth, windowHeight, screenDensity,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mImageReader.getSurface(), null, null);
}
@Override
public void onDestroy() {
super.onDestroy();
if (mCaptureLl != null) {
wm.removeView(mCaptureLl);
}
if (mMpj != null) {
mMpj.stop();
mMpj = null;
}
}
}
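Both services also write to external storage, so the manifest presumably declares WRITE_EXTERNAL_STORAGE alongside SYSTEM_ALERT_WINDOW. On Android 6.0+ the storage permission must additionally be requested at runtime; a sketch, assumed to run in the activity before either service starts (REQUEST_STORAGE is a hypothetical request code):
// Request the runtime storage permission before the services try to save files.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
        && checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
    requestPermissions(new String[] { Manifest.permission.WRITE_EXTERNAL_STORAGE }, REQUEST_STORAGE); // hypothetical request code
}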