1. 程式人生 > 其它 > 一個簡易的錄屏demo

一個簡易的錄屏demo

MyScreenRecord.cpp

//#define LOG_NDEBUG 0   // uncomment to enable verbose (ALOGV) logging; was misspelled LOG_NODEBUG
#define LOG_TAG "myrecord"

#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>
#include <unistd.h>

#include <optional>

#include <binder/ProcessState.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <log/log.h>
#include <media/MediaCodecBuffer.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/NdkMediaMuxer.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <mediadrm/ICrypto.h>
#include <ui/DisplayConfig.h>
#include <ui/DisplayState.h>
#include <ui/PhysicalDisplayId.h>
#include <utils/Errors.h>

using namespace android;
namespace ui = android::ui;

// Original signal dispositions, saved so the handler can restore them after
// the first SIGINT/SIGHUP (a second signal then terminates the process).
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;

static PhysicalDisplayId gPhysicalDisplayId;
static uint32_t gVideoWidth = 0;      // encoder frame width in pixels (must be even)
static uint32_t gVideoHeight = 0;     // encoder frame height in pixels (must be even)
static uint32_t gBitRate = 20000000;  // 20 Mbps
static uint32_t gBframes = 0;         // number of B frames; 0 disables them

static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";

// Set from the signal handler, polled by the encoder loop.
static volatile bool gStopRequested = false;

/*
 * Signal handler: request a clean stop, then restore the original
 * dispositions so a repeated signal kills the process normally.
 */
static void signalCatcher(int signum) {
    gStopRequested = true;
    switch (signum) {
    case SIGINT:
    case SIGHUP:
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
        break;
    default:
        abort();
        break;
    }
}

/*
 * Installs signalCatcher for SIGINT/SIGHUP and ignores SIGPIPE so a closed
 * output pipe does not kill the recorder.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    sigaction(SIGINT, &act, &gOrigSigactionINT);
    sigaction(SIGHUP, &act, &gOrigSigactionHUP);
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}

/*
 * Configures and starts the AVC video encoder.
 *
 * @param displayFps       display refresh rate used as the encoder frame rate
 * @param pCodec           out: the started MediaCodec
 * @param pBufferProducer  out: the codec's input surface producer, to be fed
 *                         by a virtual display
 * @return NO_ERROR on success, an error code otherwise
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec> *pCodec,
        sp<IGraphicBufferProducer> *pBufferProducer) {
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    // Frames arrive via a Surface, so the color format is "Android opaque".
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    // KEY_FRAME_RATE is a float key; the original setInt32 truncated the rate.
    format->setFloat(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    if (gBframes > 0) {
        // B frames require at least Main profile, so only request them (and
        // the matching profile/level) when enabled. The original always set
        // KEY_MAX_B_FRAMES to a hard-coded 10, ignoring gBframes.
        format->setInt32(KEY_MAX_B_FRAMES, gBframes);
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    // The codec keeps a strong reference to the looper, so a local sp is fine.
    sp<ALooper> looper = new ALooper;
    looper->setName("screenrecord_looper");
    looper->start();

    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true /* encoder */);
    if (codec == NULL) {
        fprintf(stderr, "ERROR: unable to create %s codec instance\n", kMimeTypeAvc);
        return UNKNOWN_ERROR;
    }

    status_t err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec (err=%d)\n", kMimeTypeAvc, err);
        codec->release();
        return err;
    }

    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to create encoder input surface (err=%d)\n", err);
        codec->release();
        return err;
    }

    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return NO_ERROR;
}

/*
 * Sets the projection on the virtual display so the physical display content
 * is scaled into the video frame with its aspect ratio preserved
 * (letter-/pillar-boxed and centered).
 */
static status_t setDisplayProjection(SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy, const ui::DisplayState& displayState) {
    const ui::Size& viewport = displayState.viewport;
    Rect layerStackRect(viewport);

    // Display aspect ratio expressed as height / width.
    float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());

    uint32_t videoWidth = gVideoWidth;
    uint32_t videoHeight = gVideoHeight;
    uint32_t outWidth, outHeight;
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // Video frame is taller than the display: fill the width.
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // Video frame is wider than the display: fill the height.
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }

    // Center the scaled image within the video frame.
    uint32_t offX = (videoWidth - outWidth) / 2;
    uint32_t offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);

    t.setDisplayProjection(dpy, ui::ROTATION_0, layerStackRect, displayRect);
    return NO_ERROR;
}

/*
 * Creates a virtual display that mirrors the physical display's layer stack
 * into the encoder's input surface.
 *
 * @param pDisplayHandle out: token of the created virtual display
 */
static status_t prepareVirtualDisplay(const ui::DisplayState& displayState,
        const sp<IGraphicBufferProducer>& bufferProducer, sp<IBinder> *pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /* secure */);
    SurfaceComposerClient::Transaction t;
    t.setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(t, dpy, displayState);
    t.setDisplayLayerStack(dpy, displayState.layerStack);
    t.apply();

    *pDisplayHandle = dpy;
    return NO_ERROR;
}

/*
 * Main encoder loop: drains output buffers from the encoder and feeds them to
 * the muxer until EOS or a stop is requested via signal.
 *
 * @param rawFp unused in this demo (reserved for raw H.264 output)
 * @return OK on a clean stop, an error code otherwise
 */
static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer *muxer, FILE* rawFp,
        const sp<IBinder>& display, const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
    static const int kTimeout = 250000;  // dequeue timeout in microseconds
    (void)rawFp;  // raw output path not implemented

    Vector<sp<MediaCodecBuffer>> buffers;
    status_t err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        return err;
    }

    ssize_t trackIdx = -1;       // video track; valid after INFO_FORMAT_CHANGED
    ssize_t metaTrackIdx = -1;   // placeholder metadata track (never written)

    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        switch (err) {
        case NO_ERROR: {
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGD("got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // The muxer takes CSD from the track format, so don't
                    // write the config buffer as a sample.
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGD("got codec output buffer (%zu bytes)", size);
                {
                    // Follow display rotation changes while recording.
                    ui::DisplayState displayState;
                    if (SurfaceComposerClient::getDisplayState(display, &displayState)
                            == NO_ERROR) {
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, displayState);
                        t.apply();
                        orientation = displayState.orientation;
                    }
                }
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }
                if (trackIdx < 0) {
                    // Guard: a sample must not be written before addTrack.
                    ALOGE("output buffer arrived before format change");
                    encoder->releaseOutputBuffer(bufIndex);
                    return INVALID_OPERATION;
                }
                sp<ABuffer> buffer = new ABuffer(
                        buffers[bufIndex]->data(), buffers[bufIndex]->size());
                AMediaCodecBufferInfo bufferInfo = {
                        0, static_cast<int32_t>(buffer->size()), ptsUsec, flags };
                media_status_t mErr = AMediaMuxer_writeSampleData(
                        muxer, trackIdx, buffer->data(), &bufferInfo);
                if (mErr != AMEDIA_OK) {
                    ALOGE("Failed writing data to muxer (err=%d)", mErr);
                    encoder->releaseOutputBuffer(bufIndex);
                    return UNKNOWN_ERROR;
                }
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                gStopRequested = true;
            }
            break;
        }
        case -EAGAIN:
            // No output available yet; keep polling.
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED: {
            // First event from the encoder: register tracks, start the muxer.
            ALOGD("Encoder format changed");
            sp<AMessage> newFormat;
            encoder->getOutputFormat(&newFormat);
            AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
            trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
            // addTrack copies the format, so it can be released here
            // (the original leaked it).
            AMediaFormat_delete(ndkFormat);
            ALOGD("trackIdx = %zd", trackIdx);

            AMediaFormat *metaFormat = AMediaFormat_new();
            AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME,
                    kMimeTypeApplicationOctetstream);
            metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
            AMediaFormat_delete(metaFormat);
            ALOGD("starting muxer, metaTrackIdx = %zd", metaTrackIdx);

            media_status_t mErr = AMediaMuxer_start(muxer);
            if (mErr != AMEDIA_OK) {
                ALOGE("Unable to start muxer (err=%d)", mErr);
                return UNKNOWN_ERROR;
            }
            break;
        }
        case android::INFO_OUTPUT_BUFFERS_CHANGED:
            ALOGD("dequeueOutputBuffer returned INFO_OUTPUT_BUFFERS_CHANGED");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGE("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            ALOGE("dequeueOutputBuffer returned unexpected result %d", err);
            return err;
        }
    }

    ALOGD("Encoder stopping (req=%d)", gStopRequested);
    return OK;
}

/*
 * Records the internal display into an MP4 file at fileName until SIGINT or
 * SIGHUP is received.
 */
status_t recordScreen(const char *fileName) {
    configureSignals();

    // Start the Binder thread pool so we can receive callbacks from
    // SurfaceFlinger and the media framework.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(gPhysicalDisplayId);
    if (display == NULL) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    ui::DisplayState displayState;
    status_t err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display state\n");
        return err;
    }

    DisplayConfig displayConfig;
    err = SurfaceComposerClient::getActiveDisplayConfig(display, &displayConfig);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display config\n");
        return err;
    }

    // The AVC encoder rejects odd dimensions, so round the viewport size down
    // to even values (the original only warned about this in a comment).
    const ui::Size& viewport = displayState.viewport;
    gVideoWidth = viewport.getWidth() & ~1u;
    gVideoHeight = viewport.getHeight() & ~1u;
    ALOGD("gVideoWidth = %u, gVideoHeight = %u", gVideoWidth, gVideoHeight);

    sp<MediaCodec> encoder;
    sp<IGraphicBufferProducer> encoderInputSurface;
    err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);
    if (err != NO_ERROR) {
        return err;
    }

    // Configure the virtual display that feeds the encoder.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(displayState, encoderInputSurface, &dpy);
    if (err != NO_ERROR) {
        encoder->release();
        return err;
    }

    // Replace any previous output file.
    if (unlink(fileName) != 0 && errno != ENOENT) {
        fprintf(stderr, "ERROR: couldn't remove existing file %s: %s\n",
                fileName, strerror(errno));
        return -errno;
    }
    int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
            S_IRUSR | S_IWUSR);
    if (fd < 0) {
        fprintf(stderr, "ERROR: couldn't open %s: %s\n", fileName, strerror(errno));
        return -errno;
    }
    AMediaMuxer *muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
    // NOTE(review): the framework muxer dup()s the fd (as in AOSP
    // screenrecord), so ours can be closed immediately — confirm on older
    // releases if this is backported.
    close(fd);

    err = runEncoder(encoder, muxer, NULL, display, dpy, displayState.orientation);

    // Tear everything down in reverse order of creation.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    encoder->stop();
    AMediaMuxer_stop(muxer);
    AMediaMuxer_delete(muxer);  // was leaked in the original
    encoder->release();

    return err;  // propagate the encoder-loop result instead of always OK
}

int main(int argc, char** argv) {
    // The original dereferenced argv[1] unconditionally.
    if (argc < 2) {
        fprintf(stderr, "Usage: %s <output-file.mp4>\n", argv[0]);
        return 2;
    }

    std::optional<PhysicalDisplayId> displayId =
            SurfaceComposerClient::getInternalDisplayId();
    if (!displayId) {
        fprintf(stderr, "Failed to get token for internal display\n");
        return 1;
    }
    gPhysicalDisplayId = *displayId;

    status_t err = recordScreen(argv[1]);
    return err == NO_ERROR ? 0 : 1;
}

Android.bp

// Build rule for the simple screen-recording demo.
cc_binary {
    name: "myscreenrecord",

    // NOTE(review): the post presents the source file as "MyScreenRecord.cpp"
    // but this rule references "myscreenrecord.cpp" — confirm the on-disk
    // filename; Soong paths are case-sensitive.
    srcs: [
        "myscreenrecord.cpp",
    ],

    shared_libs: [
        "libstagefright",
        "libmedia",
        "libmediandk",
        "libmedia_omx",
        "libutils",
        "libbinder",
        "libstagefright_foundation",
        "libui",
        "libgui",
        "libcutils",
        "liblog",
    ],

    // Header-only dependencies (no runtime linkage needed).
    header_libs: [
        "libmediadrm_headers",
        "libmediametrics_headers",
    ],

    // For MediaCodecConstants.h and the OpenMAX color-format definitions.
    include_dirs: [
        "frameworks/av/media/libstagefright",
        "frameworks/av/media/libstagefright/include",
        "frameworks/native/include/media/openmax",
    ],
}