android CMakeLists C语言 播放animated webp

根据公司需要,最近在研究webp。android原生已经支持webp了,可是好像不支持动图,就想找c语言实现的开源项目。发现fresco是支持animated webp的,就想看一下,结果没有找到相关源码;后来又找到了Android-WebP,虽然它是c语言支持库,但上层实现是c++。c语言项目当然可以调用c++代码,不过我还是想把它改成纯c语言实现,同时删除gif相关的代码,于是就有了这篇博文。

先把framesequence\src\main\cpp文件夹下的webp文件夹复制到自己项目的cpp文件夹下,然后修改CMakeLists.txt文件。

# Minimum CMake version required by the Android Gradle plugin toolchain.
cmake_minimum_required(VERSION 3.4.1)

# Build the JNI bridge, the stream/frame-sequence wrappers, and the libwebp
# decode/demux/dsp/utils sources into one shared library.
add_library(native-lib
        SHARED

        src/main/cpp/native-lib.cpp
        src/main/cpp/webp_stream.c
        src/main/cpp/webp_frame_sequence.c

        # libwebp decoder sources.
        src/main/cpp/webp/dec/alpha.c
        src/main/cpp/webp/dec/buffer.c
        src/main/cpp/webp/dec/frame.c
        src/main/cpp/webp/dec/idec.c
        src/main/cpp/webp/dec/io.c
        src/main/cpp/webp/dec/quant.c
        src/main/cpp/webp/dec/tree.c
        src/main/cpp/webp/dec/vp8.c
        src/main/cpp/webp/dec/vp8l.c
        src/main/cpp/webp/dec/webp.c

        # Container demuxer (animation frame iteration).
        src/main/cpp/webp/demux/demux.c

        # DSP kernels, including SSE2/NEON/MIPS32 specializations.
        src/main/cpp/webp/dsp/alpha_processing.c
        src/main/cpp/webp/dsp/alpha_processing_sse2.c
        src/main/cpp/webp/dsp/cpu-features.c
        src/main/cpp/webp/dsp/cpu.c
        src/main/cpp/webp/dsp/dec.c
        src/main/cpp/webp/dsp/dec_clip_tables.c
        src/main/cpp/webp/dsp/dec_mips32.c
        src/main/cpp/webp/dsp/dec_neon.c
        src/main/cpp/webp/dsp/dec_sse2.c
        src/main/cpp/webp/dsp/lossless.c
        src/main/cpp/webp/dsp/lossless_mips32.c
        src/main/cpp/webp/dsp/lossless_neon.c
        src/main/cpp/webp/dsp/lossless_sse2.c
        src/main/cpp/webp/dsp/upsampling.c
        src/main/cpp/webp/dsp/upsampling_neon.c
        src/main/cpp/webp/dsp/upsampling_sse2.c
        src/main/cpp/webp/dsp/yuv.c
        src/main/cpp/webp/dsp/yuv_mips32.c
        src/main/cpp/webp/dsp/yuv_sse2.c

        # Shared utilities.
        src/main/cpp/webp/utils/bit_reader.c
        src/main/cpp/webp/utils/color_cache.c
        src/main/cpp/webp/utils/filters.c
        src/main/cpp/webp/utils/huffman.c
        src/main/cpp/webp/utils/quant_levels_dec.c
        src/main/cpp/webp/utils/random.c
        src/main/cpp/webp/utils/rescaler.c
        src/main/cpp/webp/utils/thread.c
        src/main/cpp/webp/utils/utils.c
        )

# NDK logging library.
find_library(log-lib
        log)

# jnigraphics provides the AndroidBitmap_* APIs used by native-lib.cpp.
target_link_libraries(native-lib
        jnigraphics
        ${log-lib})

创建webp_frame_sequence.c和webp_stream.c和webp_types.h文件,再把utils文件夹和Color.h文件复制到自己项目的cpp文件夹下

在webp_types.h内创建webp_stream,webp_frame_sequence和webp_frame_sequence_state三个结构体

/*
 * In-memory byte stream with sequential read plus arbitrary-size lookahead
 * (peek).  The stream does NOT own the caller's input buffer; the caller
 * must keep it alive for the stream's lifetime.
 */
typedef struct struct_webp_stream {

    char* mPeekBuffer;   /* heap buffer holding peeked-but-unread bytes, or NULL */
    size_t mPeekSize;    /* number of valid bytes in mPeekBuffer */
    size_t mPeekOffset;  /* index of the first unread byte in mPeekBuffer */

    uint8_t* mBuffer;    /* current read position within the caller-owned input */
    size_t mRemaining;   /* bytes left at mBuffer */


    /* Copies up to |size| bytes into |buffer| WITHOUT consuming them;
     * returns the number of bytes actually available. */
    size_t (*peek)(struct struct_webp_stream *stream, void* buffer, size_t size);
    /* Copies and consumes up to |size| bytes; returns bytes copied. */
    size_t (*read)(struct struct_webp_stream *stream,void* buffer, size_t size);

}webp_stream;
/* Creates a stream over caller-owned |buffer| of |size| bytes. */
webp_stream* webp_stream_create(void* buffer, size_t size);
/* Frees the stream and its internal peek buffer (not the input buffer). */
void webp_stream_free(webp_stream* stream);


/*
 * A parsed animated-WebP file: owns a private copy of the file bytes
 * (mData), the demuxer built over them, and a per-frame key-frame table.
 * Accessors are exposed as function pointers so C++ callers (native-lib.cpp)
 * can use the struct without knowing the C implementation functions.
 */
typedef struct struct_webp_frame_sequence {
    WebPData mData;          /* owned copy of the whole WebP file */
    WebPDemuxer* mDemux;     /* demuxer over mData; owned */
    int mLoopCount;          /* loop count from the container header */
    uint32_t mFormatFlags;   /* WEBP_FF_FORMAT_FLAGS (e.g. ALPHA_FLAG) */
    bool* mIsKeyFrame;       /* per-frame "decodable standalone" table; owned */


    /* Canvas width in pixels. */
    int (*getWidth)(struct struct_webp_frame_sequence *frame_sequence);

    /* Canvas height in pixels. */
    int (*getHeight)(struct struct_webp_frame_sequence *frame_sequence);
    /* True when the container advertises no alpha channel. */
    bool (*isOpaque)(struct struct_webp_frame_sequence *frame_sequence);
    /* Total number of animation frames. */
    int (*getFrameCount)(struct struct_webp_frame_sequence *frame_sequence);

    /* Loop count recorded at parse time. */
    int (*getDefaultLoopCount)(struct struct_webp_frame_sequence *frame_sequence);

    /* Raw demuxer handle (still owned by the frame sequence). */
    WebPDemuxer* (*getDemuxer)(struct struct_webp_frame_sequence *frame_sequence);

    /* True when frame |frameNr| (0-based) can be decoded without predecessors. */
    bool (*isKeyFrame)(struct struct_webp_frame_sequence *frame_sequence,size_t frameNr);


}webp_frame_sequence;
/* Reads a whole WebP file from |stream|; returns NULL on failure.
 * Caller owns the result and frees it with webp_frame_sequence_free(). */
webp_frame_sequence* webp_frame_sequence_create(webp_stream* stream);
void webp_frame_sequence_free(webp_frame_sequence* frame_sequence);

/*
 * Per-decoder state: decoder configuration plus a scratch canvas used to
 * preserve the previous frame between drawFrame() calls.
 */
typedef struct struct_webp_frame_sequence_state {
    webp_frame_sequence *mFrameSequence;  /* not owned */
    WebPDecoderConfig mDecoderConfig;     /* external-memory RGBA output config */
    Color8888* mPreservedBuffer;          /* canvas-sized scratch buffer; owned */

    /* Renders frame |frameNr| into |outputPtr| (stride in pixels), reusing
     * |previousFrameNr| when possible; returns the frame delay in ms or -1. */
    long (*drawFrame)(struct struct_webp_frame_sequence_state *webp_frame_sequence_state,int frameNr,
                           Color8888* outputPtr, int outputPixelStride, int previousFrameNr);
}webp_frame_sequence_state;
webp_frame_sequence_state* webp_frame_sequence_state_create(webp_frame_sequence* frame_sequence);
void webp_frame_sequence_state_free(webp_frame_sequence_state* frame_sequence_state);

webp_stream.c代码

/* Copies up to |size| bytes from the backing input into |buffer| and
 * advances the stream; returns the number of bytes actually copied. */
static size_t doRead(webp_stream* stream, void* buffer, size_t size) {
    size_t to_copy = (size < stream->mRemaining) ? size : stream->mRemaining;
    memcpy(buffer, stream->mBuffer, to_copy);
    stream->mBuffer += to_copy;
    stream->mRemaining -= to_copy;
    return to_copy;
}
/*
 * Copies up to |size| bytes into |buffer| WITHOUT consuming them.  Grows the
 * internal lookahead buffer as needed.  Returns the number of bytes made
 * available (may be less than |size| near end of input, or 0 on OOM).
 */
static size_t peek(webp_stream* stream, void* buffer, size_t size) {
    size_t peek_remaining = stream->mPeekSize - stream->mPeekOffset;
    if (size > peek_remaining) {
        // Grow the lookahead buffer so it can hold |size| bytes.
        char* old_peek = stream->mPeekBuffer;
        char* new_peek = malloc(size);
        if (!new_peek) {
            return 0;  // OOM: report nothing available rather than crashing below
        }
        if (old_peek) {
            // Keep the unread tail of the old lookahead at the front of the
            // new buffer.
            memcpy(new_peek, old_peek + stream->mPeekOffset, peek_remaining);
            free(old_peek);
        }
        stream->mPeekBuffer = new_peek;
        // BUG FIX: fresh bytes must be appended AFTER the preserved tail
        // (offset peek_remaining).  The original wrote them at the stale
        // mPeekOffset, overwriting the data just copied above.
        size_t read = doRead(stream, new_peek + peek_remaining, size - peek_remaining);
        stream->mPeekOffset = 0;
        stream->mPeekSize = peek_remaining + read;
    }
    size = min(size, stream->mPeekSize - stream->mPeekOffset);
    memcpy(buffer, stream->mPeekBuffer + stream->mPeekOffset, size);
    return size;
}

/* Copies and consumes up to |size| bytes: first drains any previously
 * peeked bytes, then reads from the backing input.  Returns bytes copied. */
static size_t read(webp_stream* stream, void* buffer, size_t size) {
    size_t total = 0;
    size_t buffered = stream->mPeekSize - stream->mPeekOffset;
    if (buffered) {
        // Serve from the lookahead buffer first.
        size_t from_peek = min(size, buffered);
        memcpy(buffer, stream->mPeekBuffer + stream->mPeekOffset, from_peek);
        stream->mPeekOffset += from_peek;
        if (stream->mPeekOffset == stream->mPeekSize) {
            // Lookahead fully consumed: release it.
            free(stream->mPeekBuffer);
            stream->mPeekBuffer = 0;
            stream->mPeekOffset = 0;
            stream->mPeekSize = 0;
        }
        size -= from_peek;
        buffer = (char*) buffer + from_peek;
        total = from_peek;
    }
    if (size) {
        total += doRead(stream, buffer, size);
    }
    return total;
}


/*
 * Creates a stream over caller-owned |buffer| of |size| bytes.  The buffer
 * must outlive the stream.  Returns NULL on allocation failure (the
 * original dereferenced an unchecked malloc result).
 */
webp_stream* webp_stream_create(void* buffer, size_t size) {
    webp_stream* stream = malloc(sizeof *stream);
    if (!stream) {
        return NULL;
    }
    stream->mPeekBuffer = NULL;
    stream->mPeekOffset = 0;
    stream->mPeekSize = 0;
    stream->mBuffer = (uint8_t*) buffer;
    stream->mRemaining = size;

    stream->peek = peek;
    stream->read = read;
    return stream;
}
/* Frees the stream and its lookahead buffer.  Accepts NULL like free(). */
void webp_stream_free(webp_stream* stream) {
    if (!stream) {
        return;
    }
    free(stream->mPeekBuffer);  // free(NULL) is a no-op; no guard needed
    free(stream);
}

webp_frame_sequence.c代码

/* Number of animation frames, per the demuxer. */
int getFrameCount(webp_frame_sequence *frame_sequence) {
    WebPDemuxer *demux = frame_sequence->mDemux;
    return WebPDemuxGetI(demux, WEBP_FF_FRAME_COUNT);
}

/* Canvas width in pixels. */
int getWidth(webp_frame_sequence *frame_sequence) {
    WebPDemuxer *demux = frame_sequence->mDemux;
    return WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH);
}

/* Canvas height in pixels. */
int getHeight(webp_frame_sequence *frame_sequence) {
    WebPDemuxer *demux = frame_sequence->mDemux;
    return WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT);
}

/* Opaque means the container advertises no alpha channel. */
bool isOpaque(webp_frame_sequence *frame_sequence) {
    return (frame_sequence->mFormatFlags & ALPHA_FLAG) == 0;
}


/* Loop count captured at parse time. */
int getDefaultLoopCount(webp_frame_sequence *frame_sequence) {
    return frame_sequence->mLoopCount;
}

/* Raw demuxer handle (still owned by the sequence). */
WebPDemuxer *getDemuxer(webp_frame_sequence *frame_sequence) {
    return frame_sequence->mDemux;
}

/* Per-frame key-frame flag computed by constructDependencyChain(). */
bool isKeyFrame(webp_frame_sequence *frame_sequence, size_t frameNr) {
    return frame_sequence->mIsKeyFrame[frameNr];
}


/* True when the buffer begins with a RIFF container whose form type is
 * "WEBP" (bytes 0-3 "RIFF", bytes 8-11 "WEBP"). */
static bool isWebP(void *header, int header_size) {
    const uint8_t *bytes = (const uint8_t *) header;
    if (header_size < RIFF_HEADER_SIZE) {
        return false;
    }
    return memcmp(bytes, "RIFF", 4) == 0 && memcmp(bytes + 8, "WEBP", 4) == 0;
}

/* Does this frame's rectangle cover the entire canvas? */
static bool isFullFrame(const WebPIterator *frame, int canvasWidth, int canvasHeight) {
    return frame->width == canvasWidth && frame->height == canvasHeight;
}


/*
 * Fills mIsKeyFrame: frame i is a "key frame" (decodable without any earlier
 * frame) when it fully covers the canvas without blending, or when the
 * previous frame is disposed to background and that wipe leaves no stale
 * pixels (the previous frame was full-canvas, or itself followed only key
 * frames).
 */
static void constructDependencyChain(webp_frame_sequence *frame_sequence) {
    const size_t frameCount = (const size_t) getFrameCount(frame_sequence);
    frame_sequence->mIsKeyFrame = malloc(sizeof(bool) * frameCount);
    const int canvasWidth = getWidth(frame_sequence);
    const int canvasHeight = getHeight(frame_sequence);

    WebPIterator prev;
    WebPIterator curr;

    // Note: WebPDemuxGetFrame() uses base-1 counting.
    int ok = WebPDemuxGetFrame(frame_sequence->mDemux, 1, &curr);
    ALOG_ASSERT(ok, "Could not retrieve frame# 0");
    frame_sequence->mIsKeyFrame[0] = true;  // 0th frame is always a key frame.
    for (size_t i = 1; i < frameCount; i++) {
        prev = curr;
        ok = WebPDemuxGetFrame(frame_sequence->mDemux, i + 1, &curr);  // Get ith frame.
        // Fixed format specifier: i is size_t, so %zu rather than %d.
        ALOG_ASSERT(ok, "Could not retrieve frame# %zu", i);

        // A frame that covers the whole canvas and does not blend with what
        // was there before can always be decoded standalone.
        if ((!curr.has_alpha || curr.blend_method == WEBP_MUX_NO_BLEND) &&
            isFullFrame(&curr, canvasWidth, canvasHeight)) {
            frame_sequence->mIsKeyFrame[i] = true;
        } else {
            frame_sequence->mIsKeyFrame[i] = (prev.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
                                             (isFullFrame(&prev, canvasWidth, canvasHeight) ||
                                              frame_sequence->mIsKeyFrame[i - 1]);
        }
    }
    WebPDemuxReleaseIterator(&prev);
    WebPDemuxReleaseIterator(&curr);

#if WEBP_DEBUG
    ALOGD("Dependency chain:");
    for (size_t i = 0; i < frameCount; i++) {
        // Fixed: the original referenced the C++ member name 'mIsKeyFrame',
        // which does not compile in C; qualify through frame_sequence.
        ALOGD("Frame# %zu: %s", i,
              frame_sequence->mIsKeyFrame[i] ? "Key frame" : "NOT a key frame");
    }
#endif
}


/*
 * Reads a complete WebP file from |stream|, builds the demuxer and wires up
 * the accessor function pointers.  Returns NULL on malformed input or OOM.
 * Caller owns the result and must release it with webp_frame_sequence_free().
 *
 * Fixed vs. the original: every failure path now frees what was allocated
 * (the original leaked frame_sequence and mData.bytes), malloc results are
 * checked, and the WEBP_DEBUG line no longer uses C++ no-arg getters.
 */
webp_frame_sequence *webp_frame_sequence_create(webp_stream *stream) {
    // Peek (without consuming) enough bytes to validate the RIFF/WEBP magic.
    char header[RIFF_HEADER_SIZE];
    size_t headerSize = stream->peek(stream, header, RIFF_HEADER_SIZE);
    if (!isWebP(header, (int) headerSize)) {
        return NULL;
    }
    webp_frame_sequence *frame_sequence = malloc(sizeof *frame_sequence);
    if (!frame_sequence) {
        return NULL;
    }
    frame_sequence->mData.bytes = NULL;
    frame_sequence->mDemux = NULL;
    frame_sequence->mIsKeyFrame = NULL;

    uint8_t riff_header[RIFF_HEADER_SIZE];
    if (stream->read(stream, riff_header, RIFF_HEADER_SIZE) != RIFF_HEADER_SIZE) {
        ALOGE("WebP header load failed");
        goto fail;
    }
    // Total file size = chunk header + little-endian RIFF size field.
    frame_sequence->mData.size = CHUNK_HEADER_SIZE + GetLE32(riff_header + TAG_SIZE);
    frame_sequence->mData.bytes = malloc(frame_sequence->mData.size);
    if (!frame_sequence->mData.bytes) {
        ALOGE("WebP buffer allocation failed");
        goto fail;
    }
    memcpy((void *) frame_sequence->mData.bytes, riff_header, RIFF_HEADER_SIZE);

    // Read rest of the bytes.
    void *remaining_bytes = (void *) (frame_sequence->mData.bytes + RIFF_HEADER_SIZE);
    size_t remaining_size = frame_sequence->mData.size - RIFF_HEADER_SIZE;
    if (stream->read(stream, remaining_bytes, remaining_size) != remaining_size) {
        ALOGE("WebP full load failed");
        goto fail;
    }

    frame_sequence->mDemux = WebPDemux(&frame_sequence->mData);
    if (!frame_sequence->mDemux) {
        ALOGE("Parsing of WebP container file failed");
        goto fail;
    }
    frame_sequence->mLoopCount = WebPDemuxGetI(frame_sequence->mDemux, WEBP_FF_LOOP_COUNT);
    frame_sequence->mFormatFlags = WebPDemuxGetI(frame_sequence->mDemux, WEBP_FF_FORMAT_FLAGS);
#if WEBP_DEBUG
    ALOGD("FrameSequence_webp created with size = %d x %d, number of frames = %d, flags = 0x%X",
          getWidth(frame_sequence), getHeight(frame_sequence),
          getFrameCount(frame_sequence), frame_sequence->mFormatFlags);
#endif
    constructDependencyChain(frame_sequence);

    frame_sequence->getWidth = getWidth;
    frame_sequence->getHeight = getHeight;
    frame_sequence->isOpaque = isOpaque;
    frame_sequence->getFrameCount = getFrameCount;
    frame_sequence->getDefaultLoopCount = getDefaultLoopCount;
    frame_sequence->getDemuxer = getDemuxer;
    frame_sequence->isKeyFrame = isKeyFrame;

    return frame_sequence;

fail:
    free((void *) frame_sequence->mData.bytes);
    free(frame_sequence);
    return NULL;
}

/* Releases the demuxer, key-frame table, file copy, and the struct itself.
 * Accepts NULL, so failed creation paths can call it unconditionally. */
void webp_frame_sequence_free(webp_frame_sequence *frame_sequence) {
    if (!frame_sequence) {
        return;
    }
    WebPDemuxDelete(frame_sequence->mDemux);
    free(frame_sequence->mIsKeyFrame);
    free((void *) frame_sequence->mData.bytes);
    free(frame_sequence);
}

/* Zeroes one row of pixels.  Assumes the TRANSPARENT color is 0x0. */
static void clearLine(Color8888 *dst, int width) {
    memset(dst, 0, (size_t) width * sizeof(Color8888));
}

/* Blits a width x height rectangle between two row-strided pixel buffers
 * (strides are in pixels, not bytes). */
static void copyFrame(const Color8888 *src, int srcStride, Color8888 *dst, int dstStride,
                      int width, int height) {
    const size_t rowBytes = (size_t) width * sizeof(*dst);
    for (int row = 0; row < height; row++) {
        memcpy(dst, src, rowBytes);
        src += srcStride;
        dst += dstStride;
    }
}

/* True when target's rectangle geometrically contains covered's rectangle. */
static bool checkIfCover(const WebPIterator *target, const WebPIterator *covered) {
    if (covered->x_offset < target->x_offset || covered->y_offset < target->y_offset) {
        return false;
    }
    if (covered->x_offset + covered->width > target->x_offset + target->width) {
        return false;
    }
    return covered->y_offset + covered->height <= target->y_offset + target->height;
}

/*
 * Prepares |currBuffer| as the starting canvas for decoding frame |currIter|:
 * a key frame starts from a fully transparent canvas; any other frame starts
 * from the previous frame's pixels, with the previous frame's rectangle
 * cleared when its dispose method requests background disposal.
 */
void initializeFrame(webp_frame_sequence *frame_sequence, const WebPIterator *currIter,
                     Color8888 *currBuffer,
                     int currStride, const WebPIterator *prevIter, const Color8888 *prevBuffer,
                     int prevStride) {
    const int canvasWidth = getWidth(frame_sequence);
    const int canvasHeight = getHeight(frame_sequence);
    const bool currFrameIsKeyFrame = isKeyFrame(frame_sequence, (size_t) (currIter->frame_num - 1));

    if (currFrameIsKeyFrame) {  // Clear canvas.
        for (int y = 0; y < canvasHeight; y++) {
            Color8888 *dst = currBuffer + y * currStride;
            clearLine(dst, canvasWidth);
        }
    } else {
        // Preserve previous frame as starting state of current frame.
        copyFrame(prevBuffer, prevStride, currBuffer, currStride, canvasWidth, canvasHeight);

        // Dispose previous frame rectangle to Background if needed.
        // Fixed: currIter/prevIter are already pointers; the original passed
        // &currIter/&prevIter (WebPIterator **) to checkIfCover(), which is a
        // type error in C.
        bool prevFrameCompletelyCovered =
                (!currIter->has_alpha || currIter->blend_method == WEBP_MUX_NO_BLEND) &&
                checkIfCover(currIter, prevIter);
        if ((prevIter->dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
            !prevFrameCompletelyCovered) {
            Color8888 *dst = currBuffer + prevIter->x_offset + prevIter->y_offset * currStride;
            for (int j = 0; j < prevIter->height; j++) {
                clearLine(dst, prevIter->width);
                dst += currStride;
            }
        }
    }
}

/* A frame disposed to background leaves nothing reusable on the canvas. */
static bool willBeCleared(const WebPIterator *iter) {
    const bool disposed_to_background = (iter->dispose_method == WEBP_MUX_DISPOSE_BACKGROUND);
    return disposed_to_background;
}

/* Is canvas coordinate (x, y) inside this frame's rectangle? */
static bool FrameContainsPixel(const WebPIterator *frame, int x, int y) {
    const int dx = x - frame->x_offset;
    const int dy = y - frame->y_offset;
    return dx >= 0 && dx < frame->width && dy >= 0 && dy < frame->height;
}

/*
 * Decodes frame |currIter| into its rectangle of |currBuffer| (which
 * initializeFrame() has already primed), then, when the frame requests
 * WEBP_MUX_BLEND, restores fully-transparent decoded pixels from the
 * previous canvas.  Returns false if WebPDecode() fails.
 *
 * Fixed vs. the original: removed unused locals canvasWidth/canvasHeight.
 */
bool decodeFrame(webp_frame_sequence_state *frame_sequence_state, const WebPIterator *currIter,
                 Color8888 *currBuffer,
                 int currStride, const WebPIterator *prevIter, const Color8888 *prevBuffer,
                 int prevStride) {
    // Point the external-memory decoder output at the frame's rectangle
    // inside the canvas.  Stride is in bytes, hence * 4 (RGBA).
    Color8888 *dst = currBuffer + currIter->x_offset + currIter->y_offset * currStride;
    frame_sequence_state->mDecoderConfig.output.u.RGBA.rgba = (uint8_t *) dst;
    frame_sequence_state->mDecoderConfig.output.u.RGBA.stride = currStride * 4;
    frame_sequence_state->mDecoderConfig.output.u.RGBA.size =
            frame_sequence_state->mDecoderConfig.output.u.RGBA.stride * currIter->height;

    const WebPData currFrame = currIter->fragment;
    if (WebPDecode(currFrame.bytes, currFrame.size, &frame_sequence_state->mDecoderConfig) !=
        VP8_STATUS_OK) {
        return false;
    }

    const bool currFrameIsKeyFrame = isKeyFrame(frame_sequence_state->mFrameSequence,
                                                currIter->frame_num - 1);

    if (currIter->blend_method == WEBP_MUX_BLEND && !currFrameIsKeyFrame) {
        if (prevIter->dispose_method == WEBP_MUX_DISPOSE_NONE) {
            // Previous frame kept the whole canvas: any transparent decoded
            // pixel shows the previous canvas pixel through.
            for (int y = 0; y < currIter->height; y++) {
                const int canvasY = currIter->y_offset + y;
                for (int x = 0; x < currIter->width; x++) {
                    const int canvasX = currIter->x_offset + x;
                    int index = canvasY * currStride + canvasX;
                    Color8888 currPixel = currBuffer[index];
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!(currPixel & COLOR_8888_ALPHA_MASK)) {
                        const Color8888 prevPixel = prevBuffer[canvasY * prevStride + canvasX];
                        currBuffer[index] = prevPixel;
                    }
                }
            }
        } else {
            // Previous frame's rectangle was disposed to background: only
            // pixels OUTSIDE that rectangle may show through.
            for (int y = 0; y < currIter->height; y++) {
                const int canvasY = currIter->y_offset + y;
                for (int x = 0; x < currIter->width; x++) {
                    const int canvasX = currIter->x_offset + x;
                    int index = canvasY * currStride + canvasX;
                    Color8888 currPixel = currBuffer[index];
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!(currPixel & COLOR_8888_ALPHA_MASK)
                        && !FrameContainsPixel(prevIter, canvasX, canvasY)) {
                        const Color8888 prevPixel = prevBuffer[canvasY * prevStride + canvasX];
                        currBuffer[index] = prevPixel;
                    }
                }
            }
        }
    }
    return true;
}


/*
 * Renders frame |frameNr| into |outputPtr| (stride in pixels), decoding
 * forward from the nearest usable frame: either previousFrameNr + 1 (the
 * caller's canvas already holds that frame) or the closest key frame at or
 * before frameNr.  Returns the duration (ms) of the frame preceding
 * frameNr, or -1 on decode failure.
 */
long drawFrame(webp_frame_sequence_state *frame_sequence_state, int frameNr,
               Color8888 *outputPtr, int outputPixelStride, int previousFrameNr) {

    webp_frame_sequence *mFrameSequence = frame_sequence_state->mFrameSequence;
    WebPDemuxer *demux = getDemuxer(mFrameSequence);
    ALOG_ASSERT(demux, "Cannot drawFrame, mDemux is NULL");

#if WEBP_DEBUG
    ALOGD("  drawFrame called for frame# %d, previous frame# %d", frameNr, previousFrameNr);
#endif

    const int canvasWidth = getWidth(mFrameSequence);
    const int canvasHeight = getHeight(mFrameSequence);

    // Find the first frame to be decoded.
    int start = max(previousFrameNr + 1, 0);
    int earliestRequired = frameNr;
    while (earliestRequired > start) {
        if (isKeyFrame(mFrameSequence, (size_t) earliestRequired)) {
            start = earliestRequired;
            break;
        }
        earliestRequired--;
    }

    WebPIterator currIter;
    WebPIterator prevIter;
    int ok = WebPDemuxGetFrame(demux, start, &currIter);  // Get frame number 'start - 1'.
    ALOG_ASSERT(ok, "Could not retrieve frame# %d", start - 1);

    // Use preserve buffer only if needed.
    Color8888 *prevBuffer = (frameNr == 0) ? outputPtr : frame_sequence_state->mPreservedBuffer;
    int prevStride = (frameNr == 0) ? outputPixelStride : canvasWidth;
    Color8888 *currBuffer = outputPtr;
    int currStride = outputPixelStride;

    for (int i = start; i <= frameNr; i++) {
        prevIter = currIter;
        ok = WebPDemuxGetFrame(demux, i + 1, &currIter);  // Get ith frame.
        ALOG_ASSERT(ok, "Could not retrieve frame# %d", i);
#if WEBP_DEBUG
        ALOGD("      producing frame %d (has_alpha = %d, dispose = %s, blend = %s, duration = %d)",
              i, currIter.has_alpha,
              (currIter.dispose_method == WEBP_MUX_DISPOSE_NONE) ? "none" : "background",
              (currIter.blend_method == WEBP_MUX_BLEND) ? "yes" : "no", currIter.duration);
#endif
        // We swap the prev/curr buffers as we go.
        Color8888 *tmpBuffer = prevBuffer;
        prevBuffer = currBuffer;
        currBuffer = tmpBuffer;

        int tmpStride = prevStride;
        prevStride = currStride;
        currStride = tmpStride;

#if WEBP_DEBUG
        // Fixed: the original referenced the C++ member 'mPreservedBuffer',
        // which does not compile in C.
        ALOGD("            prev = %p, curr = %p, out = %p, tmp = %p",
              prevBuffer, currBuffer, outputPtr, frame_sequence_state->mPreservedBuffer);
#endif
        // Process this frame.
        initializeFrame(mFrameSequence, &currIter, currBuffer, currStride, &prevIter, prevBuffer,
                        prevStride);

        // Frames that are immediately disposed to background only need
        // decoding when they are the frame actually being displayed.
        if (i == frameNr || !willBeCleared(&currIter)) {
            if (!decodeFrame(frame_sequence_state, &currIter, currBuffer, currStride, &prevIter,
                             prevBuffer, prevStride)) {
                ALOGE("Error decoding frame# %d", i);
                return -1;
            }
        }
    }

    // After an odd number of swaps the final pixels live in the preserve
    // buffer; copy them into the caller's bitmap.
    if (outputPtr != currBuffer) {
        copyFrame(currBuffer, currStride, outputPtr, outputPixelStride, canvasWidth, canvasHeight);
    }

    // Return last frame's delay.
    const int frameCount = getFrameCount(mFrameSequence);
    const int lastFrame = (frameNr + frameCount - 1) % frameCount;
    // Fixed off-by-one: WebPDemuxGetFrame() uses base-1 counting, so the
    // 0-based index 'lastFrame' is demux frame 'lastFrame + 1'.
    ok = WebPDemuxGetFrame(demux, lastFrame + 1, &currIter);
    ALOG_ASSERT(ok, "Could not retrieve frame# %d", lastFrame);
    const int lastFrameDelay = currIter.duration;

    WebPDemuxReleaseIterator(&currIter);
    WebPDemuxReleaseIterator(&prevIter);

    return lastFrameDelay;
}

/*
 * Allocates per-decoder state for |frame_sequence| (not owned): an
 * external-memory RGBA decoder config plus a canvas-sized scratch buffer.
 * Returns NULL on allocation failure (the original left mallocs unchecked).
 */
webp_frame_sequence_state *webp_frame_sequence_state_create(webp_frame_sequence *frame_sequence) {
    webp_frame_sequence_state *frame_sequence_state = malloc(sizeof *frame_sequence_state);
    if (!frame_sequence_state) {
        return NULL;
    }
    frame_sequence_state->mFrameSequence = frame_sequence;
    WebPInitDecoderConfig(&frame_sequence_state->mDecoderConfig);
    frame_sequence_state->mDecoderConfig.output.is_external_memory = 1;
    frame_sequence_state->mDecoderConfig.output.colorspace = MODE_rgbA;  // Pre-multiplied alpha mode.
    const int canvasWidth = getWidth(frame_sequence);
    const int canvasHeight = getHeight(frame_sequence);
    frame_sequence_state->mPreservedBuffer = malloc(sizeof(Color8888) * canvasWidth * canvasHeight);
    if (!frame_sequence_state->mPreservedBuffer) {
        free(frame_sequence_state);
        return NULL;
    }
    frame_sequence_state->drawFrame = drawFrame;
    return frame_sequence_state;
}

/* Releases the scratch buffer and the state struct.  Accepts NULL. */
void webp_frame_sequence_state_free(webp_frame_sequence_state *frame_sequence_state) {
    if (!frame_sequence_state) {
        return;
    }
    free(frame_sequence_state->mPreservedBuffer);
    free(frame_sequence_state);
}

将FrameSequence.java,FrameSequenceDrawable.java和WebpImageView.java复制到自己项目,将sample项目下的MainActivity.java里代码复制到自己的项目内,把缺的资源都复制到自己的项目内

修改FrameSequence.java代码

// Load the shared library built from native-lib.cpp before any native call.
static {
    System.loadLibrary("native-lib");
}

删除一些native函数,剩下

/** Decodes a WebP byte array; returns a native webp_frame_sequence handle (0 on failure). */
private static native long nativeDecodeByteArray(byte[] data, int offset, int length);
/** Frees the native sequence returned by {@link #nativeDecodeByteArray}. */
public static native void nativeDestroyFrameSequence(long nativeFrameSequence);
/** Creates per-decoder native state for the given sequence handle. */
private static native long nativeCreateState(long nativeFrameSequence);
/** Frees the native state returned by {@link #nativeCreateState}. */
private static native void nativeDestroyState(long nativeState);
/** Draws a frame into {@code output}; returns the frame delay in ms, or -1 on error. */
private static native long nativeGetFrame(long nativeState, int frameNr,
                                          Bitmap output, int previousFrameNr);

// Simple accessors over the native sequence handle.
private static native int getWidth(long nativeFrameSequence);
private static native int getHeight(long nativeFrameSequence);
private static native boolean isOpaque(long nativeFrameSequence);
private static native int getFrameCount(long nativeFrameSequence);

在native-lib.cpp内添加对应的native函数

// Template sample method left in by the project wizard: returns a greeting
// string to the Java side.
extern "C"
JNIEXPORT jstring JNICALL
Java_com_example_animatedwebp_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    return env->NewStringUTF("Hello from C++");
}
// Decodes a WebP byte array into a native webp_frame_sequence.
// Fixed: the Java side declares `native long nativeDecodeByteArray(...)`,
// so this must return jlong — the original returned a raw C pointer type
// from a JNI entry point.  Returns 0 on failure.
extern "C"
JNIEXPORT jlong JNICALL
Java_com_example_animatedwebp_FrameSequence_nativeDecodeByteArray(JNIEnv *env, jclass type,
                                                                  jbyteArray data_, jint offset,
                                                                  jint length) {
    jbyte *data = env->GetByteArrayElements(data_, NULL);

    // webp_frame_sequence_create() copies the whole file into its own
    // buffer, so the Java array can be released immediately afterwards.
    webp_frame_sequence *frame_sequence = NULL;
    webp_stream *stream = webp_stream_create(data + offset, (size_t) length);
    if (stream != NULL) {
        frame_sequence = webp_frame_sequence_create(stream);
        webp_stream_free(stream);
    }

    env->ReleaseByteArrayElements(data_, data, 0);
    return reinterpret_cast<jlong>(frame_sequence);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_animatedwebp_FrameSequence_nativeDestroyFrameSequence(JNIEnv *env, jclass type,
                                                                       jlong nativeFrameSequence) {
    // Release the native sequence allocated by nativeDecodeByteArray().
    webp_frame_sequence *frame_sequence =
            reinterpret_cast<webp_frame_sequence *>(nativeFrameSequence);
    webp_frame_sequence_free(frame_sequence);
}
extern "C"
// Creates per-decoder state for the given sequence handle.
// Fixed: the Java side declares `native long nativeCreateState(...)`, so
// this must return jlong — the original returned a raw C pointer type from
// a JNI entry point.
JNIEXPORT jlong JNICALL
Java_com_example_animatedwebp_FrameSequence_nativeCreateState(JNIEnv *env, jclass type,
                                                              jlong nativeFrameSequence) {
    webp_frame_sequence *frame_sequence =
            reinterpret_cast<webp_frame_sequence *>(nativeFrameSequence);

    webp_frame_sequence_state *frame_sequence_state =
            webp_frame_sequence_state_create(frame_sequence);

    return reinterpret_cast<jlong>(frame_sequence_state);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_animatedwebp_FrameSequence_nativeDestroyState(JNIEnv *env, jclass type,
                                                               jlong nativeState) {
    // Free the per-decoder resources allocated by nativeCreateState().
    webp_frame_sequence_state *frame_sequence_state =
            reinterpret_cast<webp_frame_sequence_state *>(nativeState);
    webp_frame_sequence_state_free(frame_sequence_state);
}
// Renders the requested frame straight into the Java Bitmap's pixel buffer
// and returns the frame's delay in milliseconds (0 on Bitmap API failure).
extern "C"
JNIEXPORT jlong JNICALL
Java_com_example_animatedwebp_FrameSequence_nativeGetFrame(JNIEnv *env, jclass type,
                                                           jlong nativeState, jint frameNr,
                                                           jobject output, jint previousFrameNr) {
    webp_frame_sequence_state *frame_sequence_state =
            reinterpret_cast<webp_frame_sequence_state *>(nativeState);

    AndroidBitmapInfo info;
    if (AndroidBitmap_getInfo(env, output, &info) < 0) {
        return 0;
    }

    void *pixels = NULL;
    if (AndroidBitmap_lockPixels(env, output, &pixels) < 0) {
        return 0;
    }
    // info.stride is in bytes; Color8888 pixels are 4 bytes wide.
    const int pixelStride = info.stride >> 2;
    jlong delayMs = frame_sequence_state->drawFrame(frame_sequence_state, frameNr,
                                                    (Color8888 *) pixels, pixelStride,
                                                    previousFrameNr);
    AndroidBitmap_unlockPixels(env, output);
    return delayMs;
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_example_animatedwebp_FrameSequence_getWidth__J(JNIEnv *env, jclass type,
                                                        jlong nativeFrameSequence) {
    // Delegate to the struct's accessor function pointer.
    webp_frame_sequence *seq = reinterpret_cast<webp_frame_sequence *>(nativeFrameSequence);
    return seq->getWidth(seq);
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_example_animatedwebp_FrameSequence_getHeight__J(JNIEnv *env, jclass type,
                                                         jlong nativeFrameSequence) {
    // Delegate to the struct's accessor function pointer.
    webp_frame_sequence *seq = reinterpret_cast<webp_frame_sequence *>(nativeFrameSequence);
    return seq->getHeight(seq);
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_example_animatedwebp_FrameSequence_isOpaque__J(JNIEnv *env, jclass type,
                                                        jlong nativeFrameSequence) {
    // Delegate to the struct's accessor function pointer.
    webp_frame_sequence *seq = reinterpret_cast<webp_frame_sequence *>(nativeFrameSequence);
    return static_cast<jboolean>(seq->isOpaque(seq));
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_example_animatedwebp_FrameSequence_getFrameCount__J(JNIEnv *env, jclass type,
                                                             jlong nativeFrameSequence) {
    // Delegate to the struct's accessor function pointer.
    webp_frame_sequence *seq = reinterpret_cast<webp_frame_sequence *>(nativeFrameSequence);
    return seq->getFrameCount(seq);
}

因为native-lib.cpp是c++文件,而我改写出来的是c文件,所以在native-lib.cpp内引用webp_types.h时要这样包裹:

// Wrap the C header so its symbols get C (unmangled) linkage when this
// translation unit is compiled as C++.
extern "C"{
#include "webp_types.h"
}

百度云

密码:k3tv


猜你喜欢

转载自blog.csdn.net/u010302327/article/details/80221435
今日推荐