cocos2d-x Node (CCImageCommon_cpp.h) API

Date: 2023-02-07 19:55:01

This article comes from http://blog.csdn.net/runaying; please credit the source when quoting it!


Friendly reminder: to help you learn better, I strongly recommend reading my other post, 《Cocos2d-X权威指南笔记》 (notes on the authoritative guide to Cocos2d-X).

This file initializes images of various formats and stores the decoded image data.
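Before diving into the source, here is a minimal usage sketch of the Image class implemented in this file. Only initWithImageFile() is taken directly from the code below; the accessor names (getWidth(), getHeight(), getDataLen()) and the reference-counted release() call are assumptions about the accompanying CCImage.h header and may differ slightly between cocos2d-x versions.

// Minimal sketch: load an image file and inspect the decoded pixel buffer.
static void dumpImageInfo()
{
    cocos2d::Image *image = new cocos2d::Image();

    // The file format (PNG/JPG/TIFF/WebP/PVR/ETC/S3TC/ATITC) is detected automatically.
    if (image->initWithImageFile("HelloWorld.png"))
    {
        CCLOG("loaded %d x %d image, %d bytes of decoded data",
              image->getWidth(), image->getHeight(), (int)image->getDataLen());
    }

    image->release();   // ~Image() frees the internal _data buffer
}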

///cocos2d-x-3.0alpha0/cocos2dx/platform
//Initializes images of various formats and stores the decoded image data



#include "CCImage.h"

#include <string>
#include <ctype.h>

#ifdef EMSCRIPTEN
#include <SDL/SDL.h>
#include <SDL/SDL_image.h>
#endif // EMSCRIPTEN

extern "C"
{
#include "png.h"
#include "tiffio.h"
#include "etc1.h"
#include "jpeglib.h"
}
#include "third_party/common/s3tc/s3tc.h"
#include "third_party/common/atitc/atitc.h"
#if defined(__native_client__) || defined(EMSCRIPTEN)
// TODO(sbc): I'm pretty sure all platforms should include the webp headers this way
#include "webp/decode.h"
#else
#include "decode.h"
#endif

#include "ccMacros.h"
#include "platform/CCCommon.h"
#include "CCStdC.h"
#include "CCFileUtils.h"
#include "CCConfiguration.h"
#include "support/ccUtils.h"
#include "support/zip_support/ZipUtils.h"
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
#include "platform/android/CCFileUtilsAndroid.h"
#endif

#define CC_GL_ATC_RGB_AMD                                          0x8C92
#define CC_GL_ATC_RGBA_EXPLICIT_ALPHA_AMD                          0x8C93
#define CC_GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD                      0x87EE

NS_CC_BEGIN

//////////////////////////////////////////////////////////////////////////
// PVR format structs and data

namespace
{
    static const int PVR_TEXTURE_FLAG_TYPE_MASK = 0xff;
    
    // Values taken from PVRTexture.h from http://www.imgtec.com
    enum class PVR2TextureFlag
    {
        Mipmap         = (1<<8),        // has mip map levels
        Twiddle        = (1<<9),        // is twiddled
        Bumpmap        = (1<<10),       // has normals encoded for a bump map
        Tiling         = (1<<11),       // is bordered for tiled pvr
        Cubemap        = (1<<12),       // is a cubemap/skybox
        FalseMipCol    = (1<<13),       // are there false colored MIP levels
        Volume         = (1<<14),       // is this a volume texture
        Alpha          = (1<<15),       // v2.1 is there transparency info in the texture
        VerticalFlip   = (1<<16),       // v2.1 is the texture vertically flipped
    };
    
    enum class PVR3TextureFlag
    {
        PremultipliedAlpha	= (1<<1)	// has premultiplied alpha
    };
    
    static const char gPVRTexIdentifier[5] = "PVR!";
    
    // v2
    enum class PVR2TexturePixelFormat : unsigned char
    {
        RGBA4444 = 0x10,
        RGBA5551,
        RGBA8888,
        RGB565,
        RGB555,          // unsupported
        RGB888,
        I8,
        AI88,
        PVRTC2BPP_RGBA,
        PVRTC4BPP_RGBA,
        BGRA8888,
        A8,
    };
        
    // v3
    enum class PVR3TexturePixelFormat : uint64_t
    {
        PVRTC2BPP_RGB  = 0ULL,
        PVRTC2BPP_RGBA = 1ULL,
        PVRTC4BPP_RGB  = 2ULL,
        PVRTC4BPP_RGBA = 3ULL,
            
        BGRA8888       = 0x0808080861726762ULL,
        RGBA8888       = 0x0808080861626772ULL,
        RGBA4444       = 0x0404040461626772ULL,
        RGBA5551       = 0x0105050561626772ULL,
        RGB565         = 0x0005060500626772ULL,
        RGB888         = 0x0008080800626772ULL,
        A8             = 0x0000000800000061ULL,
        L8             = 0x000000080000006cULL,
        LA88           = 0x000008080000616cULL,
    };
        
        
    // v2
    typedef const std::map<PVR2TexturePixelFormat, Texture2D::PixelFormat> _pixel2_formathash;
    
    static const _pixel2_formathash::value_type v2_pixel_formathash_value[] =
    {
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::BGRA8888,	    Texture2D::PixelFormat::BGRA8888),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::RGBA8888,	    Texture2D::PixelFormat::RGBA8888),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::RGBA4444,	    Texture2D::PixelFormat::RGBA4444),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::RGBA5551,	    Texture2D::PixelFormat::RGB5A1),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::RGB565,	    Texture2D::PixelFormat::RGB565),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::RGB888,	    Texture2D::PixelFormat::RGB888),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::A8,	        Texture2D::PixelFormat::A8),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::I8,	        Texture2D::PixelFormat::I8),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::AI88,	        Texture2D::PixelFormat::AI88),
            
#ifdef GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::PVRTC2BPP_RGBA,	    Texture2D::PixelFormat::PVRTC2A),
        _pixel2_formathash::value_type(PVR2TexturePixelFormat::PVRTC4BPP_RGBA,	    Texture2D::PixelFormat::PVRTC4A),
#endif
    };
        
    static const int PVR2_MAX_TABLE_ELEMENTS = sizeof(v2_pixel_formathash_value) / sizeof(v2_pixel_formathash_value[0]);
    static const _pixel2_formathash v2_pixel_formathash(v2_pixel_formathash_value, v2_pixel_formathash_value + PVR2_MAX_TABLE_ELEMENTS);
        
    // v3
    typedef const std::map<PVR3TexturePixelFormat, Texture2D::PixelFormat> _pixel3_formathash;
    static _pixel3_formathash::value_type v3_pixel_formathash_value[] =
    {
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::BGRA8888,	Texture2D::PixelFormat::BGRA8888),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::RGBA8888,	Texture2D::PixelFormat::RGBA8888),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::RGBA4444,	Texture2D::PixelFormat::RGBA4444),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::RGBA5551,	Texture2D::PixelFormat::RGB5A1),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::RGB565,	    Texture2D::PixelFormat::RGB565),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::RGB888,	    Texture2D::PixelFormat::RGB888),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::A8,	        Texture2D::PixelFormat::A8),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::L8,	        Texture2D::PixelFormat::I8),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::LA88,	    Texture2D::PixelFormat::AI88),
            
#ifdef GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::PVRTC2BPP_RGB,	    Texture2D::PixelFormat::PVRTC2),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::PVRTC2BPP_RGBA,	    Texture2D::PixelFormat::PVRTC2A),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::PVRTC4BPP_RGB,	    Texture2D::PixelFormat::PVRTC4),
        _pixel3_formathash::value_type(PVR3TexturePixelFormat::PVRTC4BPP_RGBA,	    Texture2D::PixelFormat::PVRTC4A),
#endif
    };
        
    static const int PVR3_MAX_TABLE_ELEMENTS = sizeof(v3_pixel_formathash_value) / sizeof(v3_pixel_formathash_value[0]);
        
    static const _pixel3_formathash v3_pixel_formathash(v3_pixel_formathash_value, v3_pixel_formathash_value + PVR3_MAX_TABLE_ELEMENTS);
        
    typedef struct _PVRTexHeader
    {
        unsigned int headerLength;
        unsigned int height;
        unsigned int width;
        unsigned int numMipmaps;
        unsigned int flags;
        unsigned int dataLength;
        unsigned int bpp;
        unsigned int bitmaskRed;
        unsigned int bitmaskGreen;
        unsigned int bitmaskBlue;
        unsigned int bitmaskAlpha;
        unsigned int pvrTag;
        unsigned int numSurfs;
    } PVRv2TexHeader;
        
#ifdef _MSC_VER
#pragma pack(push,1)
#endif
    typedef struct
    {
        uint32_t version;
        uint32_t flags;
        uint64_t pixelFormat;
        uint32_t colorSpace;
        uint32_t channelType;
        uint32_t height;
        uint32_t width;
        uint32_t depth;
        uint32_t numberOfSurfaces;
        uint32_t numberOfFaces;
        uint32_t numberOfMipmaps;
        uint32_t metadataLength;
#ifdef _MSC_VER
    } PVRv3TexHeader;
#pragma pack(pop)
#else
    } __attribute__((packed)) PVRv3TexHeader;
#endif
}
//pvr structure end

//////////////////////////////////////////////////////////////////////////

// S3TC (DDS) structs and data
namespace
{
    struct DDColorKey
    {
        uint32_t colorSpaceLowValue;
        uint32_t colorSpaceHighValue;
    };
    
    struct DDSCaps
    {
        uint32_t caps;
        uint32_t caps2;
        uint32_t caps3;
        uint32_t caps4;
    };
    
    struct DDPixelFormat
    {
        uint32_t size;
        uint32_t flags;
        uint32_t fourCC;
        uint32_t RGBBitCount;
        uint32_t RBitMask;
        uint32_t GBitMask;
        uint32_t BBitMask;
        uint32_t ABitMask;
    };
    
    
    struct DDSURFACEDESC2
    {
        uint32_t size;
        uint32_t flags;
        uint32_t height;
        uint32_t width;
        
        union
        {
            uint32_t pitch;
            uint32_t linearSize;
        } DUMMYUNIONNAMEN1;
        
        union
        {
            uint32_t backBufferCount;
            uint32_t depth;
        } DUMMYUNIONNAMEN5;
        
        union
        {
            uint32_t mipMapCount;
            uint32_t refreshRate;
            uint32_t srcVBHandle;
        } DUMMYUNIONNAMEN2;
        
        uint32_t alphaBitDepth;
        uint32_t reserved;
        uint32_t surface;
        
        union
        {
            DDColorKey ddckCKDestOverlay;
            uint32_t emptyFaceColor;
        } DUMMYUNIONNAMEN3;
        
        DDColorKey ddckCKDestBlt;
        DDColorKey ddckCKSrcOverlay;
        DDColorKey ddckCKSrcBlt;
        
        union
        {
            DDPixelFormat ddpfPixelFormat;
            uint32_t FVF;
        } DUMMYUNIONNAMEN4;
        
        DDSCaps ddsCaps;
        uint32_t textureStage;
    } ;
    
#pragma pack(push,1)
    
    struct S3TCTexHeader
    {
        char fileCode[4];
        DDSURFACEDESC2 ddsd;
    };
    
#pragma pack(pop)

}
//s3tc struct end

//////////////////////////////////////////////////////////////////////////

// ATITC (KTX) structs and data
namespace
{
    struct ATITCTexHeader
    {
        //HEADER
        char identifier[12];
        uint32_t endianness;
        uint32_t glType;
        uint32_t glTypeSize;
        uint32_t glFormat;
        uint32_t glInternalFormat;
        uint32_t glBaseInternalFormat;
        uint32_t pixelWidth;
        uint32_t pixelHeight;
        uint32_t pixelDepth;
        uint32_t numberOfArrayElements;
        uint32_t numberOfFaces;
        uint32_t numberOfMipmapLevels;
        uint32_t bytesOfKeyValueData;
    };
}
//atitc struct end

//////////////////////////////////////////////////////////////////////////

namespace
{
    typedef struct 
    {
        const unsigned char * data;
        int size;
        int offset;
    }tImageSource;
    
    static void pngReadCallback(png_structp png_ptr, png_bytep data, png_size_t length)
    {
        tImageSource* isource = (tImageSource*)png_get_io_ptr(png_ptr);
        
        if((int)(isource->offset + length) <= isource->size)
        {
            memcpy(data, isource->data+isource->offset, length);
            isource->offset += length;
        }
        else
        {
            png_error(png_ptr, "pngReaderCallback failed");
        }
    }
}

//////////////////////////////////////////////////////////////////////////
// Implement Image
//////////////////////////////////////////////////////////////////////////

Image::Image()
: _data(0)
, _dataLen(0)
, _width(0)
, _height(0)
, _fileType(Format::UNKOWN)
, _renderFormat(Texture2D::PixelFormat::NONE)
, _preMulti(false)
, _hasPremultipliedAlpha(true)
, _numberOfMipmaps(0)
{

}

Image::~Image()
{
    CC_SAFE_DELETE_ARRAY(_data);
}

bool Image::initWithImageFile(const char * strPath)
{
    bool bRet = false;
    std::string fullPath = FileUtils::getInstance()->fullPathForFilename(strPath);

#ifdef EMSCRIPTEN
    // Emscripten includes a re-implementation of SDL that uses HTML5 canvas
    // operations under the hood. Loading images through IMG_Load (an SDL API)
    // is therefore much faster.
    SDL_Surface *iSurf = IMG_Load(fullPath.c_str());

    int size = 4 * (iSurf->w * iSurf->h);
    bRet = initWithRawData((void*)iSurf->pixels, size, iSurf->w, iSurf->h, 8, true);

    unsigned int *tmp = (unsigned int *)_data;
    int nrPixels = iSurf->w * iSurf->h;
    for(int i = 0; i < nrPixels; i++)
    {
        unsigned char *p = _data + i * 4;
        tmp[i] = CC_RGB_PREMULTIPLY_ALPHA( p[0], p[1], p[2], p[3] );
    }

    SDL_FreeSurface(iSurf);
#else
    unsigned long bufferLen = 0;
    unsigned char* buffer = FileUtils::getInstance()->getFileData(fullPath.c_str(), "rb", &bufferLen);

    if (buffer != nullptr && bufferLen > 0)
    {
        bRet = initWithImageData(buffer, bufferLen);
    }

    CC_SAFE_DELETE_ARRAY(buffer);
#endif // EMSCRIPTEN

    return bRet;
}

bool Image::initWithImageFileThreadSafe(const char *fullpath)
{
    bool bRet = false;
    unsigned long dataLen = 0;
#if (CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID)
    FileUtilsAndroid *fileUitls = (FileUtilsAndroid*)FileUtils::getInstance();
    unsigned char *pBuffer = fileUitls->getFileDataForAsync(fullpath, "rb", &dataLen);
#else
    unsigned char *pBuffer = FileUtils::getInstance()->getFileData(fullpath, "rb", &dataLen);
#endif
    if (pBuffer != NULL && dataLen > 0)
    {
        bRet = initWithImageData(pBuffer, dataLen);
    }
    CC_SAFE_DELETE_ARRAY(pBuffer);
    return bRet;
}

bool Image::initWithImageData(const unsigned char * data, int dataLen)
{
    bool ret = false;
    
    do
    {
        CC_BREAK_IF(! data || dataLen <= 0);
        
        unsigned char* unpackedData = nullptr;
        int unpackedLen = 0;
        
        //detect and unzip compressed data
        if (ZipUtils::ccIsCCZBuffer(data, dataLen))
        {
            unpackedLen = ZipUtils::ccInflateCCZBuffer(data, dataLen, &unpackedData);
        }
        else if (ZipUtils::ccIsGZipBuffer(data, dataLen))
        {
            unpackedLen = ZipUtils::ccInflateMemory(const_cast<unsigned char*>(data), dataLen, &unpackedData);
        }
        else
        {
            unpackedData = const_cast<unsigned char*>(data);
            unpackedLen = dataLen;
        }

        _fileType = detectFormat(unpackedData, unpackedLen);

        switch (_fileType)
        {
        case Format::PNG:
            ret = initWithPngData(unpackedData, unpackedLen);
            break;
        case Format::JPG:
            ret = initWithJpgData(unpackedData, unpackedLen);
            break;
        case Format::TIFF:
            ret = initWithTiffData(unpackedData, unpackedLen);
            break;
        case Format::WEBP:
            ret = initWithWebpData(unpackedData, unpackedLen);
            break;
        case Format::PVR:
            ret = initWithPVRData(unpackedData, unpackedLen);
            break;
        case Format::ETC:
            ret = initWithETCData(unpackedData, unpackedLen);
            break;
        case Format::S3TC:
            ret = initWithS3TCData(unpackedData, unpackedLen);
            break;
        case Format::ATITC:
            ret = initWithATITCData(unpackedData, unpackedLen);
            break;
        default:
            CCAssert(false, "unsupport image format!");
            break;
        }
        
        if(unpackedData != data)
        {
            free(unpackedData);
        }
    } while (0);
    
    return ret;
}

bool Image::isPng(const unsigned char * data, int dataLen)
{
    if (dataLen <= 8)
    {
        return false;
    }

    static const unsigned char PNG_SIGNATURE[] = {0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a};

    return memcmp(PNG_SIGNATURE, data, sizeof(PNG_SIGNATURE)) == 0;
}


bool Image::isEtc(const unsigned char * data, int dataLen)
{
    return etc1_pkm_is_valid((etc1_byte*)data) ? true : false;
}


bool Image::isS3TC(const unsigned char * data, int dataLen)
{

    S3TCTexHeader *header = (S3TCTexHeader *)data;
    
    if (strncmp(header->fileCode, "DDS", 3) != 0)
    {
        CCLOG("cocos2d: the file is not a dds file!");
        return false;
    }
    return true;
}

bool Image::isATITC(const unsigned char *data, int dataLen)
{
    ATITCTexHeader *header = (ATITCTexHeader *)data;
    
    if (strncmp(&header->identifier[1], "KTX", 3) != 0)
    {
        CCLOG("cocos3d: the file is not a ktx file!");
        return false;
    }
    return true;
}

bool Image::isJpg(const unsigned char * data, int dataLen)
{
    if (dataLen <= 4)
    {
        return false;
    }

    static const unsigned char JPG_SOI[] = {0xFF, 0xD8};

    return memcmp(data, JPG_SOI, 2) == 0;
}

bool Image::isTiff(const unsigned char * data, int dataLen)
{
    if (dataLen <= 4)
    {
        return false;
    }

    static const char* TIFF_II = "II";
    static const char* TIFF_MM = "MM";

    return (memcmp(data, TIFF_II, 2) == 0 && *(static_cast<const unsigned char*>(data) + 2) == 42 && *(static_cast<const unsigned char*>(data) + 3) == 0) ||
        (memcmp(data, TIFF_MM, 2) == 0 && *(static_cast<const unsigned char*>(data) + 2) == 0 && *(static_cast<const unsigned char*>(data) + 3) == 42);
}

bool Image::isWebp(const unsigned char * data, int dataLen)
{
    if (dataLen <= 12)
    {
        return false;
    }

    static const char* WEBP_RIFF = "RIFF";
    static const char* WEBP_WEBP = "WEBP";

    return memcmp(data, WEBP_RIFF, 4) == 0 
        && memcmp(static_cast<const unsigned char*>(data) + 8, WEBP_WEBP, 4) == 0;
}

bool Image::isPvr(const unsigned char * data, int dataLen)
{
    if (dataLen < sizeof(PVRv2TexHeader) || dataLen < sizeof(PVRv3TexHeader))
    {
        return false;
    }
    
    const PVRv2TexHeader* headerv2 = static_cast<const PVRv2TexHeader*>(static_cast<const void*>(data));
    const PVRv3TexHeader* headerv3 = static_cast<const PVRv3TexHeader*>(static_cast<const void*>(data));
    
    return memcmp(&headerv2->pvrTag, gPVRTexIdentifier, strlen(gPVRTexIdentifier)) == 0 || CC_SWAP_INT32_BIG_TO_HOST(headerv3->version) == 0x50565203;
}


Image::Format Image::detectFormat(const unsigned char * data, int dataLen)
{
    if (isPng(data, dataLen))
    {
        return Format::PNG;
    }
    else if (isJpg(data, dataLen))
    {
        return Format::JPG;
    }
    else if (isTiff(data, dataLen))
    {
        return Format::TIFF;
    }
    else if (isWebp(data, dataLen))
    {
        return Format::WEBP;
    }
    else if (isPvr(data, dataLen))
    {
        return Format::PVR;
    }
    else if (isEtc(data, dataLen))
    {
        return Format::ETC;
    }
    else if (isS3TC(data, dataLen))
    {
        return Format::S3TC;
    }
    else if (isATITC(data, dataLen))
    {
        return Format::ATITC;
    }
    else
    {
        return Format::UNKOWN;
    }
}

int Image::getBitPerPixel()
{
    return Texture2D::getPixelFormatInfoMap().at(_renderFormat).bpp;
}

bool Image::hasAlpha()
{
    return Texture2D::getPixelFormatInfoMap().at(_renderFormat).alpha;
}

bool Image::isCompressed()
{
    return Texture2D::getPixelFormatInfoMap().at(_renderFormat).compressed;
}

namespace
{
/*
 * ERROR HANDLING:
 *
 * The JPEG library's standard error handler (jerror.c) is divided into
 * several "methods" which you can override individually. This lets you
 * adjust the behavior without duplicating a lot of code that you might
 * otherwise have to update with every future release.
 *
 * When a fatal error occurs, we override the "error_exit" method so that
 * control is returned to the library's caller instead of calling the
 * standard exit().
 *
 * We use C's setjmp/longjmp facility to return control. This means that the
 * routine which calls the JPEG library must first execute a setjmp() call to
 * establish the return point. We want the replacement error_exit to do a
 * longjmp(). But the error_exit routine needs access to the setjmp buffer,
 * so we make a private extension of the standard JPEG error handler.
 * (If we were using C++, we would say we made a subclass of the regular
 * error handler.)
 *
 * Here is the structure of the extended error handler:
 */
    struct MyErrorMgr
    {
        struct jpeg_error_mgr pub;	/* "public" fields */
        jmp_buf setjmp_buffer;	/* for return to caller */
    };
    
    typedef struct MyErrorMgr * MyErrorPtr;
    
    /*
     * Here is the routine that will replace the standard error_exit method:
     */
    
    METHODDEF(void)
    myErrorExit(j_common_ptr cinfo)
    {
        /* cinfo->err really points to a MyErrorMgr struct, so coerce the pointer */
        MyErrorPtr myerr = (MyErrorPtr) cinfo->err;
        
        /* Always display the message. */
        /* We could postpone this until after returning, if we chose. */
        /* The internal message function cannot display error messages on some
         * platforms, so we override it here. Edit this if there is a version conflict.
         */
        //(*cinfo->err->output_message) (cinfo);
        char buffer[JMSG_LENGTH_MAX];
        (*cinfo->err->format_message) (cinfo, buffer);
        CCLOG("jpeg error: %s", buffer);
        
        /* Return control to the setjmp point */
        longjmp(myerr->setjmp_buffer, 1);
    }
}

bool Image::initWithJpgData(const unsigned char * data, int dataLen)
{
    /* these are standard libjpeg structures for reading (decompression) */
    struct jpeg_decompress_struct cinfo;
    /* We use our private extension of the JPEG error handler.
     * Note that this struct must live as long as the main JPEG parameter
     * struct, to avoid dangling-pointer problems.
     */
	struct MyErrorMgr jerr;
    /* libjpeg data structure for storing one row, that is, a scanline of the image */
    JSAMPROW row_pointer[1] = {0};
    unsigned long location = 0;
    unsigned int i = 0;

    bool bRet = false;
    do 
    {
        /* We set up the normal JPEG error routines, then override error_exit. */
		cinfo.err = jpeg_std_error(&jerr.pub);
		jerr.pub.error_exit = myErrorExit;
		/* Establish the setjmp return context for myErrorExit to use. */
		if (setjmp(jerr.setjmp_buffer)) {
			/* If we get here, the JPEG code has signaled an error.
			 * We need to clean up the JPEG object, close the input file, and return.
			 */
			jpeg_destroy_decompress(&cinfo);
			break;
		}

        /* set up the data to decompress, then read the JPEG header */
        jpeg_create_decompress( &cinfo );

#ifndef CC_TARGET_QT5
        jpeg_mem_src( &cinfo, const_cast<unsigned char*>(data), dataLen );
#endif /* CC_TARGET_QT5 */

        /* read the image header, which contains the image information */
#if (JPEG_LIB_VERSION >= 90)
        // libjpeg 0.9 adds stricter types.
        jpeg_read_header( &cinfo, TRUE );
#else
        jpeg_read_header( &cinfo, true );
#endif

        // we only support RGB or grayscale
        if (cinfo.jpeg_color_space == JCS_GRAYSCALE)
        {
            _renderFormat = Texture2D::PixelFormat::I8;
        }else
        {
            cinfo.out_color_space = JCS_RGB;
            _renderFormat = Texture2D::PixelFormat::RGB888;
        }

        /* Start decompression jpeg here */
        jpeg_start_decompress( &cinfo );

        /* init image info */
        _width  = cinfo.output_width;
        _height = cinfo.output_height;
        _preMulti = false;
        row_pointer[0] = new unsigned char[cinfo.output_width*cinfo.output_components];
        CC_BREAK_IF(! row_pointer[0]);

        _dataLen = cinfo.output_width*cinfo.output_height*cinfo.output_components;
        _data = new unsigned char[_dataLen];
        CC_BREAK_IF(! _data);

        /* now actually read the jpeg into the raw buffer */
        /* read one scan line at a time */
        while( cinfo.output_scanline < cinfo.output_height )
        {
            jpeg_read_scanlines( &cinfo, row_pointer, 1 );
            for( i=0; i<cinfo.output_width*cinfo.output_components;i++) 
            {
                _data[location++] = row_pointer[0][i];
            }
        }

		/* Reading corrupted image data may cause jpeg_finish_decompress() to fail.
		 * Moreover, jpeg_destroy_decompress() releases all memory associated with
		 * the decompression object, so calling jpeg_finish_decompress() is not needed.
		 */
		//jpeg_finish_decompress( &cinfo );
        jpeg_destroy_decompress( &cinfo );
        /* After decompression, destroy the object, free pointers, and close any opened files */
        bRet = true;
    } while (0);

    CC_SAFE_DELETE_ARRAY(row_pointer[0]);
    return bRet;
}

bool Image::initWithPngData(const unsigned char * data, int dataLen)
{
    // number of bytes used to check whether the data is a valid PNG file
#define PNGSIGSIZE  8
    bool bRet = false;
    png_byte        header[PNGSIGSIZE]   = {0}; 
    png_structp     png_ptr     =   0;
    png_infop       info_ptr    = 0;

    do 
    {
        // png header len is 8 bytes
        CC_BREAK_IF(dataLen < PNGSIGSIZE);

        // check the data is png or not
        memcpy(header, data, PNGSIGSIZE);
        CC_BREAK_IF(png_sig_cmp(header, 0, PNGSIGSIZE));

        // init png_struct
        png_ptr = png_create_read_struct(PNG_LIBPNG_VER_STRING, 0, 0, 0);
        CC_BREAK_IF(! png_ptr);

        // init png_info
        info_ptr = png_create_info_struct(png_ptr);
        CC_BREAK_IF(!info_ptr);

#if (CC_TARGET_PLATFORM != CC_PLATFORM_BADA && CC_TARGET_PLATFORM != CC_PLATFORM_NACL)
        CC_BREAK_IF(setjmp(png_jmpbuf(png_ptr)));
#endif

        // set the read callback function
        tImageSource imageSource;
        imageSource.data    = (unsigned char*)data;
        imageSource.size    = dataLen;
        imageSource.offset  = 0;
        png_set_read_fn(png_ptr, &imageSource, pngReadCallback);

        // read png header info

        // read png file info
        png_read_info(png_ptr, info_ptr);

        _width = png_get_image_width(png_ptr, info_ptr);
        _height = png_get_image_height(png_ptr, info_ptr);
        png_byte bit_depth = png_get_bit_depth(png_ptr, info_ptr);
        png_uint_32 color_type = png_get_color_type(png_ptr, info_ptr);

        //CCLOG("color type %u", color_type);

        // force palette images to be expanded to 24-bit RGB
        // it may include an alpha channel
        if (color_type == PNG_COLOR_TYPE_PALETTE)
        {
            png_set_palette_to_rgb(png_ptr);
        }
        // low-bit-depth grayscale images are expanded to 8 bits
        if (color_type == PNG_COLOR_TYPE_GRAY && bit_depth < 8)
        {
            bit_depth = 8;
            png_set_expand_gray_1_2_4_to_8(png_ptr);
        }
        // expand any tRNS chunk data into a full alpha channel
        if (png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        {
            png_set_tRNS_to_alpha(png_ptr);
        }  
        // reduce images with 16-bit samples to 8 bits
        if (bit_depth == 16)
        {
            png_set_strip_16(png_ptr);            
        } 

        // expanded earlier for grayscale, now take care of palette and RGB
        if (bit_depth < 8) {
            png_set_packing(png_ptr);
        }
        // update info
        png_read_update_info(png_ptr, info_ptr);
        bit_depth = png_get_bit_depth(png_ptr, info_ptr);
        color_type = png_get_color_type(png_ptr, info_ptr);

        switch (color_type)
        {
        case PNG_COLOR_TYPE_GRAY:
            _renderFormat = Texture2D::PixelFormat::I8;
            break;
        case PNG_COLOR_TYPE_GRAY_ALPHA:
            _renderFormat = Texture2D::PixelFormat::AI88;
            break;
        case PNG_COLOR_TYPE_RGB:
            _renderFormat = Texture2D::PixelFormat::RGB888;
            break;
        case PNG_COLOR_TYPE_RGB_ALPHA:
            _renderFormat = Texture2D::PixelFormat::RGBA8888;
            break;
        default:
            break;
        }

        // read png data
        png_uint_32 rowbytes;
        png_bytep* row_pointers = (png_bytep*)malloc( sizeof(png_bytep) * _height );

        rowbytes = png_get_rowbytes(png_ptr, info_ptr);

        _dataLen = rowbytes * _height;
        _data = new unsigned char[_dataLen];
        CC_BREAK_IF(!_data);

        for (unsigned short i = 0; i < _height; ++i)
        {
            row_pointers[i] = _data + i*rowbytes;
        }
        png_read_image(png_ptr, row_pointers);

        png_read_end(png_ptr, NULL);

        _preMulti = false;

        CC_SAFE_FREE(row_pointers);

        bRet = true;
    } while (0);

    if (png_ptr)
    {
        png_destroy_read_struct(&png_ptr, (info_ptr) ? &info_ptr : 0, 0);
    }
    return bRet;
}

namespace
{
    static tmsize_t tiffReadProc(thandle_t fd, void* buf, tmsize_t size)
    {
        tImageSource* isource = (tImageSource*)fd;
        uint8* ma;
        uint64 mb;
        unsigned long n;
        unsigned long o;
        tmsize_t p;
        ma=(uint8*)buf;
        mb=size;
        p=0;
        while (mb>0)
        {
            n=0x80000000UL;
            if ((uint64)n>mb)
            n=(unsigned long)mb;
            
            
            if((int)(isource->offset + n) <= isource->size)
            {
                memcpy(ma, isource->data+isource->offset, n);
                isource->offset += n;
                o = n;
            }
            else
            {
                return 0;
            }
            
            ma+=o;
            mb-=o;
            p+=o;
            if (o!=n)
            {
                break;
            }
        }
        return p;
    }
    
    static tmsize_t tiffWriteProc(thandle_t fd, void* buf, tmsize_t size)
    {
        CC_UNUSED_PARAM(fd);
        CC_UNUSED_PARAM(buf);
        CC_UNUSED_PARAM(size);
        return 0;
    }
    
    
    static uint64 tiffSeekProc(thandle_t fd, uint64 off, int whence)
    {
        tImageSource* isource = (tImageSource*)fd;
        uint64 ret = -1;
        do
        {
            if (whence == SEEK_SET)
            {
                CC_BREAK_IF(off >= (uint64)isource->size);
                ret = isource->offset = (uint32)off;
            }
            else if (whence == SEEK_CUR)
            {
                CC_BREAK_IF(isource->offset + off >= (uint64)isource->size);
                ret = isource->offset += (uint32)off;
            }
            else if (whence == SEEK_END)
            {
                CC_BREAK_IF(off >= (uint64)isource->size);
                ret = isource->offset = (uint32)(isource->size-1 - off);
            }
            else
            {
                CC_BREAK_IF(off >= (uint64)isource->size);
                ret = isource->offset = (uint32)off;
            }
        } while (0);
        
        return ret;
    }
    
    static uint64 tiffSizeProc(thandle_t fd)
    {
        tImageSource* pImageSrc = (tImageSource*)fd;
        return pImageSrc->size;
    }
    
    static int tiffCloseProc(thandle_t fd)
    {
        CC_UNUSED_PARAM(fd);
        return 0;
    }
    
    static int tiffMapProc(thandle_t fd, void** pbase, toff_t* psize)
    {
        CC_UNUSED_PARAM(fd);
        CC_UNUSED_PARAM(pbase);
        CC_UNUSED_PARAM(psize);
        return 0;
    }
    
    static void tiffUnmapProc(thandle_t fd, void* base, toff_t size)
    {
        CC_UNUSED_PARAM(fd);
        CC_UNUSED_PARAM(base);
        CC_UNUSED_PARAM(size);
    }
}

bool Image::initWithTiffData(const unsigned char * data, int dataLen)
{
    bool bRet = false;
    do 
    {
        // set the read callback function
        tImageSource imageSource;
        imageSource.data    = data;
        imageSource.size    = dataLen;
        imageSource.offset  = 0;

        TIFF* tif = TIFFClientOpen("file.tif", "r", (thandle_t)&imageSource, 
            tiffReadProc, tiffWriteProc,
            tiffSeekProc, tiffCloseProc, tiffSizeProc,
            tiffMapProc,
            tiffUnmapProc);

        CC_BREAK_IF(NULL == tif);

        uint32 w = 0, h = 0;
        uint16 bitsPerSample = 0, samplePerPixel = 0, planarConfig = 0;
        size_t npixels = 0;
        
        TIFFGetField(tif, TIFFTAG_IMAGEWIDTH, &w);
        TIFFGetField(tif, TIFFTAG_IMAGELENGTH, &h);
        TIFFGetField(tif, TIFFTAG_BITSPERSAMPLE, &bitsPerSample);
        TIFFGetField(tif, TIFFTAG_SAMPLESPERPIXEL, &samplePerPixel);
        TIFFGetField(tif, TIFFTAG_PLANARCONFIG, &planarConfig);

        npixels = w * h;
        
        _renderFormat = Texture2D::PixelFormat::RGBA8888;
        _width = w;
        _height = h;

        _dataLen = npixels * sizeof (uint32);
        _data = new unsigned char[_dataLen];

        uint32* raster = (uint32*) _TIFFmalloc(npixels * sizeof (uint32));
        if (raster != NULL) 
        {
           if (TIFFReadRGBAImageOriented(tif, w, h, raster, ORIENTATION_TOPLEFT, 0))
           {
                /* the raster data is premultiplied by the alpha component after calling TIFFReadRGBAImageOriented */
                _preMulti = true;

               memcpy(_data, raster, npixels*sizeof (uint32));
           }

          _TIFFfree(raster);
        }
        

        TIFFClose(tif);

        bRet = true;
    } while (0);
    return bRet;
}

namespace
{
    bool testFormatForPvr2TCSupport(PVR2TexturePixelFormat format)
    {
        if (!Configuration::getInstance()->supportsPVRTC())
        {
            if (format == PVR2TexturePixelFormat::PVRTC2BPP_RGBA ||
                format == PVR2TexturePixelFormat::PVRTC4BPP_RGBA)
            {
                return false;
            }
        }
        
        return true;
    }
    
    bool testFormatForPvr3TCSupport(PVR3TexturePixelFormat format)
    {
        if (!Configuration::getInstance()->supportsPVRTC())
        {
            if (format == PVR3TexturePixelFormat::PVRTC2BPP_RGB  ||
                format == PVR3TexturePixelFormat::PVRTC2BPP_RGBA ||
                format == PVR3TexturePixelFormat::PVRTC4BPP_RGB  ||
                format == PVR3TexturePixelFormat::PVRTC4BPP_RGBA)
            {
                return false;
            }
        }
        
        return true;
    }
}

bool Image::initWithPVRv2Data(const unsigned char * data, int dataLen)
{
    int dataLength = 0, dataOffset = 0, dataSize = 0;
    int blockSize = 0, widthBlocks = 0, heightBlocks = 0;
    int width = 0, height = 0;
    
    // treat the first sizeof(PVRTexHeader) bytes of the stream as a PVRTexHeader
    const PVRv2TexHeader *header = static_cast<const PVRv2TexHeader *>(static_cast<const void*>(data));
    
    // make sure that the tag is in the correct format
    if (memcmp(&header->pvrTag, gPVRTexIdentifier, strlen(gPVRTexIdentifier)) != 0)
    {
        return false;
    }
    
    Configuration *configuration = Configuration::getInstance();
    
    _hasPremultipliedAlpha = false;
    unsigned int flags = CC_SWAP_INT32_LITTLE_TO_HOST(header->flags);
    PVR2TexturePixelFormat formatFlags = static_cast<PVR2TexturePixelFormat>(flags & PVR_TEXTURE_FLAG_TYPE_MASK);
    bool flipped = (flags & (unsigned int)PVR2TextureFlag::VerticalFlip) ? true : false;
    if (flipped)
    {
        CCLOG("cocos2d: WARNING: Image is flipped. Regenerate it using PVRTexTool");
    }
    
    if (! configuration->supportsNPOT() &&
        (header->width != ccNextPOT(header->width) || header->height != ccNextPOT(header->height)))
    {
        CCLOG("cocos2d: ERROR: Loading an NPOT texture (%dx%d) but is not supported on this device", header->width, header->height);
        return false;
    }
    
    if (!testFormatForPvr2TCSupport(formatFlags))
    {
        CCLOG("cocos2d: WARNING: Unsupported PVR Pixel Format: 0x%02X. Re-encode it with a OpenGL pixel format variant", formatFlags);
        return false;
    }

    if (v2_pixel_formathash.find(formatFlags) == v2_pixel_formathash.end())
    {
        CCLOG("cocos2d: WARNING: Unsupported PVR Pixel Format: 0x%02X. Re-encode it with a OpenGL pixel format variant", formatFlags);
        return false;
    }
    
    auto it = Texture2D::getPixelFormatInfoMap().find(v2_pixel_formathash.at(formatFlags));

    if (it == Texture2D::getPixelFormatInfoMap().end())
    {
        CCLOG("cocos2d: WARNING: Unsupported PVR Pixel Format: 0x%02X. Re-encode it with a OpenGL pixel format variant", formatFlags);
        return false;
    }

    _renderFormat = it->first;

    //Reset num of mipmaps
    _numberOfMipmaps = 0;

    //Get size of mipmap
    _width = width = CC_SWAP_INT32_LITTLE_TO_HOST(header->width);
    _height = height = CC_SWAP_INT32_LITTLE_TO_HOST(header->height);

    //Get ptr to where data starts..
    dataLength = CC_SWAP_INT32_LITTLE_TO_HOST(header->dataLength);

    //Move by size of header
    _dataLen = dataLen - sizeof(PVRv2TexHeader);
    _data = new unsigned char[_dataLen];
    memcpy(_data, (unsigned char*)data + sizeof(PVRv2TexHeader), _dataLen);

    // Calculate the data size for each texture level and respect the minimum number of blocks
    while (dataOffset < dataLength)
    {
        switch (formatFlags) {
        case PVR2TexturePixelFormat::PVRTC2BPP_RGBA:
            blockSize = 8 * 4; // Pixel by pixel block size for 2bpp
            widthBlocks = width / 8;
            heightBlocks = height / 4;
            break;
        case PVR2TexturePixelFormat::PVRTC4BPP_RGBA:
            blockSize = 4 * 4; // Pixel by pixel block size for 4bpp
            widthBlocks = width / 4;
            heightBlocks = height / 4;
            break;
        case PVR2TexturePixelFormat::BGRA8888:
            if (Configuration::getInstance()->supportsBGRA8888() == false)
            {
                CCLOG("cocos2d: Image. BGRA8888 not supported on this device");
                return false;
            }
        default:
            blockSize = 1;
            widthBlocks = width;
            heightBlocks = height;
            break;
        }

        // Clamp to minimum number of blocks
        if (widthBlocks < 2)
        {
            widthBlocks = 2;
        }
        if (heightBlocks < 2)
        {
            heightBlocks = 2;
        }

        dataSize = widthBlocks * heightBlocks * ((blockSize  * it->second.bpp) / 8);
        int packetLength = (dataLength - dataOffset);
        packetLength = packetLength > dataSize ? dataSize : packetLength;

        // Record this level in the mipmaps array and increment the counter
        _mipmaps[_numberOfMipmaps].address = _data + dataOffset;
        _mipmaps[_numberOfMipmaps].len = packetLength;
        _numberOfMipmaps++;

        dataOffset += packetLength;

        //Update width and height to the next lower power of two
        width = MAX(width >> 1, 1);
        height = MAX(height >> 1, 1);
    }

    return true;
}
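To see how the per-level size computation above works out, take a 256x256 level stored as PVRTC4BPP_RGBA: blockSize = 4 * 4 = 16, widthBlocks = 256 / 4 = 64, heightBlocks = 256 / 4 = 64, and with bpp = 4 the level occupies 64 * 64 * ((16 * 4) / 8) = 32768 bytes, exactly the expected 256 * 256 * 4 / 8. The clamp to at least 2 x 2 blocks matches the PVRTC requirement that even the smallest mip levels are stored as at least two blocks in each dimension.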

bool Image::initWithPVRv3Data(const unsigned char * data, int dataLen)
{
    if (dataLen < sizeof(PVRv3TexHeader))
    {
		return false;
	}
	
	const PVRv3TexHeader *header = static_cast<const PVRv3TexHeader *>(static_cast<const void*>(data));
	
	// validate version
	if (CC_SWAP_INT32_BIG_TO_HOST(header->version) != 0x50565203)
    {
		CCLOG("cocos2d: WARNING: pvr file version mismatch");
		return false;
	}
	
	// parse pixel format
	PVR3TexturePixelFormat pixelFormat = static_cast<PVR3TexturePixelFormat>(header->pixelFormat);
    
    if (!testFormatForPvr3TCSupport(pixelFormat))
    {
        CCLOG("cocos2d: WARNING: Unsupported PVR Pixel Format: 0x%016llX. Re-encode it with a OpenGL pixel format variant",
              static_cast<unsigned long long>(pixelFormat));
        return false;
    }


    if (v3_pixel_formathash.find(pixelFormat) == v3_pixel_formathash.end())
    {
        CCLOG("cocos2d: WARNING: Unsupported PVR Pixel Format: 0x%016llX. Re-encode it with a OpenGL pixel format variant",
              static_cast<unsigned long long>(pixelFormat));
        return false;
    }

    auto it = Texture2D::getPixelFormatInfoMap().find(v3_pixel_formathash.at(pixelFormat));

    if (it == Texture2D::getPixelFormatInfoMap().end())
    {
        CCLOG("cocos2d: WARNING: Unsupported PVR Pixel Format: 0x%016llX. Re-encode it with a OpenGL pixel format variant",
              static_cast<unsigned long long>(pixelFormat));
        return false;
    }

    _renderFormat = it->first;
    
    // flags
	int flags = CC_SWAP_INT32_LITTLE_TO_HOST(header->flags);

    // PVRv3 specifies a premultiplied-alpha flag; it should always be respected for PVRv3 files
    if (flags & (unsigned int)PVR3TextureFlag::PremultipliedAlpha)
    {
        _preMulti = true;
    }
    
	// sizing
	int width = CC_SWAP_INT32_LITTLE_TO_HOST(header->width);
	int height = CC_SWAP_INT32_LITTLE_TO_HOST(header->height);
	_width = width;
	_height = height;
	int dataOffset = 0, dataSize = 0;
	int blockSize = 0, widthBlocks = 0, heightBlocks = 0;
	
    _dataLen = dataLen - (sizeof(PVRv3TexHeader) + header->metadataLength);
    _data = new unsigned char[_dataLen];
    memcpy(_data, static_cast<const unsigned char*>(data) + sizeof(PVRv3TexHeader) + header->metadataLength, _dataLen);
	
	_numberOfMipmaps = header->numberOfMipmaps;
	CCAssert(_numberOfMipmaps < MIPMAP_MAX, "Image: Maximum number of mimpaps reached. Increate the CC_MIPMAP_MAX value");
    
	for (int i = 0; i < _numberOfMipmaps; i++)
    {
		switch ((PVR3TexturePixelFormat)pixelFormat)
        {
			case PVR3TexturePixelFormat::PVRTC2BPP_RGB :
			case PVR3TexturePixelFormat::PVRTC2BPP_RGBA :
				blockSize = 8 * 4; // Pixel by pixel block size for 2bpp
				widthBlocks = width / 8;
				heightBlocks = height / 4;
				break;
			case PVR3TexturePixelFormat::PVRTC4BPP_RGB :
			case PVR3TexturePixelFormat::PVRTC4BPP_RGBA :
				blockSize = 4 * 4; // Pixel by pixel block size for 4bpp
				widthBlocks = width / 4;
				heightBlocks = height / 4;
				break;
			case PVR3TexturePixelFormat::BGRA8888:
				if( ! Configuration::getInstance()->supportsBGRA8888())
                {
					CCLOG("cocos2d: Image. BGRA8888 not supported on this device");
					return false;
				}
			default:
				blockSize = 1;
				widthBlocks = width;
				heightBlocks = height;
				break;
		}
        
		// Clamp to minimum number of blocks
		if (widthBlocks < 2)
        {
			widthBlocks = 2;
        }
		if (heightBlocks < 2)
        {
			heightBlocks = 2;
        }
		
		dataSize = widthBlocks * heightBlocks * ((blockSize  * it->second.bpp) / 8);
		int packetLength = _dataLen - dataOffset;
		packetLength = packetLength > dataSize ? dataSize : packetLength;
		
		_mipmaps[i].address = _data + dataOffset;
		_mipmaps[i].len = packetLength;
		
		dataOffset += packetLength;
		CCAssert(dataOffset <= _dataLen, "CCTexurePVR: Invalid lenght");
		
		
		width = MAX(width >> 1, 1);
		height = MAX(height >> 1, 1);
	}
	
	return true;
}

bool Image::initWithETCData(const unsigned char * data, int dataLen)
{
    const etc1_byte* header = static_cast<const etc1_byte*>(data);
    
    //check the data
    if(!etc1_pkm_is_valid(header))
    {
        return  false;
    }

    _width = etc1_pkm_get_width(header);
    _height = etc1_pkm_get_height(header);

    if( 0 == _width || 0 == _height )
    {
        return false;
    }

    if(Configuration::getInstance()->supportsETC())
    {
        // Older OpenGL headers do not define GL_ETC1_RGB8_OES; guard with the macro so they still compile.
#ifdef GL_ETC1_RGB8_OES
        _renderFormat = Texture2D::PixelFormat::ETC;
        _dataLen = dataLen - ETC_PKM_HEADER_SIZE;
        _data = new unsigned char[_dataLen];
        memcpy(_data, static_cast<const unsigned char*>(data) + ETC_PKM_HEADER_SIZE, _dataLen);
        return true;
#endif
    }
    else
    {
         //if it is not GLES or the device does not support ETC1, decode the texture in software
        int bytePerPixel = 3;
        unsigned int stride = _width * bytePerPixel;
        _renderFormat = Texture2D::PixelFormat::RGB888;
        
        _dataLen =  _width * _height * bytePerPixel;
        _data = new unsigned char[_dataLen];
        
        if (etc1_decode_image(static_cast<const unsigned char*>(data) + ETC_PKM_HEADER_SIZE, static_cast<etc1_byte*>(_data), _width, _height, bytePerPixel, stride) != 0)
        {
            _dataLen = 0;
            CC_SAFE_DELETE_ARRAY(_data);
            return false;
        }
        
        return true;
    }
    return false;
}

namespace
{
    static const uint32_t makeFourCC(char ch0, char ch1, char ch2, char ch3)
    {
        const uint32_t fourCC = ((uint32_t)(char)(ch0) | ((uint32_t)(char)(ch1) << 8) | ((uint32_t)(char)(ch2) << 16) | ((uint32_t)(char)(ch3) << 24 ));
        return fourCC;
    }
}
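For example, makeFourCC('D', 'X', 'T', '1') evaluates to 0x44 | (0x58 << 8) | (0x54 << 16) | (0x31 << 24) = 0x31545844; stored in memory on a little-endian machine, those four bytes read "DXT1", which is exactly how the fourCC field appears in a DDS header, so the comparisons in initWithS3TCData() below reduce to simple integer equality.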

bool Image::initWithS3TCData(const unsigned char * data, int dataLen)
{
    
    const uint32_t FOURCC_DXT1 = makeFourCC('D', 'X', 'T', '1');
    const uint32_t FOURCC_DXT3 = makeFourCC('D', 'X', 'T', '3');
    const uint32_t FOURCC_DXT5 = makeFourCC('D', 'X', 'T', '5');
    
    /* load the .dds file */
    
    S3TCTexHeader *header = (S3TCTexHeader *)data;
    unsigned char *pixelData = new unsigned char [dataLen - sizeof(S3TCTexHeader)];
    memcpy((void *)pixelData, data + sizeof(S3TCTexHeader), dataLen - sizeof(S3TCTexHeader));
    
    _width = header->ddsd.width;
    _height = header->ddsd.height;
    _numberOfMipmaps = header->ddsd.DUMMYUNIONNAMEN2.mipMapCount;
    _dataLen = 0;
    int blockSize = (FOURCC_DXT1 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC) ? 8 : 16;
    
    /* calculate the dataLen */
    
    int width = _width;
    int height = _height;
    
    if (Configuration::getInstance()->supportsS3TC())  //compressed data length
    {
        _dataLen = dataLen - sizeof(S3TCTexHeader);
        _data = new unsigned char [_dataLen];
        memcpy((void *)_data,(void *)pixelData , _dataLen);
    }
    else                                               //decompressed data length
    {
        for (int i = 0; i < _numberOfMipmaps && (width || height); ++i)
        {
            if (width == 0) width = 1;
            if (height == 0) height = 1;
            
            _dataLen += (height * width *4);

            width >>= 1;
            height >>= 1;
        }
        _data = new unsigned char [_dataLen];
    }
    
    /* load the mipmaps */
    
    int encodeOffset = 0;
    int decodeOffset = 0;
    width = _width;  height = _height;
    
    for (int i = 0; i < _numberOfMipmaps && (width || height); ++i)  
    {
        if (width == 0) width = 1;
        if (height == 0) height = 1;
        
        int size = ((width+3)/4)*((height+3)/4)*blockSize;
                
        if (Configuration::getInstance()->supportsS3TC())
        {   //decode texture through hardware
            
            CCLOG("this is s3tc H decode");
            
            if (FOURCC_DXT1 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC)
            {
                _renderFormat = Texture2D::PixelFormat::S3TC_DXT1;
            }
            else if (FOURCC_DXT3 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC)
            {
                _renderFormat = Texture2D::PixelFormat::S3TC_DXT3;
            }
            else if (FOURCC_DXT5 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC)
            {
                _renderFormat = Texture2D::PixelFormat::S3TC_DXT5;
            }

            _mipmaps[i].address = (unsigned char *)_data + encodeOffset;
            _mipmaps[i].len = size;
        }
        else
        {   //if it is not GLES or the device does not support S3TC, decode the texture in software
            int bytePerPixel = 4;
            unsigned int stride = width * bytePerPixel;
            _renderFormat = Texture2D::PixelFormat::RGBA8888;

            std::vector<unsigned char> decodeImageData(stride * height);
            if (FOURCC_DXT1 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC)
            {
                s3tc_decode(pixelData + encodeOffset, &decodeImageData[0], width, height, S3TCDecodeFlag::DXT1);
            }
            else if (FOURCC_DXT3 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC)
            {
                s3tc_decode(pixelData + encodeOffset, &decodeImageData[0], width, height, S3TCDecodeFlag::DXT3);
            }
            else if (FOURCC_DXT5 == header->ddsd.DUMMYUNIONNAMEN4.ddpfPixelFormat.fourCC)
            {
                s3tc_decode(pixelData + encodeOffset, &decodeImageData[0], width, height, S3TCDecodeFlag::DXT5);
            }
            
            _mipmaps[i].address = (unsigned char *)_data + decodeOffset;
            _mipmaps[i].len = (stride * height);
            memcpy((void *)_mipmaps[i].address, (void *)&decodeImageData[0], _mipmaps[i].len);
            decodeOffset += stride * height;
        }
        
        encodeOffset += size;
        width >>= 1;
        height >>= 1;
    }
    
    /* end load the mipmaps */
    
    CC_SAFE_DELETE_ARRAY(pixelData);
    
    return true;
}


bool Image::initWithATITCData(const unsigned char *data, int dataLen)
{
    /* load the .ktx file */
    ATITCTexHeader *header = (ATITCTexHeader *)data;
    _width =  header->pixelWidth;
    _height = header->pixelHeight;
    _numberOfMipmaps = header->numberOfMipmapLevels;
    
    int blockSize = 0;
    switch (header->glInternalFormat)
    {
        case CC_GL_ATC_RGB_AMD:
            blockSize = 8;
            break;
        case CC_GL_ATC_RGBA_EXPLICIT_ALPHA_AMD:
            blockSize = 16;
            break;
        case CC_GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD:
            blockSize = 16;
            break;
        default:
            break;
    }
    
    /* pixelData points to the compressed data address */
    unsigned char *pixelData = (unsigned char *)data + sizeof(ATITCTexHeader) + header->bytesOfKeyValueData + 4;
    
    /* calculate the dataLen */
    int width = _width;
    int height = _height;
    
    if (Configuration::getInstance()->supportsATITC())  //compressed data length
    {
        _dataLen = dataLen - sizeof(ATITCTexHeader) - header->bytesOfKeyValueData - 4;
        _data = new unsigned char [_dataLen];
        memcpy((void *)_data,(void *)pixelData , _dataLen);
    }
    else                                               //decompressed data length
    {
        for (int i = 0; i < _numberOfMipmaps && (width || height); ++i)
        {
            if (width == 0) width = 1;
            if (height == 0) height = 1;
            
            _dataLen += (height * width *4);
            
            width >>= 1;
            height >>= 1;
        }
        _data = new unsigned char [_dataLen];
    }
    
    /* load the mipmaps */
    int encodeOffset = 0;
    int decodeOffset = 0;
    width = _width;  height = _height;
    
    for (int i = 0; i < _numberOfMipmaps && (width || height); ++i)
    {
        if (width == 0) width = 1;
        if (height == 0) height = 1;
        
        int size = ((width+3)/4)*((height+3)/4)*blockSize;
        
        if (Configuration::getInstance()->supportsATITC())
        {
            /* decode texture through hardware */
            
            CCLOG("this is atitc H decode");
            
            switch (header->glInternalFormat)
            {
                case CC_GL_ATC_RGB_AMD:
                    _renderFormat = Texture2D::PixelFormat::ATC_RGB;
                    break;
                case CC_GL_ATC_RGBA_EXPLICIT_ALPHA_AMD:
                    _renderFormat = Texture2D::PixelFormat::ATC_EXPLICIT_ALPHA;
                    break;
                case CC_GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD:
                    _renderFormat = Texture2D::PixelFormat::ATC_INTERPOLATED_ALPHA;
                    break;
                default:
                    break;
            }
            
            _mipmaps[i].address = (unsigned char *)_data + encodeOffset;
            _mipmaps[i].len = size;
        }
        else
        {
            /* if it is not GLES or the device does not support ATITC, decode the texture in software */
            
            int bytePerPixel = 4;
            unsigned int stride = width * bytePerPixel;
            _renderFormat = Texture2D::PixelFormat::RGBA8888;
            
            std::vector<unsigned char> decodeImageData(stride * height);
            switch (header->glInternalFormat)
            {
                case CC_GL_ATC_RGB_AMD:
                    atitc_decode(pixelData + encodeOffset, &decodeImageData[0], width, height, ATITCDecodeFlag::ATC_RGB);
                    break;
                case CC_GL_ATC_RGBA_EXPLICIT_ALPHA_AMD:
                    atitc_decode(pixelData + encodeOffset, &decodeImageData[0], width, height, ATITCDecodeFlag::ATC_EXPLICIT_ALPHA);
                    break;
                case CC_GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD:
                    atitc_decode(pixelData + encodeOffset, &decodeImageData[0], width, height, ATITCDecodeFlag::ATC_INTERPOLATED_ALPHA);
                    break;
                default:
                    break;
            }

            _mipmaps[i].address = (unsigned char *)_data + decodeOffset;
            _mipmaps[i].len = (stride * height);
            memcpy((void *)_mipmaps[i].address, (void *)&decodeImageData[0], _mipmaps[i].len);
            decodeOffset += stride * height;
        }

        encodeOffset += (size + 4);
        width >>= 1;
        height >>= 1;
    }
    /* end load the mipmaps */
    
    return true;
}

bool Image::initWithPVRData(const unsigned char * data, int dataLen)
{
    return initWithPVRv2Data(data, dataLen) || initWithPVRv3Data(data, dataLen);
}

bool Image::initWithWebpData(const unsigned char * data, int dataLen)
{
	bool bRet = false;
	do
	{
        WebPDecoderConfig config;
        if (WebPInitDecoderConfig(&config) == 0) break;
        if (WebPGetFeatures(static_cast<const uint8_t*>(data), dataLen, &config.input) != VP8_STATUS_OK) break;
        if (config.input.width == 0 || config.input.height == 0) break;
        
        config.output.colorspace = MODE_RGBA;
        _renderFormat = Texture2D::PixelFormat::RGBA8888;
        _width    = config.input.width;
        _height   = config.input.height;
        
        int bufferSize = _width * _height * 4;
        _data = new unsigned char[bufferSize];
        
        config.output.u.RGBA.rgba = static_cast<uint8_t*>(_data);
        config.output.u.RGBA.stride = _width * 4;
        config.output.u.RGBA.size = bufferSize;
        config.output.is_external_memory = 1;
        
        if (WebPDecode(static_cast<const uint8_t*>(data), dataLen, &config) != VP8_STATUS_OK)
        {
            delete []_data;
            _data = NULL;
            break;
        }
        
        bRet = true;
	} while (0);
	return bRet;
}

bool Image::initWithRawData(const unsigned char * data, int dataLen, int width, int height, int bitsPerComponent, bool preMulti)
{
    bool bRet = false;
    do 
    {
        CC_BREAK_IF(0 == width || 0 == height);

        _height   = height;
        _width    = width;
        _preMulti = preMulti;
        _renderFormat = Texture2D::PixelFormat::RGBA8888;

        // only RGBA8888 supported
        int bytesPerComponent = 4;
        _dataLen = height * width * bytesPerComponent;
        _data = new unsigned char[_dataLen];
        CC_BREAK_IF(! _data);
        memcpy(_data, data, _dataLen);

        bRet = true;
    } while (0);

    return bRet;
}


#if (CC_TARGET_PLATFORM != CC_PLATFORM_IOS)
bool Image::saveToFile(const char *pszFilePath, bool bIsToRGB)
{
    //only support for Texture2D::PixelFormat::RGB888 or Texture2D::PixelFormat::RGBA8888 uncompressed data
    if (isCompressed() || (_renderFormat != Texture2D::PixelFormat::RGB888 && _renderFormat != Texture2D::PixelFormat::RGBA8888))
    {
        CCLOG("cocos2d: Image: saveToFile is only support for Texture2D::PixelFormat::RGB888 or Texture2D::PixelFormat::RGBA8888 uncompressed data for now");
        return false;
    }

#if (CC_TARGET_PLATFORM == CC_PLATFORM_MAC)
    assert(false);
    return false;
#else
    bool bRet = false;

    do 
    {
        CC_BREAK_IF(NULL == pszFilePath);

        std::string strFilePath(pszFilePath);
        CC_BREAK_IF(strFilePath.size() <= 4);

        std::string strLowerCasePath(strFilePath);
        for (unsigned int i = 0; i < strLowerCasePath.length(); ++i)
        {
            strLowerCasePath[i] = tolower(strFilePath[i]);
        }

        if (std::string::npos != strLowerCasePath.find(".png"))
        {
            CC_BREAK_IF(!saveImageToPNG(pszFilePath, bIsToRGB));
        }
        else if (std::string::npos != strLowerCasePath.find(".jpg"))
        {
            CC_BREAK_IF(!saveImageToJPG(pszFilePath));
        }
        else
        {
            break;
        }

        bRet = true;
    } while (0);

    return bRet;
#endif
}
#endif

bool Image::saveImageToPNG(const char * filePath, bool isToRGB)
{
    bool bRet = false;
    do 
    {
        CC_BREAK_IF(NULL == filePath);

        FILE *fp;
        png_structp png_ptr;
        png_infop info_ptr;
        png_colorp palette;
        png_bytep *row_pointers;

        fp = fopen(filePath, "wb");
        CC_BREAK_IF(NULL == fp);

        png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);

        if (NULL == png_ptr)
        {
            fclose(fp);
            break;
        }

        info_ptr = png_create_info_struct(png_ptr);
        if (NULL == info_ptr)
        {
            fclose(fp);
            png_destroy_write_struct(&png_ptr, NULL);
            break;
        }
#if (CC_TARGET_PLATFORM != CC_PLATFORM_BADA && CC_TARGET_PLATFORM != CC_PLATFORM_NACL)
        if (setjmp(png_jmpbuf(png_ptr)))
        {
            fclose(fp);
            png_destroy_write_struct(&png_ptr, &info_ptr);
            break;
        }
#endif
        png_init_io(png_ptr, fp);

        if (!isToRGB && hasAlpha())
        {
            png_set_IHDR(png_ptr, info_ptr, _width, _height, 8, PNG_COLOR_TYPE_RGB_ALPHA,
                PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_BASE, PNG_FILTER_TYPE_BASE);
        } 
        else
        {
            png_set_IHDR(png_ptr, info_ptr, _width, _height, 8, PNG_COLOR_TYPE_RGB,
                PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_BASE, PNG_FILTER_TYPE_BASE);
        }

        palette = (png_colorp)png_malloc(png_ptr, PNG_MAX_PALETTE_LENGTH * sizeof (png_color));
        png_set_PLTE(png_ptr, info_ptr, palette, PNG_MAX_PALETTE_LENGTH);

        png_write_info(png_ptr, info_ptr);

        png_set_packing(png_ptr);

        row_pointers = (png_bytep *)malloc(_height * sizeof(png_bytep));
        if(row_pointers == NULL)
        {
            fclose(fp);
            png_destroy_write_struct(&png_ptr, &info_ptr);
            break;
        }

        if (hasAlpha())
        {
            for (int i = 0; i < (int)_height; i++)
            {
                row_pointers[i] = (png_bytep)_data + i * _width * 3;
            }

            png_write_image(png_ptr, row_pointers);

            free(row_pointers);
            row_pointers = NULL;
        }
        else
        {
            if (isToRGB)
            {
                unsigned char *pTempData = new unsigned char[_width * _height * 3];
                if (NULL == pTempData)
                {
                    fclose(fp);
                    png_destroy_write_struct(&png_ptr, &info_ptr);
                    break;
                }

                for (int i = 0; i < _height; ++i)
                {
                    for (int j = 0; j < _width; ++j)
                    {
                        pTempData[(i * _width + j) * 3] = _data[(i * _width + j) * 4];
                        pTempData[(i * _width + j) * 3 + 1] = _data[(i * _width + j) * 4 + 1];
                        pTempData[(i * _width + j) * 3 + 2] = _data[(i * _width + j) * 4 + 2];
                    }
                }

                for (int i = 0; i < (int)_height; i++)
                {
                    row_pointers[i] = (png_bytep)pTempData + i * _width * 3;
                }

                png_write_image(png_ptr, row_pointers);

                free(row_pointers);
                row_pointers = NULL;

                CC_SAFE_DELETE_ARRAY(pTempData);
            } 
            else
            {
                for (int i = 0; i < (int)_height; i++)
                {
                    row_pointers[i] = (png_bytep)_data + i * _width * 4;
                }

                png_write_image(png_ptr, row_pointers);

                free(row_pointers);
                row_pointers = NULL;
            }
        }

        png_write_end(png_ptr, info_ptr);

        png_free(png_ptr, palette);
        palette = NULL;

        png_destroy_write_struct(&png_ptr, &info_ptr);

        fclose(fp);

        bRet = true;
    } while (0);
    return bRet;
}
bool Image::saveImageToJPG(const char * filePath)
{
    bool bRet = false;
    do 
    {
        CC_BREAK_IF(NULL == filePath);

        struct jpeg_compress_struct cinfo;
        struct jpeg_error_mgr jerr;
        FILE * outfile;                 /* target file */
        JSAMPROW row_pointer[1];        /* pointer to JSAMPLE row[s] */
        int     row_stride;          /* physical row width in image buffer */

        cinfo.err = jpeg_std_error(&jerr);
        /* Now we can initialize the JPEG compression object. */
        jpeg_create_compress(&cinfo);

        CC_BREAK_IF((outfile = fopen(filePath, "wb")) == NULL);
        
        jpeg_stdio_dest(&cinfo, outfile);

        cinfo.image_width = _width;    /* image width and height, in pixels */
        cinfo.image_height = _height;
        cinfo.input_components = 3;       /* # of color components per pixel */
        cinfo.in_color_space = JCS_RGB;       /* colorspace of input image */

        jpeg_set_defaults(&cinfo);

        jpeg_start_compress(&cinfo, TRUE);

        row_stride = _width * 3; /* JSAMPLEs per row in image_buffer */

        if (hasAlpha())
        {
            unsigned char *pTempData = new unsigned char[_width * _height * 3];
            if (NULL == pTempData)
            {
                jpeg_finish_compress(&cinfo);
                jpeg_destroy_compress(&cinfo);
                fclose(outfile);
                break;
            }

            for (int i = 0; i < _height; ++i)
            {
                for (int j = 0; j < _width; ++j)

                {
                    pTempData[(i * _width + j) * 3] = _data[(i * _width + j) * 4];
                    pTempData[(i * _width + j) * 3 + 1] = _data[(i * _width + j) * 4 + 1];
                    pTempData[(i * _width + j) * 3 + 2] = _data[(i * _width + j) * 4 + 2];
                }
            }

            while (cinfo.next_scanline < cinfo.image_height) {
                row_pointer[0] = & pTempData[cinfo.next_scanline * row_stride];
                (void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
            }

            CC_SAFE_DELETE_ARRAY(pTempData);
        } 
        else
        {
            while (cinfo.next_scanline < cinfo.image_height) {
                row_pointer[0] = & _data[cinfo.next_scanline * row_stride];
                (void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
            }
        }

        jpeg_finish_compress(&cinfo);
        fclose(outfile);
        jpeg_destroy_compress(&cinfo);
        
        bRet = true;
    } while (0);
    return bRet;
}

NS_CC_END
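To close, here is a small sketch of the save path documented above: build an RGBA8888 image from a raw pixel buffer with initWithRawData() and write it out with saveToFile(). Note that in this version saveToFile() is only compiled for non-iOS targets and only supports uncompressed RGB888/RGBA8888 data; the FileUtils::getInstance()->getWritablePath() call is an assumption about where a writable directory lives on the target platform.

// Sketch: build a 4x4 solid red RGBA8888 image in memory and save it as a PNG.
static void writeSolidRedPng()
{
    const int w = 4, h = 4;
    unsigned char pixels[w * h * 4];
    for (int i = 0; i < w * h; ++i)
    {
        pixels[i * 4 + 0] = 255;  // R
        pixels[i * 4 + 1] = 0;    // G
        pixels[i * 4 + 2] = 0;    // B
        pixels[i * 4 + 3] = 255;  // A
    }

    cocos2d::Image *image = new cocos2d::Image();
    if (image->initWithRawData(pixels, (int)sizeof(pixels), w, h, 8, false))
    {
        std::string path = cocos2d::FileUtils::getInstance()->getWritablePath() + "solid_red.png";
        image->saveToFile(path.c_str(), false);   // false: keep the alpha channel in the PNG
    }
    image->release();
}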