Allow SkLazyPixelRef to use SkScaledImageCache

-   SkScaledImageCache:
    -   Add new FindAndLock/AddAndLock variants that work well with
        SkLazyPixelRefs (keyed on generation ID, width, and height);
        see the SkScaledImageCache sketch below this list.
    -   Add static versions of these new variants.

-   SkLazyPixelRef:
    -   If NULL is passed as the SkImageCache* in the constructor, it
        now defaults to the static SkScaledImageCache methods to
        cache decoded images; see the SkLazyPixelRef sketch below
        this list.
    -   If (fImageCache==NULL), the default allocator can be changed
        with the setAllocator method.  If (fImageCache!=NULL), the
        SkImageCache handles allocation.

-   Add CachedDecodingPixelRefTest to exercise the new functionality.
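
For reference, a minimal sketch of the intended calling pattern for the
new static SkScaledImageCache variants, keyed on (generation ID, width,
height).  The decode_into() helper and local names are illustrative
only, not part of this change:

    SkBitmap bitmap;
    SkScaledImageCache::ID* id = SkScaledImageCache::FindAndLock(
            pixelRef->getGenerationID(), info.fWidth, info.fHeight, &bitmap);
    if (NULL == id) {
        // Cache miss (or the entry was purged): decode into a fresh
        // bitmap, then add it under the same key so later lookups hit.
        decode_into(&bitmap);  // hypothetical decode step
        id = SkScaledImageCache::AddAndLock(
                pixelRef->getGenerationID(), info.fWidth, info.fHeight, bitmap);
    }
    // bitmap's pixels stay valid while the record is locked.
    SkScaledImageCache::Unlock(id);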
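
A similarly minimal SkLazyPixelRef sketch of the NULL-cache fallback;
encodedData and myDecodeProc are assumed to exist elsewhere and are not
part of this change:

    // Passing NULL for the SkImageCache* selects the global SkScaledImageCache.
    SkAutoTUnref<SkLazyPixelRef> lazy(
            SkNEW_ARGS(SkLazyPixelRef, (encodedData, myDecodeProc, NULL)));
    // lockPixels()/unlockPixels() on this pixel ref now go through the
    // static FindAndLock/AddAndLock/Unlock calls shown above.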

BUG=
R=scroggo@google.com, mtklein@google.com, reed@google.com

Author: halcanary@google.com

Review URL: https://codereview.chromium.org/37343002

git-svn-id: http://skia.googlecode.com/svn/trunk/src@12006 2bbb7eff-a529-9590-31e7-b0007b416f81
diff --git a/core/SkScaledImageCache.cpp b/core/SkScaledImageCache.cpp
index 11a0ee4..644ce7f 100644
--- a/core/SkScaledImageCache.cpp
+++ b/core/SkScaledImageCache.cpp
@@ -7,6 +7,7 @@
 
 #include "SkScaledImageCache.h"
 #include "SkMipMap.h"
+#include "SkOnce.h"
 #include "SkPixelRef.h"
 #include "SkRect.h"
 
@@ -14,6 +15,13 @@
     #define SK_DEFAULT_IMAGE_CACHE_LIMIT     (2 * 1024 * 1024)
 #endif
 
+static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
+    return reinterpret_cast<SkScaledImageCache::ID*>(rec);
+}
+
+static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
+    return reinterpret_cast<SkScaledImageCache::Rec*>(id);
+}
 
  // Implemented from en.wikipedia.org/wiki/MurmurHash.
 static uint32_t compute_hash(const uint32_t data[], int count) {
@@ -42,23 +50,15 @@
 }
 
 struct Key {
-    bool init(const SkBitmap& bm, SkScalar scaleX, SkScalar scaleY) {
-        SkPixelRef* pr = bm.pixelRef();
-        if (!pr) {
-            return false;
-        }
-
-        size_t x, y;
-        SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x);
-        x >>= 2;
-
-        fGenID = pr->getGenerationID();
-        fBounds.set(x, y, x + bm.width(), y + bm.height());
-        fScaleX = scaleX;
-        fScaleY = scaleY;
-
+    Key(uint32_t genID,
+        SkScalar scaleX,
+        SkScalar scaleY,
+        SkIRect  bounds)
+        : fGenID(genID)
+        , fScaleX(scaleX)
+        , fScaleY(scaleY)
+        , fBounds(bounds) {
         fHash = compute_hash(&fGenID, 7);
-        return true;
     }
 
     bool operator<(const Key& other) const {
@@ -151,6 +151,17 @@
 // experimental hash to speed things up
 #define USE_HASH
 
+#if !defined(USE_HASH)
+static inline SkScaledImageCache::Rec* find_rec_in_list(
+        SkScaledImageCache::Rec* head, const Key & key) {
+    SkScaledImageCache::Rec* rec = head;
+    while ((rec != NULL) && (rec->fKey != key)) {
+        rec = rec->fNext;
+    }
+    return rec;
+}
+#endif
+
 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
     fHead = NULL;
     fTail = NULL;
@@ -174,26 +185,24 @@
     delete fHash;
 }
 
-SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig,
+////////////////////////////////////////////////////////////////////////////////
+
+/**
+   This private method is the fully general record finder. All other
+   record finders should call this function. */
+SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
                                                         SkScalar scaleX,
-                                                        SkScalar scaleY) {
-    Key key;
-    if (!key.init(orig, scaleX, scaleY)) {
+                                                        SkScalar scaleY,
+                                                        const SkIRect& bounds) {
+    if (bounds.isEmpty()) {
         return NULL;
     }
-
+    Key key(genID, scaleX, scaleY, bounds);
 #ifdef USE_HASH
     Rec* rec = fHash->find(key);
 #else
-    Rec* rec = fHead;
-    while (rec != NULL) {
-        if (rec->fKey == key) {
-            break;
-        }
-        rec = rec->fNext;
-    }
+    Rec* rec = find_rec_in_list(fHead, key);
 #endif
-
     if (rec) {
         this->moveToHead(rec);  // for our LRU
         rec->fLockCount += 1;
@@ -201,6 +210,36 @@
     return rec;
 }
 
+/**
+   This function finds the bounds of the bitmap *within its pixelRef*.
+   If the bitmap lacks a pixelRef, it will return an empty rect, since
+   that doesn't make sense.  This may be a useful enough function that
+   it should be somewhere else (in SkBitmap?). */
+static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
+    if (!(bm.pixelRef())) {
+        return SkIRect::MakeEmpty();
+    }
+    size_t x, y;
+    SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x);
+    x >>= bm.shiftPerPixel();
+    return SkIRect::MakeXYWH(x, y, bm.width(), bm.height());
+}
+
+
+SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
+                                                        int32_t width,
+                                                        int32_t height,
+                                                        SkBitmap* bitmap) {
+    Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
+                                 SkIRect::MakeWH(width, height));
+    if (rec) {
+        SkASSERT(NULL == rec->fMip);
+        SkASSERT(rec->fBitmap.pixelRef());
+        *bitmap = rec->fBitmap;
+    }
+    return rec_to_id(rec);
+}
+
 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                         SkScalar scaleX,
                                                         SkScalar scaleY,
@@ -209,25 +248,53 @@
         // degenerate, and the key we use for mipmaps
         return NULL;
     }
-
-    Rec* rec = this->findAndLock(orig, scaleX, scaleY);
+    Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
+                                 scaleY, get_bounds_from_bitmap(orig));
     if (rec) {
         SkASSERT(NULL == rec->fMip);
         SkASSERT(rec->fBitmap.pixelRef());
         *scaled = rec->fBitmap;
     }
-    return (ID*)rec;
+    return rec_to_id(rec);
 }
 
 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
                                                            SkMipMap const ** mip) {
-    Rec* rec = this->findAndLock(orig, 0, 0);
+    Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
+                                 get_bounds_from_bitmap(orig));
     if (rec) {
         SkASSERT(rec->fMip);
         SkASSERT(NULL == rec->fBitmap.pixelRef());
         *mip = rec->fMip;
     }
-    return (ID*)rec;
+    return rec_to_id(rec);
+}
+
+
+////////////////////////////////////////////////////////////////////////////////
+/**
+   This private method is the fully general record adder. All other
+   record adders should call this function. */
+void SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
+    SkASSERT(rec);
+    this->addToHead(rec);
+    SkASSERT(1 == rec->fLockCount);
+#ifdef USE_HASH
+    SkASSERT(fHash);
+    fHash->add(rec);
+#endif
+    // We may (now) be overbudget, so see if we need to purge something.
+    this->purgeAsNeeded();
+}
+
+SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
+                                                       int32_t width,
+                                                       int32_t height,
+                                                       const SkBitmap& bitmap) {
+    Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
+    Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
+    this->addAndLock(rec);
+    return rec_to_id(rec);
 }
 
 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
@@ -238,43 +305,26 @@
         // degenerate, and the key we use for mipmaps
         return NULL;
     }
-
-    Key key;
-    if (!key.init(orig, scaleX, scaleY)) {
+    SkIRect bounds = get_bounds_from_bitmap(orig);
+    if (bounds.isEmpty()) {
         return NULL;
     }
-
+    Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
     Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
-    this->addToHead(rec);
-    SkASSERT(1 == rec->fLockCount);
-
-#ifdef USE_HASH
-    fHash->add(rec);
-#endif
-
-    // We may (now) be overbudget, so see if we need to purge something.
-    this->purgeAsNeeded();
-    return (ID*)rec;
+    this->addAndLock(rec);
+    return rec_to_id(rec);
 }
 
 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
                                                           const SkMipMap* mip) {
-    Key key;
-    if (!key.init(orig, 0, 0)) {
+    SkIRect bounds = get_bounds_from_bitmap(orig);
+    if (bounds.isEmpty()) {
         return NULL;
     }
-
+    Key key(orig.getGenerationID(), 0, 0, bounds);
     Rec* rec = SkNEW_ARGS(Rec, (key, mip));
-    this->addToHead(rec);
-    SkASSERT(1 == rec->fLockCount);
-
-#ifdef USE_HASH
-    fHash->add(rec);
-#endif
-
-    // We may (now) be overbudget, so see if we need to purge something.
-    this->purgeAsNeeded();
-    return (ID*)rec;
+    this->addAndLock(rec);
+    return rec_to_id(rec);
 }
 
 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
@@ -285,7 +335,7 @@
         bool found = false;
         Rec* rec = fHead;
         while (rec != NULL) {
-            if ((ID*)rec == id) {
+            if (rec == id_to_rec(id)) {
                 found = true;
                 break;
             }
@@ -294,7 +344,7 @@
         SkASSERT(found);
     }
 #endif
-    Rec* rec = (Rec*)id;
+    Rec* rec = id_to_rec(id);
     SkASSERT(rec->fLockCount > 0);
     rec->fLockCount -= 1;
 
@@ -451,14 +501,38 @@
 
 SK_DECLARE_STATIC_MUTEX(gMutex);
 
+static void create_cache(SkScaledImageCache** cache) {
+    *cache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
+}
+
 static SkScaledImageCache* get_cache() {
-    static SkScaledImageCache* gCache;
-    if (!gCache) {
-        gCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
-    }
+    static SkScaledImageCache* gCache(NULL);
+    SK_DECLARE_STATIC_ONCE(create_cache_once);
+    SkOnce<SkScaledImageCache**>(&create_cache_once, create_cache, &gCache);
+    SkASSERT(NULL != gCache);
     return gCache;
 }
 
+
+SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
+                                uint32_t pixelGenerationID,
+                                int32_t width,
+                                int32_t height,
+                                SkBitmap* scaled) {
+    SkAutoMutexAcquire am(gMutex);
+    return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
+}
+
+SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
+                               uint32_t pixelGenerationID,
+                               int32_t width,
+                               int32_t height,
+                               const SkBitmap& scaled) {
+    SkAutoMutexAcquire am(gMutex);
+    return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
+}
+
+
 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
                                                         SkScalar scaleX,
                                                         SkScalar scaleY,
diff --git a/core/SkScaledImageCache.h b/core/SkScaledImageCache.h
index 32474b7..fee69d2 100644
--- a/core/SkScaledImageCache.h
+++ b/core/SkScaledImageCache.h
@@ -31,13 +31,25 @@
      *  instance of this cache.
      */
 
+    static ID* FindAndLock(uint32_t pixelGenerationID,
+                           int32_t width,
+                           int32_t height,
+                           SkBitmap* returnedBitmap);
+
     static ID* FindAndLock(const SkBitmap& original, SkScalar scaleX,
-                           SkScalar scaleY, SkBitmap* scaled);
-    static ID* FindAndLockMip(const SkBitmap& original, SkMipMap const**);
+                           SkScalar scaleY, SkBitmap* returnedBitmap);
+    static ID* FindAndLockMip(const SkBitmap& original,
+                              SkMipMap const** returnedMipMap);
+
+
+    static ID* AddAndLock(uint32_t pixelGenerationID,
+                          int32_t width,
+                          int32_t height,
+                          const SkBitmap& bitmap);
 
     static ID* AddAndLock(const SkBitmap& original, SkScalar scaleX,
-                          SkScalar scaleY, const SkBitmap& scaled);
-    static ID* AddAndLockMip(const SkBitmap& original, const SkMipMap*);
+                          SkScalar scaleY, const SkBitmap& bitmap);
+    static ID* AddAndLockMip(const SkBitmap& original, const SkMipMap* mipMap);
 
     static void Unlock(ID*);
 
@@ -51,24 +63,48 @@
     ~SkScaledImageCache();
 
     /**
-     *  Search the cache for a scaled version of original. If found, return it
-     *  in scaled, and return its ID pointer. Use the returned ptr to unlock
-     *  the cache when you are done using scaled.
+     *  Search the cache for a matching bitmap (using generationID,
+     *  width, and height as a search key). If found, return it in
+     *  returnedBitmap, and return its ID pointer. Use the returned
+     *  ptr to unlock the cache when you are done using
+     *  returnedBitmap.
      *
-     *  If a match is not found, scaled will be unmodifed, and NULL will be
-     *  returned.
+     *  If a match is not found, returnedBitmap will be unmodified, and
+     *  NULL will be returned.
+     *
+     *  This is used if there is no scaling or subsetting, for example
+     *  by SkLazyPixelRef.
      */
-    ID* findAndLock(const SkBitmap& original, SkScalar scaleX,
-                    SkScalar scaleY, SkBitmap* scaled);
-    ID* findAndLockMip(const SkBitmap& original, SkMipMap const**);
+    ID* findAndLock(uint32_t pixelGenerationID, int32_t width, int32_t height,
+                    SkBitmap* returnedBitmap);
 
     /**
-     *  To add a new (scaled) bitmap to the cache, call AddAndLock. Use the
-     *  returned ptr to unlock the cache when you are done using scaled.
+     *  Search the cache for a scaled version of original. If found,
+     *  return it in returnedBitmap, and return its ID pointer. Use
+     *  the returned ptr to unlock the cache when you are done using
+     *  returnedBitmap.
+     *
+     *  If a match is not found, returnedBitmap will be unmodified, and
+     *  NULL will be returned.
      */
+    ID* findAndLock(const SkBitmap& original, SkScalar scaleX,
+                    SkScalar scaleY, SkBitmap* returnedBitmap);
+    ID* findAndLockMip(const SkBitmap& original,
+                       SkMipMap const** returnedMipMap);
+
+    /**
+     *  To add a new bitmap (or mipMap) to the cache, call
+     *  addAndLock. Use the returned ptr to unlock the cache when you
+     *  are done using the bitmap.
+     *
+     *  Use (generationID, width, height) or (original, scaleX,
+     *  scaleY) or (original) as the search key.
+     */
+    ID* addAndLock(uint32_t pixelGenerationID, int32_t width, int32_t height,
+                   const SkBitmap& bitmap);
     ID* addAndLock(const SkBitmap& original, SkScalar scaleX,
-                   SkScalar scaleY, const SkBitmap& scaled);
-    ID* addAndLockMip(const SkBitmap& original, const SkMipMap*);
+                   SkScalar scaleY, const SkBitmap& bitmap);
+    ID* addAndLockMip(const SkBitmap& original, const SkMipMap* mipMap);
 
     /**
      *  Given a non-null ID ptr returned by either findAndLock or addAndLock,
@@ -101,7 +137,9 @@
     size_t  fByteLimit;
     int     fCount;
 
-    Rec* findAndLock(const SkBitmap& original, SkScalar sx, SkScalar sy);
+    Rec* findAndLock(uint32_t generationID, SkScalar sx, SkScalar sy,
+                     const SkIRect& bounds);
+    void addAndLock(Rec* rec);
 
     void purgeAsNeeded();
 
@@ -115,5 +153,4 @@
     void validate() const {}
 #endif
 };
-
 #endif
diff --git a/lazy/SkLazyPixelRef.cpp b/lazy/SkLazyPixelRef.cpp
index 0454362..17bdff4 100644
--- a/lazy/SkLazyPixelRef.cpp
+++ b/lazy/SkLazyPixelRef.cpp
@@ -11,6 +11,7 @@
 #include "SkData.h"
 #include "SkImageCache.h"
 #include "SkImagePriv.h"
+#include "SkScaledImageCache.h"
 
 #if LAZY_CACHE_STATS
 #include "SkThread.h"
@@ -22,9 +23,9 @@
 SkLazyPixelRef::SkLazyPixelRef(SkData* data, SkBitmapFactory::DecodeProc proc, SkImageCache* cache)
     // Pass NULL for the Mutex so that the default (ring buffer) will be used.
     : INHERITED(NULL)
+    , fErrorInDecoding(false)
     , fDecodeProc(proc)
     , fImageCache(cache)
-    , fCacheId(SkImageCache::UNINITIALIZED_ID)
     , fRowBytes(0) {
     SkASSERT(fDecodeProc != NULL);
     if (NULL == data) {
@@ -35,8 +36,12 @@
         fData->ref();
         fErrorInDecoding = data->size() == 0;
     }
-    SkASSERT(cache != NULL);
-    cache->ref();
+    if (fImageCache != NULL) {
+        fImageCache->ref();
+        fCacheId = SkImageCache::UNINITIALIZED_ID;
+    } else {
+        fScaledCacheId = NULL;
+    }
 
     // mark as uninitialized -- all fields are -1
     memset(&fLazilyCachedInfo, 0xFF, sizeof(fLazilyCachedInfo));
@@ -48,6 +53,14 @@
 SkLazyPixelRef::~SkLazyPixelRef() {
     SkASSERT(fData != NULL);
     fData->unref();
+    if (NULL == fImageCache) {
+        if (fScaledCacheId != NULL) {
+            SkScaledImageCache::Unlock(fScaledCacheId);
+            // TODO(halcanary): SkScaledImageCache needs a
+            // throwAwayCache(id) method.
+        }
+        return;
+    }
     SkASSERT(fImageCache);
     if (fCacheId != SkImageCache::UNINITIALIZED_ID) {
         fImageCache->throwAwayCache(fCacheId);
@@ -79,10 +92,91 @@
     return &fLazilyCachedInfo;
 }
 
+/**
+   Returns bitmap->getPixels() on success; NULL on failure */
+static void* decode_into_bitmap(SkImage::Info* info,
+                                SkBitmapFactory::DecodeProc decodeProc,
+                                size_t* rowBytes,
+                                SkData* data,
+                                SkBitmap* bm) {
+    SkASSERT(info && decodeProc && rowBytes && data && bm);
+    if (!(bm->setConfig(SkImageInfoToBitmapConfig(*info), info->fWidth,
+                        info->fHeight, *rowBytes, info->fAlphaType)
+          && bm->allocPixels(NULL, NULL))) {
+        // Use the default allocator.  It may be necessary for the
+        // SkLazyPixelRef to have an allocator field which is passed
+        // into allocPixels().
+        return NULL;
+    }
+    SkBitmapFactory::Target target;
+    target.fAddr = bm->getPixels();
+    target.fRowBytes = bm->rowBytes();
+    *rowBytes = target.fRowBytes;
+    if (!decodeProc(data->data(), data->size(), info, &target)) {
+        return NULL;
+    }
+    return target.fAddr;
+}
+
+void* SkLazyPixelRef::lockScaledImageCachePixels() {
+    SkASSERT(!fErrorInDecoding);
+    SkASSERT(NULL == fImageCache);
+    SkBitmap bitmap;
+    const SkImage::Info* info = this->getCachedInfo();
+    if (info == NULL) {
+        return NULL;
+    }
+    // If this is the first time through, this is guaranteed to fail.
+    // Maybe we should have a flag that says "don't even bother looking"
+    fScaledCacheId = SkScaledImageCache::FindAndLock(this->getGenerationID(),
+                                                     info->fWidth,
+                                                     info->fHeight,
+                                                     &bitmap);
+    if (fScaledCacheId != NULL) {
+        SkAutoLockPixels autoLockPixels(bitmap);
+        void* pixels = bitmap.getPixels();
+        SkASSERT(NULL != pixels);
+        // At this point, the autoLockPixels will unlockPixels()
+        // to remove bitmap's lock on the pixels.  We will then
+        // destroy bitmap.  The *only* guarantee that this pointer
+        // remains valid is the guarantee made by
+        // SkScaledImageCache that it will not destroy the *other*
+        // bitmap (SkScaledImageCache::Rec.fBitmap) that holds a
+        // reference to the concrete PixelRef while this record is
+        // locked.
+        return pixels;
+    } else {
+        // Cache has been purged, must re-decode.
+        void* pixels = decode_into_bitmap(const_cast<SkImage::Info*>(info),
+                                          fDecodeProc, &fRowBytes, fData,
+                                          &bitmap);
+        if (NULL == pixels) {
+            fErrorInDecoding = true;
+            return NULL;
+        }
+        fScaledCacheId = SkScaledImageCache::AddAndLock(this->getGenerationID(),
+                                                        info->fWidth,
+                                                        info->fHeight,
+                                                        bitmap);
+        SkASSERT(fScaledCacheId != NULL);
+        return pixels;
+    }
+}
+
 void* SkLazyPixelRef::onLockPixels(SkColorTable**) {
     if (fErrorInDecoding) {
         return NULL;
     }
+    if (NULL == fImageCache) {
+        return this->lockScaledImageCachePixels();
+    } else {
+        return this->lockImageCachePixels();
+    }
+}
+
+void* SkLazyPixelRef::lockImageCachePixels() {
+    SkASSERT(fImageCache != NULL);
+    SkASSERT(!fErrorInDecoding);
     SkBitmapFactory::Target target;
     // Check to see if the pixels still exist in the cache.
     if (SkImageCache::UNINITIALIZED_ID == fCacheId) {
@@ -147,8 +241,19 @@
     if (fErrorInDecoding) {
         return;
     }
-    if (fCacheId != SkImageCache::UNINITIALIZED_ID) {
-        fImageCache->releaseCache(fCacheId);
+    if (NULL == fImageCache) {
+        // onUnlockPixels() should never be called a second time from
+        // PixelRef::Unlock() without calling onLockPixels() first.
+        SkASSERT(NULL != fScaledCacheId);
+        if (NULL != fScaledCacheId) {
+            SkScaledImageCache::Unlock(fScaledCacheId);
+            fScaledCacheId = NULL;
+        }
+    } else {  // use fImageCache
+        SkASSERT(SkImageCache::UNINITIALIZED_ID != fCacheId);
+        if (SkImageCache::UNINITIALIZED_ID != fCacheId) {
+            fImageCache->releaseCache(fCacheId);
+        }
     }
 }
 
@@ -157,8 +262,6 @@
     return fData;
 }
 
-#include "SkImagePriv.h"
-
 static bool init_from_info(SkBitmap* bm, const SkImage::Info& info,
                            size_t rowBytes) {
     SkBitmap::Config config = SkImageInfoToBitmapConfig(info);
@@ -206,3 +309,4 @@
     *bitmap = tmp;
     return true;
 }
+
diff --git a/lazy/SkLazyPixelRef.h b/lazy/SkLazyPixelRef.h
index 8f7a751..c7fbd7b 100644
--- a/lazy/SkLazyPixelRef.h
+++ b/lazy/SkLazyPixelRef.h
@@ -10,8 +10,10 @@
 
 #include "SkBitmapFactory.h"
 #include "SkImage.h"
+#include "SkImageCache.h"
 #include "SkPixelRef.h"
 #include "SkFlattenable.h"
+#include "SkScaledImageCache.h"
 
 class SkColorTable;
 class SkData;
@@ -33,8 +35,9 @@
      *  Create a new SkLazyPixelRef.
      *  @param SkData Encoded data representing the pixels.
      *  @param DecodeProc Called to decode the pixels when needed. Must be non-NULL.
-     *  @param SkImageCache Object that handles allocating and freeing the pixel memory, as needed.
-     *         Must not be NULL.
+     *  @param SkImageCache Object that handles allocating and freeing
+     *         the pixel memory, as needed.  If NULL, use the global
+     *         SkScaledImageCache.
      */
     SkLazyPixelRef(SkData*, SkBitmapFactory::DecodeProc, SkImageCache*);
 
@@ -69,7 +72,10 @@
     SkData*                     fData;
     SkBitmapFactory::DecodeProc fDecodeProc;
     SkImageCache*               fImageCache;
-    intptr_t                    fCacheId;
+    union {
+        SkImageCache::ID        fCacheId;
+        SkScaledImageCache::ID* fScaledCacheId;
+    };
     size_t                      fRowBytes;
     SkImage::Info               fLazilyCachedInfo;
 
@@ -80,6 +86,9 @@
 
     // lazily initialized our cached info. Returns NULL on failure.
     const SkImage::Info* getCachedInfo();
+    void* lockScaledImageCachePixels();
+    void* lockImageCachePixels();
+
 
     typedef SkPixelRef INHERITED;
 };