diff --git a/src/CHK.hs b/src/CHK.hs
index a1caddf8a870c2a5a9879c89c59b73c77b4deb14..6d1a6ec0c065202f5b6995cd26f87b3f4ab32f2d 100644
--- a/src/CHK.hs
+++ b/src/CHK.hs
@@ -296,17 +296,17 @@ chkEncode version stateRef p@(Parameters segmentSize total happy required) dataS
   (CHKState currentStage crypttextHashes crypttextHashContext blockHashes) <- readIORef stateRef
   case currentStage of
     Version -> do
-      -- print "chk_encode"
-      -- print ("Data size:", dataSize)
-      -- print ("Share size:", shareSize)
-      -- print ("Block size:", blockSize)
-      -- print ("Segment size:", segmentSize)
-      -- print ("Num segments:", numSegments)
-      -- print ("Effective segments:", effectiveSegments)
-      -- print ("Segment hash size:", segmentHashSize)
-      -- print ("Params:", p)
-      -- print ("Tail params:", tailParams)
-      -- print ("Tail padding:", tailPadding)
+      print "chk_encode"
+      print ("Data size:", dataSize)
+      print ("Share size:", shareSize)
+      print ("Block size:", blockSize)
+      print ("Segment size:", segmentSize)
+      print ("Num segments:", numSegments)
+      print ("Effective segments:", effectiveSegments)
+      print ("Segment hash size:", segmentHashSize)
+      print ("Params:", p)
+      print ("Tail params:", tailParams)
+      print ("Tail padding:", tailPadding)
 
       setStage BlockSize
       same_blocks $
diff --git a/src/Upload.hs b/src/Upload.hs
index db0e4507db7ea8e445a5515d2f65dc353da5e18b..0174175cca086a11e4679d6dfa80051982fbd447 100644
--- a/src/Upload.hs
+++ b/src/Upload.hs
@@ -1,12 +1,15 @@
 module Upload
-  ( UploadResult(uploadResultReadCap, uploadResultExistingShares, uploadResultShareMap)
+  ( UploadResult(UploadResult, uploadResultReadCap, uploadResultExistingShares, uploadResultShareMap)
   , Capability(capabilityText)
   , Parameters(Parameters)
   , defaultParameters
+  , nullStorageServer
   , filesystemUploadable
   , filesystemUploadableWithConvergence
   , filesystemUploadableRandomConvergence
   , filesystemStorageServer
+  , memoryUploadable
+  , memoryUploadableRandomConvergence
   , getConvergentKey
   , upload
   , store
@@ -15,11 +18,10 @@ module Upload
   , adjustSegmentSize
   ) where
 
-import Control.Concurrent.MVar
-  ( MVar
-  , newEmptyMVar
-  , putMVar
-  , takeMVar
+import Data.IORef
+  ( newIORef
+  , readIORef
+  , writeIORef
   )
 
 import Control.Concurrent.Async
@@ -364,6 +366,54 @@ filesystemUploadableRandomConvergence path params = do
   filesystemUploadable key path params
 
 
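+-- | Like 'filesystemUploadableRandomConvergence', but for content already in
+-- memory: build an 'Uploadable' from a ByteString using a freshly generated
+-- random AES-128 key.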
+memoryUploadableRandomConvergence :: B.ByteString -> Parameters -> IO Uploadable
+memoryUploadableRandomConvergence content params = do
+  key <- buildKeyIO :: IO AESKey128
+  memoryUploadable key content params
+
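+-- | Build an 'Uploadable' for a ByteString held in memory.  The cleartext
+-- reader hands back successive chunks of the content, tracking its position
+-- in an IORef.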
+memoryUploadable :: AESKey128 -> B.ByteString -> Parameters -> IO Uploadable
+memoryUploadable key content params = do
+  let fsize = fromIntegral $ B.length content
+  reader <- memoryReader content
+  return $ Uploadable
+    { uploadableKey = key
+    , uploadableSize = fsize
+    , uploadableParameters = adjustSegmentSize params fsize
+    , uploadableReadCleartext = reader
+    }
+  where
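+    -- Stateful reader: each call returns the next @len@ bytes of the content
+    -- (or fewer once the end is reached) and advances the recorded position.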
+    memoryReader :: B.ByteString -> IO (Integer -> IO B.ByteString)
+    memoryReader bs = do
+      posRef <- newIORef 0
+      return $ \len -> do
+        pos <- readIORef posRef
+        let result = B.take (fromIntegral len) . B.drop pos $ bs
+        writeIORef posRef (pos + B.length result)
+        return result
+
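+-- | A no-op 'StorageServer': writes are discarded, reads return an empty
+-- share, and no buckets are ever reported.  Handy for tests that only care
+-- about the capability computed by 'store'.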
+nullStorageServer :: StorageServerID -> StorageServer
+nullStorageServer sid = StorageServer
+  { storageServerID = sid
+  , storageServerWrite = write
+  , storageServerRead = read
+  , storageServerGetBuckets = getBuckets
+  }
+  where
+    write _idx _num _off _content = return ()
+    read _idx _num = return mempty
+    getBuckets _idx = return mempty
+
 filesystemStorageServer :: FilePath -> IO StorageServer
 filesystemStorageServer shareRoot = do
   createDirectoryIfMissing True shareRoot
diff --git a/test/SpecUpload.hs b/test/SpecUpload.hs
index 8bce2bd2615d39a918ff1f78d9489aefd715155d..7ee9ab6b54526fa429c9b71c87be2ef34babb119 100644
--- a/test/SpecUpload.hs
+++ b/test/SpecUpload.hs
@@ -1,3 +1,5 @@
+{-# LANGUAGE NamedFieldPuns #-}
+
 module SpecUpload
   ( tests
   ) where
@@ -49,19 +51,25 @@ import Data.IORef
 
 import Types
   ( Parameters(..)
+  , Capability(Capability)
   , Size
   , Required
   )
 
 import Upload
-  (  getConvergentKey
+  ( UploadResult(UploadResult, uploadResultReadCap)
+  , getConvergentKey
   , adjustSegmentSize
+  , memoryUploadable
+  , nullStorageServer
+  , store
   )
 
 tests :: TestTree
 tests = testGroup "Upload"
   [ testConvergence
   , testAdjustSegmentSize
+  , testStore
   ]
 
 testAdjustSegmentSize :: TestTree
@@ -139,3 +147,23 @@ testConvergence = testGroup "Convergence"
       , paramHappyShares = 7
       , paramRequiredShares = 3
       }
+
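+-- A known-answer test: upload 2 KiB of fixed data with a fixed convergence
+-- secret to no-op storage servers and check the resulting CHK read
+-- capability against a precomputed string.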
+testStore :: TestTree
+testStore = testGroup "store capability test vectors"
+  [ testCase "the operation results in a known-correct capability string" $ do
+      let dataContent = B.concat $ replicate 256 "01234567"
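+      -- Parameters is segment size, total shares, happy, required (the order
+      -- chkEncode pattern-matches): 128 KiB segments, 2 shares, happy 1, required 1.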
+      let params = Parameters (128 * 1024) 2 1 1
+      let secret = B.replicate 32 0x42
+      let key = getConvergentKey secret params (BL.fromStrict dataContent)
+      uploadable <- memoryUploadable key dataContent params
+      let servers = [nullStorageServer "x", nullStorageServer "y"]
+      UploadResult { uploadResultReadCap } <- store servers uploadable
+      assertEqual "The capability matches the expected value."
+        (Capability "URI:CHK:56e6ydnnvpimf6727336wzpzy4:gns7heugtvn3fxd4gy6dmucqchtrszlznfhq7gnxhg5hof6ltu3a:1:1:2048")
+        uploadResultReadCap
+  ]