diff --git a/requirements.in b/requirements.in
index b35f3c038dc93795c97b4fff635f32a687f6ad6d..a2b360de74ed5068846780c3cf4591b652d3ce35 100644
--- a/requirements.in
+++ b/requirements.in
@@ -1,4 +1,4 @@
 attrs
 zope.interface
 twisted
-https://github.com/tahoe-lafs/tahoe-lafs/archive/6c1a37c95188c1d9a877286ef726280a68d38a4b.zip#egg=tahoe-lafs
+https://github.com/tahoe-lafs/tahoe-lafs/archive/b35a8908f4096ccae35da78b0e7dde96d6cf1667.zip#egg=tahoe-lafs
diff --git a/requirements.txt b/requirements.txt
index 3542f6171328df3b3708c3b5b28628573cca3df3..690f442040708188c72f8ce6d34a71be72166cc7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -79,9 +79,6 @@ cffi==1.12.3 \
     --hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
     --hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201 \
     # via bcrypt, cryptography, pynacl
-characteristic==14.3.0 \
-    --hash=sha256:5324ca333fc9705bf0f1a38d36021baa37888590ccfe8c3738c68adfa609bbbb \
-    --hash=sha256:ded68d4e424115ed44e5c83c2a901a0b6157a959079d7591d92106ffd3ada380
 click==7.0 \
     --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
     --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \
@@ -150,7 +147,8 @@ nevow==0.14.4 \
     --hash=sha256:2299a0d2a0c1312040705599d5d571acfea74df82b968c0b9264f6f45266cf6e
 pyasn1-modules==0.2.6 \
     --hash=sha256:43c17a83c155229839cc5c6b868e8d0c6041dba149789b6d6e28801c64821722 \
-    --hash=sha256:e30199a9d221f1b26c885ff3d87fd08694dbbe18ed0e8e405a2a7126d30ce4c0
+    --hash=sha256:e30199a9d221f1b26c885ff3d87fd08694dbbe18ed0e8e405a2a7126d30ce4c0 \
+    # via service-identity
 pyasn1==0.4.6 \
     --hash=sha256:3bb81821d47b17146049e7574ab4bf1e315eb7aead30efe5d6a9ca422c9710be \
     --hash=sha256:b773d5c9196ffbc3a1e13bdf909d446cad80a039aa3340bcad72f395b76ebc86 \
@@ -158,8 +156,6 @@ pyasn1==0.4.6 \
 pycparser==2.19 \
     --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
     # via cffi
-pycryptopp==0.7.1.869544967005693312591928092448767568728501330214 \
-    --hash=sha256:08ad57a1a39b7ed23c173692281da0b8d49d98ad3dcc09f8cca6d901e142699f
 pyhamcrest==1.9.0 \
     --hash=sha256:6b672c02fdf7470df9674ab82263841ce8333fb143f32f021f6cb26f0e512420 \
     --hash=sha256:8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd \
@@ -219,8 +215,8 @@ spake2==0.8 \
     --hash=sha256:c17a614b29ee4126206e22181f70a406c618d3c6c62ca6d6779bce95e9c926f4 \
     --hash=sha256:ce80705f8516c54364931f3b2c9a917ba001500d7f2fc76a0e8cf3bcaf0e30f7 \
     # via magic-wormhole
-https://github.com/tahoe-lafs/tahoe-lafs/archive/6c1a37c95188c1d9a877286ef726280a68d38a4b.zip#egg=tahoe-lafs \
-    --hash=sha256:fda66824f274f003d4cb98cf59104549a4e572f943c25383f2db56615aa6d105
+https://github.com/tahoe-lafs/tahoe-lafs/archive/b35a8908f4096ccae35da78b0e7dde96d6cf1667.zip#egg=tahoe-lafs \
+    --hash=sha256:b93eecd4986e06a1cd0eca25c39610bf7ab5011b654cb82f9a323009762bd9a7
 tqdm==4.32.2 \
     --hash=sha256:14a285392c32b6f8222ecfbcd217838f88e11630affe9006cd0e94c7eff3cb61 \
     --hash=sha256:25d4c0ea02a305a688e7e9c2cdc8f862f989ef2a4701ab28ee963295f5b109ab \
@@ -272,4 +268,4 @@ zope.interface==4.6.0 \
 setuptools==41.0.1 \
     --hash=sha256:a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613 \
     --hash=sha256:c7769ce668c7a333d84e17fe8b524b1c45e7ee9f7908ad0a73e1eda7e6a5aebf \
-    # via pycryptopp, pyhamcrest, zope.interface
+    # via pyhamcrest, zope.interface
diff --git a/src/_zkapauthorizer/_storage_server.py b/src/_zkapauthorizer/_storage_server.py
index a3364f5e4328b7a1b3e89958bfec34c66629b5b5..e917100e209e0cf9877fcf0e519623e2e1208ff3 100644
--- a/src/_zkapauthorizer/_storage_server.py
+++ b/src/_zkapauthorizer/_storage_server.py
@@ -230,7 +230,13 @@ class ZKAPAuthorizerStorageServer(Referenceable):
             same from the perspective of token validation.
         """
         self._validate_tokens(tokens)
-        return self._original.remote_slot_testv_and_readv_and_writev(*a, **kw)
+        # Skip over the remotely exposed method and jump to the underlying
+        # implementation, which accepts one additional parameter that we
+        # know about (and don't expose over the network): renew_leases.  We
+        # always pass False for it because we want to manage leases
+        # completely separately from writes.
+        kw["renew_leases"] = False
+        return self._original.slot_testv_and_readv_and_writev(*a, **kw)
 
     def remote_slot_readv(self, *a, **kw):
         """
diff --git a/src/_zkapauthorizer/tests/test_storage_protocol.py b/src/_zkapauthorizer/tests/test_storage_protocol.py
index f821ad5278814518ffa196b6c8e47e206f2546d1..202d87e52d056bf9cabe58e481dfda4bdef0a54c 100644
--- a/src/_zkapauthorizer/tests/test_storage_protocol.py
+++ b/src/_zkapauthorizer/tests/test_storage_protocol.py
@@ -29,9 +29,11 @@ from testtools import (
 from testtools.matchers import (
+    Always,
     Equals,
     HasLength,
 )
 from testtools.twistedsupport import (
+    failed,
     succeeded,
 )
 from testtools.twistedsupport._deferred import (
     # I'd rather use https://twistedmatrix.com/trac/ticket/8900 but efforts
@@ -121,6 +123,10 @@ class LocalRemote(object):
         provides a simulated remote interface.
     """
     _referenceable = attr.ib()
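+    # When true, check arguments against the schema of the remote interface
+    # before making a call, as Foolscap would.  Tests can set this to False
+    # to deliberately send protocol-violating calls.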
+    check_args = attr.ib(default=True)
 
     def callRemote(self, methname, *args, **kwargs):
         """
@@ -132,7 +138,8 @@
         :return Deferred: The result of the call on the wrapped object.
         """
         schema = self._referenceable.getInterface()[methname]
-        schema.checkAllArgs(args, kwargs, inbound=False)
+        if self.check_args:
+            schema.checkAllArgs(args, kwargs, inbound=False)
         # TODO: Figure out how to call checkResults on the result.
         return execute(
             self._referenceable.doRemoteCall,
@@ -270,7 +277,7 @@ class ShareTests(TestCase):
                 cancel_secret,
             ),
         )
-        [(_, leases)] = get_leases(self.server, storage_index).items()
+        leases = list(self.anonymous_storage_server.get_leases(storage_index))
         self.assertThat(leases, HasLength(2))
 
     @given(
@@ -315,7 +322,7 @@ class ShareTests(TestCase):
         # Based on Tahoe-LAFS' hard-coded renew time.
         RENEW_INTERVAL = 60 * 60 * 24 * 31
 
-        [(_, [lease])] = get_leases(self.server, storage_index).items()
+        [lease] = self.anonymous_storage_server.get_leases(storage_index)
         self.assertThat(
             lease.get_expiration_time(),
             Equals(int(now + RENEW_INTERVAL)),
@@ -394,7 +401,7 @@ class ShareTests(TestCase):
         self.assertThat(
             wrote,
             Equals(True),
-            u"Server rejected a write to a new mutable storage index",
+            u"Server rejected a write to a new mutable slot",
         )
 
         self.assertThat(
@@ -437,6 +444,125 @@ class ShareTests(TestCase):
                     sharenum,
                 ),
             )
+
+    @given(
+        storage_index=storage_indexes(),
+        secrets=tuples(
+            write_enabler_secrets(),
+            lease_renew_secrets(),
+            lease_cancel_secrets(),
+        ),
+        test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(),
+    )
+    def test_mutable_write_preserves_lease(self, storage_index, secrets, test_and_write_vectors_for_shares):
+        """
+        When mutable share data is written using *slot_testv_and_readv_and_writev*,
+        any leases on the corresponding slot remain unchanged.
+        """
+        # Hypothesis causes our storage server to be used many times.  Clean
+        # up between iterations.
+        cleanup_storage_server(self.anonymous_storage_server)
+
+        wrote, read = extract_result(
+            self.client.slot_testv_and_readv_and_writev(
+                storage_index,
+                secrets=secrets,
+                tw_vectors={
+                    k: v.for_call()
+                    for (k, v)
+                    in test_and_write_vectors_for_shares.items()
+                },
+                r_vector=[],
+            ),
+        )
+
+        self.assertThat(
+            wrote,
+            Equals(True),
+            u"Server rejected a write to a new mutable slot",
+        )
+
+        # There are *no* leases on this newly written slot!
+        self.assertThat(
+            list(self.anonymous_storage_server.get_slot_leases(storage_index)),
+            Equals([]),
+        )
+
+    @given(
+        storage_index=storage_indexes(),
+        secrets=tuples(
+            write_enabler_secrets(),
+            lease_renew_secrets(),
+            lease_cancel_secrets(),
+        ),
+        test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(),
+    )
+    def test_client_cannot_control_lease_behavior(self, storage_index, secrets, test_and_write_vectors_for_shares):
+        """
+        If the client passes ``renew_leases`` to *slot_testv_and_readv_and_writev*,
+        the call fails with ``TypeError``, no lease is updated, and no share
+        data is written.
+        """
+        # First, tell the client to let us violate the protocol.  It is the
+        # server's responsibility to defend against this attack.
+        self.local_remote_server.check_args = False
+
+        # The nice Python API doesn't let you do this so we drop down to
+        # the layer below.  We also use positional arguments because they
+        # transit the network differently from keyword arguments.  Yay.
+        d = self.client._rref.callRemote(
+            "slot_testv_and_readv_and_writev",
+            # tokens
+            self.client._get_tokens(),
+            # storage_index
+            storage_index,
+            # secrets
+            secrets,
+            # tw_vectors
+            {
+                k: v.for_call()
+                for (k, v)
+                in test_and_write_vectors_for_shares.items()
+            },
+            # r_vector
+            [],
+            # renew_leases
+            True,
+        )
+
+        # The operation should fail.  I'm not that concerned with how just
+        # yet.
+        self.expectThat(
+            d,
+            failed(Always()),
+        )
+
+        # There should be no shares at the given storage index.
+        d = self.client.slot_readv(
+            storage_index,
+            # Surprise.  shares=None means all shares.
+            shares=None,
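+            # Match the read vector to the write vectors we sent so that any
+            # data which was unexpectedly written would show up in the read
+            # results.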
+            r_vector=list(
+                list(map(write_vector_to_read_vector, vector.write_vector))
+                for vector
+                in test_and_write_vectors_for_shares.values()
+            ),
+        )
+        self.expectThat(
+            d,
+            succeeded(
+                Equals({}),
+            ),
+        )
+
+        # And there should be no leases on those non-shares.
+        self.expectThat(
+            list(self.anonymous_storage_server.get_slot_leases(storage_index)),
+            Equals([]),
+        )
 
 
 def write_vector_to_read_vector(write_vector):
@@ -480,28 +606,6 @@
         writer.remote_close()
 
 
-def get_leases(storage_server, storage_index):
-    """
-    Get all leases for all shares of the given storage index on the given
-    server.
-
-    :param StorageServer storage_server: The storage server on which to find
-        the information.
-
-    :param bytes storage_index: The storage index for which to look up shares.
-
-    :return dict[int, list[LeaseInfo]]: The lease information for each share.
-    """
-    # It's hard to assert much about the lease without knowing about *some*
-    # implementation details of the storage server.  I prefer to know Python
-    # API details rather than on-disk format details.
-    return {
-        sharenum: list(reader._share_file.get_leases())
-        for (sharenum, reader)
-        in storage_server.remote_get_buckets(storage_index).items()
-    }
-
-
 def cleanup_storage_server(storage_server):
     """
     Delete all of the shares held by the given storage server.
diff --git a/tahoe-lafs.nix b/tahoe-lafs.nix
index 649d0cd237310dff41d59fa1fcf0b500e2c47655..fd34d16edbefc955cb27e03e3cd117d82fd28621 100644
--- a/tahoe-lafs.nix
+++ b/tahoe-lafs.nix
@@ -8,11 +8,12 @@ buildPythonPackage rec {
   version = "1.14.0.dev";
   name = "tahoe-lafs-${version}";
   src = fetchFromGitHub {
-    owner = "tahoe-lafs";
+    owner = "LeastAuthority";
     repo = "tahoe-lafs";
-    # HEAD of integration/storage-economics branch as of July 15th 2019.
-    rev = "48bd16a8d9109910122cc2e2c85eb4f378390135";
-    sha256 = "0i8k7zb4381vbblriciz1m33va0mxld6nrhpwvjqr9jk335b1a9q";
+    # HEAD of an integration branch for all of the storage plugin stuff.  Last
+    # updated August 15th 2019.
+    rev = "b35a8908f4096ccae35da78b0e7dde96d6cf1667";
+    sha256 = "0n289hzx2s1jvspmpz2c5iwl0dvnfc8qbiqfmpbl88ymrjp7p6rr";
   };
 
   postPatch = ''
diff --git a/zkapauthorizer.nix b/zkapauthorizer.nix
index c5f52fbac02604a3230adbbd99aa177c4655a3a4..282bdc8036b1aee7a1792c88343220e4cf66d90c 100644
--- a/zkapauthorizer.nix
+++ b/zkapauthorizer.nix
@@ -4,7 +4,8 @@
 }:
 buildPythonPackage rec {
   version = "0.0";
-  name = "zero-knowledge-access-pass-authorizer-${version}";
+  pname = "zero-knowledge-access-pass-authorizer";
+  name = "${pname}-${version}";
   src = ./.;
 
   depsBuildBuild = [