From 38f42ae58df12f5f026fa6c11f0f0d8094edc32c Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Tue, 5 May 2020 12:15:34 -0400
Subject: [PATCH] test the error cases of stat_shares

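stat_shares declines to report a result for share files it cannot
interpret, raising ValueError instead.  Add coverage for two such error
cases: a share file carrying an unrecognized version number, and a
share file truncated in the middle of its header.

To support this, the sparse-share-writing code is factored out of the
large-share test into a reusable helper, whitebox_write_sparse_share,
in storage_common.py (LEASE_INTERVAL moves there as well), and a
share_versions strategy is added for generating arbitrary 32-bit
version numbers.

For reference, the layout the helper writes (believed to match the
Tahoe-LAFS v1 immutable share file format; the field names below are
descriptive only, not Tahoe-LAFS identifiers):

    offset        size      field
    0             4         version (">L", big-endian)
    4             4         share data size, saturated at 2 ** 32 - 1
    8             4         number of leases
    12            <size>    share data (all zeros, written sparsely)
    12 + <size>   72 each   lease records (">L32s32sL": owner number,
                            renew secret, cancel secret, expiration)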
---
 src/_zkapauthorizer/tests/storage_common.py   |  60 +++++++
 src/_zkapauthorizer/tests/strategies.py       |   7 +
 .../tests/test_storage_protocol.py            | 147 ++++++++++++------
 3 files changed, 170 insertions(+), 44 deletions(-)

diff --git a/src/_zkapauthorizer/tests/storage_common.py b/src/_zkapauthorizer/tests/storage_common.py
index 4baf4de..d00a580 100644
--- a/src/_zkapauthorizer/tests/storage_common.py
+++ b/src/_zkapauthorizer/tests/storage_common.py
@@ -16,6 +16,13 @@
 ``allmydata.storage``-related helpers shared across the test suite.
 """
 
+from os import (
+    SEEK_CUR,
+)
+from struct import (
+    pack,
+)
+
 from twisted.python.filepath import (
     FilePath,
 )
@@ -25,6 +32,9 @@ from .strategies import (
     bytes_for_share,
 )
 
+# Hard-coded in Tahoe-LAFS
+LEASE_INTERVAL = 60 * 60 * 24 * 31
+
 def cleanup_storage_server(storage_server):
     """
     Delete all of the shares held by the given storage server.
@@ -73,3 +83,53 @@ def write_toy_shares(
     for (sharenum, writer) in allocated.items():
         writer.remote_write(0, bytes_for_share(sharenum, size))
         writer.remote_close()
+
+
+def whitebox_write_sparse_share(sharepath, version, size, leases, now):
+    """
+    Write a zero-filled immutable share file (sparse, if the filesystem
+    supports it) to the given path.
+
+    This assumes knowledge of the Tahoe-LAFS share file format.
+
+    :param FilePath sharepath: The path to which to write the share file.
+    :param int version: The share version to write to the file.
+    :param int size: The share data size to write.
+    :param list leases: Renewal secrets for leases to write to the share file.
+    :param float now: The current time as a POSIX timestamp.
+    """
+    # Maybe-saturated size (what at least one Tahoe-LAFS comment claims is
+    # appropriate for large files)
+    internal_size = min(size, 2 ** 32 - 1)
+    apparent_size = size
+
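+    # The 12-byte header holds the version, the data size, and the lease
+    # count, each as a big-endian 32-bit unsigned integer.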
+    header_format = ">LLL"
+    lease_format = ">L32s32sL"
+    with sharepath.open("wb") as share:
+        share.write(
+            pack(
+                header_format,
+                version,
+                internal_size,
+                len(leases),
+            ),
+        )
+        # Try to make it sparse by skipping all the data.
+        share.seek(apparent_size - 1, SEEK_CUR)
+        share.write(b"\0")
+        share.write(
+            b"".join(
+                pack(
+                    lease_format,
+                    # no owner
+                    0,
+                    renew,
+                    # no cancel secret
+                    b"",
+                    # expiration timestamp
+                    int(now + LEASE_INTERVAL),
+                )
+                for renew
+                in leases
+            ),
+        )
diff --git a/src/_zkapauthorizer/tests/strategies.py b/src/_zkapauthorizer/tests/strategies.py
index 533b649..5faf8e7 100644
--- a/src/_zkapauthorizer/tests/strategies.py
+++ b/src/_zkapauthorizer/tests/strategies.py
@@ -511,6 +511,13 @@ def write_enabler_secrets():
     )
 
 
+def share_versions():
+    """
+    Build integers which could be Tahoe-LAFS share file version numbers.
+    """
+    return integers(min_value=0, max_value=2 ** 32 - 1)
+
+
 def sharenums():
     """
     Build Tahoe-LAFS share numbers.
diff --git a/src/_zkapauthorizer/tests/test_storage_protocol.py b/src/_zkapauthorizer/tests/test_storage_protocol.py
index a4a7c35..bfaf5fe 100644
--- a/src/_zkapauthorizer/tests/test_storage_protocol.py
+++ b/src/_zkapauthorizer/tests/test_storage_protocol.py
@@ -20,13 +20,6 @@ from __future__ import (
     absolute_import,
 )
 
-from os import (
-    SEEK_CUR,
-)
-from struct import (
-    pack,
-)
-
 from fixtures import (
     MonkeyPatch,
 )
@@ -93,6 +86,7 @@ from .strategies import (
     lease_renew_secrets,
     lease_cancel_secrets,
     write_enabler_secrets,
+    share_versions,
     sharenums,
     sharenum_sets,
     sizes,
@@ -108,8 +102,10 @@ from .fixtures import (
     AnonymousStorageServer,
 )
 from .storage_common import (
+    LEASE_INTERVAL,
     cleanup_storage_server,
     write_toy_shares,
+    whitebox_write_sparse_share,
 )
 from .foolscap import (
     LocalRemote,
@@ -130,9 +126,6 @@ from ..foolscap import (
     ShareStat,
 )
 
-# Hard-coded in Tahoe-LAFS
-LEASE_INTERVAL = 60 * 60 * 24 * 31
-
 class RequiredPassesTests(TestCase):
     """
     Tests for ``required_passes``.
@@ -440,6 +433,99 @@ class ShareTests(TestCase):
             ),
         )
 
+    @given(
+        storage_index=storage_indexes(),
+        sharenum=sharenums(),
+        size=sizes(min_value=2 ** 18, max_value=2 ** 40),
+        clock=clocks(),
+        leases=lists(lease_renew_secrets(), unique=True, min_size=1),
+        version=share_versions(),
+    )
+    def test_stat_shares_immutable_wrong_version(self, storage_index, sharenum, size, clock, leases, version):
+        """
+        If a share file with an unexpected version is found, ``stat_shares``
+        declines to offer a result (by raising ``ValueError``).
+        """
+        assume(version != 1)
+
+        # Hypothesis causes our storage server to be used many times.  Clean
+        # up between iterations.
+        cleanup_storage_server(self.anonymous_storage_server)
+
+        sharedir = FilePath(self.anonymous_storage_server.sharedir).preauthChild(
+            # storage_index_to_dir likes to return multiple segments
+            # joined by pathsep
+            storage_index_to_dir(storage_index),
+        )
+        sharepath = sharedir.child(u"{}".format(sharenum))
+        sharepath.parent().makedirs()
+        whitebox_write_sparse_share(
+            sharepath,
+            version=version,
+            size=size,
+            leases=leases,
+            now=clock.seconds(),
+        )
+
+        self.assertThat(
+            self.client.stat_shares([storage_index]),
+            failed(
+                AfterPreprocessing(
+                    lambda f: f.value,
+                    IsInstance(ValueError),
+                ),
+            ),
+        )
+
+    @given(
+        storage_index=storage_indexes(),
+        sharenum=sharenums(),
+        size=sizes(min_value=2 ** 18, max_value=2 ** 40),
+        clock=clocks(),
+        version=share_versions(),
+        # Encode our knowledge of the share header format and size (12 bytes) right here...
+        position=integers(min_value=0, max_value=11),
+    )
+    def test_stat_shares_truncated_file(self, storage_index, sharenum, size, clock, version, position):
+        """
+        If a share file is truncated in the middle of the header,
+        ``stat_shares`` declines to offer a result (by raising
+        ``ValueError``).
+        """
+        # Hypothesis causes our storage server to be used many times.  Clean
+        # up between iterations.
+        cleanup_storage_server(self.anonymous_storage_server)
+
+        sharedir = FilePath(self.anonymous_storage_server.sharedir).preauthChild(
+            # storage_index_to_dir likes to return multiple segments
+            # joined by pathsep
+            storage_index_to_dir(storage_index),
+        )
+        sharepath = sharedir.child(u"{}".format(sharenum))
+        sharepath.parent().makedirs()
+        whitebox_write_sparse_share(
+            sharepath,
+            version=version,
+            size=size,
+            # We know leases are at the end, where they'll get chopped off, so
+            # we don't bother to write any.
+            leases=[],
+            now=clock.seconds(),
+        )
+        with sharepath.open("wb") as fobj:
+            fobj.truncate(position)
+
+        self.assertThat(
+            self.client.stat_shares([storage_index]),
+            failed(
+                AfterPreprocessing(
+                    lambda f: f.value,
+                    IsInstance(ValueError),
+                ),
+            ),
+        )
+
+
     @skipIf(platform.isWindows(), "Creating large files on Windows (no sparse files) is too slow")
     @given(
         storage_index=storage_indexes(),
@@ -457,8 +543,6 @@ class ShareTests(TestCase):
         share placement and layout.  This is necessary to avoid having to
         write real multi-gigabyte files to exercise the behavior.
         """
-        header_format = ">LLL"
-        lease_format = ">L32s32sL"
         def write_shares(storage_server, storage_index, sharenums, size, canary):
             sharedir = FilePath(storage_server.sharedir).preauthChild(
                 # storage_index_to_dir likes to return multiple segments
@@ -468,38 +552,13 @@ class ShareTests(TestCase):
             for sharenum in sharenums:
                 sharepath = sharedir.child(u"{}".format(sharenum))
                 sharepath.parent().makedirs()
-                with sharepath.open("wb") as share:
-                    share.write(
-                        pack(
-                            header_format,
-                            # Version
-                            1,
-                            # Maybe-saturated size (what at least one
-                            # Tahoe-LAFS comment claims is appropriate for
-                            # large files)
-                            min(size, 2 ** 32 - 1),
-                            len(leases),
-                        ),
-                    )
-                    # Try to make it sparse by skipping all the data.
-                    share.seek(size - 1, SEEK_CUR),
-                    share.write(b"\0")
-                    share.write(
-                        b"".join(
-                            pack(
-                                lease_format,
-                                # no owner
-                                0,
-                                renew,
-                                # no cancel secret
-                                b"",
-                                # expiration timestamp
-                                int(clock.seconds() + LEASE_INTERVAL),
-                            )
-                            for renew
-                            in leases
-                        ),
-                    )
+                whitebox_write_sparse_share(
+                    sharepath,
+                    version=1,
+                    size=size,
+                    leases=leases,
+                    now=clock.seconds(),
+                )
 
         return self._stat_shares_immutable_test(
             storage_index,
-- 
GitLab