diff --git a/src/_zkapauthorizer/_storage_client.py b/src/_zkapauthorizer/_storage_client.py index 8a9acdf15b9dedec2db6226c2932c5f80275f484..237f52f03c7d7869485d7d1acd9cfef88e38de3f 100644 --- a/src/_zkapauthorizer/_storage_client.py +++ b/src/_zkapauthorizer/_storage_client.py @@ -186,14 +186,12 @@ class ZKAPAuthorizerStorageClient(object): current_pass_count = 0 else: current_pass_count = required_passes(BYTES_PER_PASS, {0}, current_size) - new_size = sum( - ( - get_implied_data_length(data_vector, length) - for (_, data_vector, length) - in tw_vectors.values() - ), - 0, + implied_sizes = ( + get_implied_data_length(data_vector, length) + for (_, data_vector, length) + in tw_vectors.values() ) + new_size = sum(implied_sizes, 0) new_pass_count = required_passes(BYTES_PER_PASS, {0}, new_size) pass_count_increase = new_pass_count - current_pass_count passes = self._get_encoded_passes( diff --git a/src/_zkapauthorizer/_storage_server.py b/src/_zkapauthorizer/_storage_server.py index d57fb9c2d9cadb862d68dfa9b07de72416cc8aab..0ac123c82e527506dc3620b90756f51192c5f301 100644 --- a/src/_zkapauthorizer/_storage_server.py +++ b/src/_zkapauthorizer/_storage_server.py @@ -86,11 +86,13 @@ from .storage_common import ( renew_lease_message, slot_testv_and_readv_and_writev_message, has_writes, - get_sharenums, - get_allocated_size, get_implied_data_length, ) +# See allmydata/storage/mutable.py +SLOT_HEADER_SIZE = 468 +LEASE_TRAILER_SIZE = 4 + class MorePassesRequired(Exception): """ Storage operations fail with ``MorePassesRequired`` when they are not @@ -262,6 +264,8 @@ class ZKAPAuthorizerStorageServer(Referenceable): data in already-allocated storage. These cases may not be the same from the perspective of pass validation. 
""" + # print("passes = {}".format(len(passes))) + # print("tw_vectors = {}".format(tw_vectors)) renew_leases = False if has_writes(tw_vectors): @@ -275,26 +279,33 @@ class ZKAPAuthorizerStorageServer(Referenceable): ) if has_active_lease(self._original, storage_index, self._clock.seconds()): current_length = get_slot_share_size(self._original, storage_index, tw_vectors.keys()) - new_length = sum( - ( - get_implied_data_length(data_vector, new_length) - for (_, data_vector, new_length) - in tw_vectors.values() - ), - 0, - ) - required_new_passes = ( - required_passes(BYTES_PER_PASS, {0}, new_length) - - required_passes(BYTES_PER_PASS, {0}, current_length) + # Perform a sum() here because we're going to lie to + # required_passes and tell it the allocated size is for a + # single share. The tw_vectors API lets different shares be + # different sizes, though I don't think the Tahoe-LAFS client + # intentionally causes this to happen. Letting such a case + # pass by the pass calculation would possibly offer free + # storage to altered clients. 
+ implied_sizes = ( + get_implied_data_length(data_vector, new_length) + for (_, data_vector, new_length) + in tw_vectors.values() ) + new_length = sum(implied_sizes, 0) + current_passes = required_passes(BYTES_PER_PASS, {0}, current_length) + new_passes = required_passes(BYTES_PER_PASS, {0}, new_length) + required_new_passes = new_passes - current_passes + # print("Current length: {}".format(current_length)) + # print("New length: {}".format(new_length)) + # print("Current passes: {}".format(current_passes)) + # print("New passes: {}".format(new_passes)) + # print("Required new passes: {}".format(required_new_passes)) if required_new_passes > len(valid_passes): raise MorePassesRequired(len(valid_passes), required_new_passes) else: check_pass_quantity_for_mutable_write(len(valid_passes), tw_vectors) renew_leases = True - - # Skip over the remotely exposed method and jump to the underlying # implementation which accepts one additional parameter that we know # about (and don't expose over the network): renew_leases. We always @@ -352,6 +363,10 @@ def check_pass_quantity_for_write(valid_count, sharenums, allocated_size): :return: ``None`` if the number of valid passes given is sufficient. """ required_pass_count = required_passes(BYTES_PER_PASS, sharenums, allocated_size) + # print("valid_count = {}".format(valid_count)) + # print("sharenums = {}".format(len(sharenums))) + # print("allocated size = {}".format(allocated_size)) + # print("required_pass_count = {}".format(required_pass_count)) if valid_count < required_pass_count: raise MorePassesRequired( valid_count, @@ -369,9 +384,13 @@ def check_pass_quantity_for_mutable_write(valid_count, tw_vectors): :param tw_vectors: See ``allmydata.interfaces.TestAndWriteVectorsForShares``. 
""" - sharenums = get_sharenums(tw_vectors) - allocated_size = get_allocated_size(tw_vectors) - check_pass_quantity_for_write(valid_count, sharenums, allocated_size) + implied_sizes = ( + get_implied_data_length(data_vector, new_length) + for (_, data_vector, new_length) + in tw_vectors.values() + ) + total_implied_size = sum(implied_sizes, 0) + check_pass_quantity_for_write(valid_count, {0}, total_implied_size) def get_slot_share_size(storage_server, storage_index, sharenums): @@ -405,7 +424,18 @@ except Exception as e: print(e) else: - total += metadata.st_size + # Compared to calculating how much *user* data we're + # storing, the on-disk file is larger by at *least* + # SLOT_HEADER_SIZE where various bookkeeping is kept. + # There is also a variable sized trailer which is harder + # to compute. Fortunately it's generally also a lot + # smaller so I'm just going to ignore it for now. + # + # By measuring that the slots are larger than the data the + # user is storing we'll overestimate how many passes are + # required right around the boundary between two costs. + # Oops. + total += (metadata.st_size - SLOT_HEADER_SIZE - LEASE_TRAILER_SIZE) return total diff --git a/src/_zkapauthorizer/storage_common.py b/src/_zkapauthorizer/storage_common.py index 0bdee118a746f5d69ea59e8a4fe47b1826c12eb4..21b7e41d2c65c9287e5b75f241b809c8dba2d4be 100644 --- a/src/_zkapauthorizer/storage_common.py +++ b/src/_zkapauthorizer/storage_common.py @@ -75,7 +75,7 @@ def has_writes(tw_vectors): :return bool: ``True`` if any only if there are writes in ``tw_vectors``.
""" return any( - data + data or (new_length is not None) for (test, data, new_length) in tw_vectors.values() ) diff --git a/src/_zkapauthorizer/tests/__init__.py b/src/_zkapauthorizer/tests/__init__.py index 5019fa0eacdc71e14869e1c2dca6735e6626a433..7bee673d66603b08e562dd9c149a9785c7e12faf 100644 --- a/src/_zkapauthorizer/tests/__init__.py +++ b/src/_zkapauthorizer/tests/__init__.py @@ -42,6 +42,11 @@ def _configure_hypothesis(): deadline=None, ) + settings.register_profile( + "big", + max_examples=100000, + ) + profile_name = environ.get("ZKAPAUTHORIZER_HYPOTHESIS_PROFILE", "default") settings.load_profile(profile_name) diff --git a/src/_zkapauthorizer/tests/test_storage_protocol.py b/src/_zkapauthorizer/tests/test_storage_protocol.py index 74083eedca9c0efd48d085a3ea3a8c550b236ebe..c7488b542db97b5dba0585da20eb6abf78fbdac9 100644 --- a/src/_zkapauthorizer/tests/test_storage_protocol.py +++ b/src/_zkapauthorizer/tests/test_storage_protocol.py @@ -142,23 +142,6 @@ class LocalRemote(object): ) -def assume_one_pass(test_and_write_vectors_for_shares): - """ - Assume that the writes represented by the given ``TestAndWriteVectors`` - will cost at most one pass. - """ - from .._storage_server import ( - BYTES_PER_PASS, - get_sharenums, - get_allocated_size, - required_passes, - ) - tw_vectors = {k: v.for_call() for (k, v) in test_and_write_vectors_for_shares.items()} - sharenums = get_sharenums(tw_vectors) - allocated_size = get_allocated_size(tw_vectors) - assume(required_passes(BYTES_PER_PASS, sharenums, allocated_size) <= 1) - - class ShareTests(TestCase): """ Tests for interaction with shares. @@ -219,9 +202,6 @@ class ShareTests(TestCase): resulting buckets can be read back using *get_buckets* and methods of those resulting buckets. """ - # XXX - assume(len(sharenums) * size < 128 * 1024 * 10) - # Hypothesis causes our storage server to be used many times. Clean # up between iterations. 
cleanup_storage_server(self.anonymous_storage_server) @@ -395,6 +375,7 @@ class ShareTests(TestCase): HasLength(1), ) +# @reproduce_failure('4.7.3', 'AXicY2CgMWAEQhgTCGBsAADOAAc=') @given( storage_index=storage_indexes(), secrets=tuples( @@ -409,9 +390,6 @@ class ShareTests(TestCase): Mutable share data written using *slot_testv_and_readv_and_writev* can be read back as-written and without spending any more passes. """ - # XXX - assume_one_pass(test_and_write_vectors_for_shares) - # Hypothesis causes our storage server to be used many times. Clean # up between iterations. cleanup_storage_server(self.anonymous_storage_server) @@ -462,9 +440,6 @@ class ShareTests(TestCase): *slot_testv_and_readv_and_writev* any leases on the corresponding slot remain the same. """ - # XXX - assume_one_pass(test_and_write_vectors_for_shares) - # Hypothesis causes our storage server to be used many times. Clean # up between iterations. cleanup_storage_server(self.anonymous_storage_server) diff --git a/src/_zkapauthorizer/tests/test_storage_server.py b/src/_zkapauthorizer/tests/test_storage_server.py index 055c43d11abcde2317112eb0b5af837974e0d5ff..bc9d80780da7ff4d7d386d9d7c1bd576e259cf6c 100644 --- a/src/_zkapauthorizer/tests/test_storage_server.py +++ b/src/_zkapauthorizer/tests/test_storage_server.py @@ -202,21 +202,8 @@ class PassValidationTests(TestCase): else: self.fail("expected MorePassesRequired, got {}".format(result)) - @given( - storage_index=storage_indexes(), - secrets=tuples( - write_enabler_secrets(), - lease_renew_secrets(), - lease_cancel_secrets(), - ), - test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(), - ) - def test_extend_mutable_fails_without_passes(self, storage_index, secrets, test_and_write_vectors_for_shares): - """ - If ``remote_slot_testv_and_readv_and_writev`` is invoked to increase the - storage used by shares without supplying passes, the operation fails - with ``MorePassesRequired``. 
- """ + + def _test_extend_mutable_fails_without_passes(self, storage_index, secrets, test_and_write_vectors_for_shares, make_data_vector): # Hypothesis causes our storage server to be used many times. Clean # up between iterations. cleanup_storage_server(self.anonymous_storage_server) @@ -270,14 +257,7 @@ class PassValidationTests(TestCase): storage_index=storage_index, secrets=secrets, tw_vectors={ - sharenum: ( - [], - # Do it by writing past the end. Another thing we - # could do is just specify new_length with a large - # enough value. - [(current_length, b"x" * BYTES_PER_PASS)], - None, - ), + sharenum: make_data_vector(current_length), }, r_vector=[], ), @@ -292,3 +272,56 @@ class PassValidationTests(TestCase): ) else: self.fail("expected MorePassesRequired, got {}".format(result)) + + @given( + storage_index=storage_indexes(), + secrets=tuples( + write_enabler_secrets(), + lease_renew_secrets(), + lease_cancel_secrets(), + ), + test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(), + ) + def test_extend_mutable_with_new_length_fails_without_passes(self, storage_index, secrets, test_and_write_vectors_for_shares): + """ + If ``remote_slot_testv_and_readv_and_writev`` is invoked to increase + storage usage by supplying a ``new_length`` greater than the current + share size and without supplying passes, the operation fails with + ``MorePassesRequired``. 
+ """ + return self._test_extend_mutable_fails_without_passes( + storage_index, + secrets, + test_and_write_vectors_for_shares, + lambda current_length: ( + [], + [], + current_length + BYTES_PER_PASS, + ), + ) + + @given( + storage_index=storage_indexes(), + secrets=tuples( + write_enabler_secrets(), + lease_renew_secrets(), + lease_cancel_secrets(), + ), + test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(), + ) + def test_extend_mutable_with_write_fails_without_passes(self, storage_index, secrets, test_and_write_vectors_for_shares): + """ + If ``remote_slot_testv_and_readv_and_writev`` is invoked to increase + storage usage by performing a write past the end of a share without + supplying passes, the operation fails with ``MorePassesRequired``. + """ + return self._test_extend_mutable_fails_without_passes( + storage_index, + secrets, + test_and_write_vectors_for_shares, + lambda current_length: ( + [], + [(current_length, "x" * BYTES_PER_PASS)], + None, + ), + )