From a8447cc2fff296f776c6f6fcde727403da4774b4 Mon Sep 17 00:00:00 2001
From: Jean-Paul Calderone <exarkun@twistedmatrix.com>
Date: Tue, 3 Dec 2019 14:36:52 -0500
Subject: [PATCH] Add the stat_shares protocol method

---
 src/_zkapauthorizer/_storage_client.py        |   6 +
 src/_zkapauthorizer/_storage_server.py        |  60 +++++++--
 src/_zkapauthorizer/foolscap.py               |  26 ++++
 .../tests/test_storage_protocol.py            | 124 ++++++++++++++++++
 4 files changed, 206 insertions(+), 10 deletions(-)

diff --git a/src/_zkapauthorizer/_storage_client.py b/src/_zkapauthorizer/_storage_client.py
index 3e87eb8..579312e 100644
--- a/src/_zkapauthorizer/_storage_client.py
+++ b/src/_zkapauthorizer/_storage_client.py
@@ -176,6 +176,12 @@ class ZKAPAuthorizerStorageClient(object):
             )
         ))
 
+    def stat_shares(self, storage_indexes):
+        return self._rref.callRemote(
+            "stat_shares",
+            storage_indexes,
+        )
+
     def advise_corrupt_share(
             self,
             share_type,
diff --git a/src/_zkapauthorizer/_storage_server.py b/src/_zkapauthorizer/_storage_server.py
index a221630..d56da89 100644
--- a/src/_zkapauthorizer/_storage_server.py
+++ b/src/_zkapauthorizer/_storage_server.py
@@ -81,6 +81,7 @@ from twisted.internet.interfaces import (
 )
 
 from .foolscap import (
+    ShareStat,
     RIPrivacyPassAuthorizedStorageServer,
 )
 from .storage_common import (
@@ -272,6 +273,13 @@ class ZKAPAuthorizerStorageServer(Referenceable):
             get_share_sizes(self._original, storage_index_or_slot, sharenums)
         )
 
+    def remote_stat_shares(self, storage_indexes_or_slots):
+        return list(
+            dict(stat_share(self._original, storage_index_or_slot))
+            for storage_index_or_slot
+            in storage_indexes_or_slots
+        )
+
     def remote_slot_testv_and_readv_and_writev(
             self,
             passes,
@@ -483,18 +491,12 @@ def get_share_sizes(storage_server, storage_index_or_slot, sharenums):
         share number and the second element is the data size for that share
         number.
     """
-    get_size = None
+    stat = None
     for sharenum, sharepath in get_all_share_paths(storage_server, storage_index_or_slot):
-        if get_size is None:
-            # Figure out if it is a storage index or a slot.
-            with open(sharepath) as share_file:
-                magic = share_file.read(32)
-            if magic == "Tahoe mutable container v1\n" + "\x75\x09\x44\x03\x8e":
-                get_size = get_slot_share_size
-            else:
-                get_size = get_storage_index_share_size
+        if stat is None:
+            stat = get_stat(sharepath)
         if sharenums is None or sharenum in sharenums:
-            yield sharenum, get_size(sharepath)
+            yield sharenum, stat(storage_server, storage_index_or_slot, sharepath).size
 
 
 def get_storage_index_share_size(sharepath):
@@ -511,6 +513,26 @@ def get_storage_index_share_size(sharepath):
     return share_data_length
 
 
+def get_lease_expiration(get_leases, storage_index_or_slot):
+    for lease in get_leases(storage_index_or_slot):
+        return lease.get_expiration_time()
+    return None
+
+
+def stat_bucket(storage_server, storage_index, sharepath):
+    return ShareStat(
+        size=get_storage_index_share_size(sharepath),
+        lease_expiration=get_lease_expiration(storage_server.get_leases, storage_index),
+    )
+
+
+def stat_slot(storage_server, slot, sharepath):
+    return ShareStat(
+        size=get_slot_share_size(sharepath),
+        lease_expiration=get_lease_expiration(storage_server.get_slot_leases, slot),
+    )
+
+
 def get_slot_share_size(sharepath):
     """
     Get the size of a share belonging to a slot (a mutable share).
@@ -525,6 +547,24 @@ def get_slot_share_size(sharepath):
     return share_data_length
 
 
+def stat_share(storage_server, storage_index_or_slot):
+    stat = None
+    for sharenum, sharepath in get_all_share_paths(storage_server, storage_index_or_slot):
+        if stat is None:
+            stat = get_stat(sharepath)
+        yield (sharenum, stat(storage_server, storage_index_or_slot, sharepath))
+
+
+def get_stat(sharepath):
+    # Figure out if it is a storage index or a slot.
+    with open(sharepath) as share_file:
+        magic = share_file.read(32)
+    if magic == "Tahoe mutable container v1\n" + "\x75\x09\x44\x03\x8e":
+        return stat_slot
+    else:
+        return stat_bucket
+
+
 # I don't understand why this is required.
 # ZKAPAuthorizerStorageServer is-a Referenceable.  It seems like
 # the built in adapter should take care of this case.
diff --git a/src/_zkapauthorizer/foolscap.py b/src/_zkapauthorizer/foolscap.py
index 7215dbc..f0137ac 100644
--- a/src/_zkapauthorizer/foolscap.py
+++ b/src/_zkapauthorizer/foolscap.py
@@ -21,6 +21,8 @@ from __future__ import (
     absolute_import,
 )
 
+import attr
+
 from foolscap.constraint import (
     ByteStringConstraint,
 )
@@ -28,6 +30,7 @@ from foolscap.api import (
     Any,
     DictOf,
     ListOf,
+    Copyable,
 )
 from foolscap.remoteinterface import (
     RemoteMethodSchema,
@@ -40,6 +43,20 @@ from allmydata.interfaces import (
     Offset,
 )
 
+@attr.s
+class ShareStat(Copyable):
+    """
+    Represent some metadata about a share.
+
+    :ivar int size: The size, in bytes, of the share.
+
+    :ivar int lease_expiration: The POSIX timestamp of the time at which the
+        lease on this share expires, or None if there is no lease.
+    """
+    size = attr.ib()
+    lease_expiration = attr.ib()
+
+
 # The Foolscap convention seems to be to try to constrain inputs to valid
 # values. So we'll try to limit the number of passes a client can supply.
 # Foolscap may be moving away from this so we may eventually drop this as
@@ -153,6 +170,15 @@ class RIPrivacyPassAuthorizedStorageServer(RemoteInterface):
         """
         return DictOf(int, Offset)
 
+    def stat_shares(
+            storage_indexes_or_slots=ListOf(StorageIndex),
+    ):
+        """
+        Get various metadata about shares in the given storage index or slot.
+        """
+        # Any() should be ShareStat but I don't know how to spell that.
+        return ListOf(ListOf(DictOf(int, Any())))
+
     slot_readv = RIStorageServer["slot_readv"]
 
     slot_testv_and_readv_and_writev = add_passes(
diff --git a/src/_zkapauthorizer/tests/test_storage_protocol.py b/src/_zkapauthorizer/tests/test_storage_protocol.py
index c045a03..53a272a 100644
--- a/src/_zkapauthorizer/tests/test_storage_protocol.py
+++ b/src/_zkapauthorizer/tests/test_storage_protocol.py
@@ -80,6 +80,7 @@ from .strategies import (
     sharenum_sets,
     sizes,
     test_and_write_vectors_for_shares,
+    clocks,
     # Not really a strategy...
     bytes_for_share,
 )
@@ -99,10 +100,14 @@ from ..api import (
 )
 from ..storage_common import (
     slot_testv_and_readv_and_writev_message,
+    get_implied_data_length,
 )
 from ..model import (
     Pass,
 )
+from ..foolscap import (
+    ShareStat,
+)
 
 @attr.s
 class LocalRemote(object):
@@ -334,6 +339,125 @@ class ShareTests(TestCase):
             Equals(int(now + self.server.LEASE_PERIOD.total_seconds())),
         )
 
+    @given(
+        storage_index=storage_indexes(),
+        renew_secret=lease_renew_secrets(),
+        cancel_secret=lease_cancel_secrets(),
+        sharenum=sharenums(),
+        size=sizes(),
+        clock=clocks(),
+    )
+    def test_stat_shares_immutable(self, storage_index, renew_secret, cancel_secret, sharenum, size, clock):
+        """
+        Size and lease information about immutable shares can be retrieved from a
+        storage server.
+ """ + # Hypothesis causes our storage server to be used many times. Clean + # up between iterations. + cleanup_storage_server(self.anonymous_storage_server) + + # anonymous_storage_server uses time.time(), unfortunately. And + # useFixture does not interact very well with Hypothesis. + patch = MonkeyPatch("time.time", clock.seconds) + try: + patch.setUp() + # Create a share we can toy with. + write_toy_shares( + self.anonymous_storage_server, + storage_index, + renew_secret, + cancel_secret, + {sharenum}, + size, + canary=self.canary, + ) + finally: + patch.cleanUp() + + stats = extract_result( + self.client.stat_shares([storage_index]), + ) + # Hard-coded in Tahoe-LAFS + LEASE_INTERVAL = 60 * 60 * 24 * 31 + expected = [{ + sharenum: ShareStat( + size=size, + lease_expiration=int(clock.seconds() + LEASE_INTERVAL), + ), + }] + self.assertThat( + stats, + Equals(expected), + ) + + + @given( + storage_index=storage_indexes(), + secrets=tuples( + write_enabler_secrets(), + lease_renew_secrets(), + lease_cancel_secrets(), + ), + test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(), + clock=clocks(), + ) + def test_stat_shares_mutable(self, storage_index, secrets, test_and_write_vectors_for_shares, clock): + """ + Size and lease information about mutable shares can be retrieved from a + storage server. + """ + # Hypothesis causes our storage server to be used many times. Clean + # up between iterations. + cleanup_storage_server(self.anonymous_storage_server) + + # anonymous_storage_server uses time.time(), unfortunately. And + # useFixture does not interact very well with Hypothesis. + patch = MonkeyPatch("time.time", clock.seconds) + try: + patch.setUp() + # Create a share we can toy with. + wrote, read = extract_result( + self.client.slot_testv_and_readv_and_writev( + storage_index, + secrets=secrets, + tw_vectors={ + k: v.for_call() + for (k, v) + in test_and_write_vectors_for_shares.items() + }, + r_vector=[], + ), + ) + finally: + patch.cleanUp() + self.assertThat( + wrote, + Equals(True), + u"Server rejected a write to a new mutable slot", + ) + + stats = extract_result( + self.client.stat_shares([storage_index]), + ) + # Hard-coded in Tahoe-LAFS + LEASE_INTERVAL = 60 * 60 * 24 * 31 + expected = [{ + sharenum: ShareStat( + size=get_implied_data_length( + vectors.write_vector, + vectors.new_length, + ), + lease_expiration=int(clock.seconds() + LEASE_INTERVAL), + ) + for (sharenum, vectors) + in test_and_write_vectors_for_shares.items() + }] + self.assertThat( + stats, + Equals(expected), + ) + + @given( storage_index=storage_indexes(), renew_secret=lease_renew_secrets(), -- GitLab