diff --git a/.circleci/config.yml b/.circleci/config.yml
index 1b8ac9ff0d363c3b43a154413298d9bfc5b35515..37ce8374349ee3997487c738b8ff1b30319ec520 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -49,7 +49,7 @@ jobs:
 
   tests:
     docker:
-      - image: "circleci/python:3.7"
+      - image: "circleci/python:2.7"
 
     environment:
       PIP_SYNC_REQUIREMENTS: "requirements.txt requirements-tests.txt"
diff --git a/autobahn.nix b/autobahn.nix
new file mode 100644
index 0000000000000000000000000000000000000000..3cc1df2138e783f7bb212b50ba09435773233b88
--- /dev/null
+++ b/autobahn.nix
@@ -0,0 +1,35 @@
+{ lib, buildPythonPackage, fetchFromGitHub, isPy3k,
+  six, txaio, twisted, zope_interface, cffi, trollius, futures, cryptography,
+  mock, pytest
+}:
+buildPythonPackage rec {
+  pname = "autobahn";
+  version = "19.7.1";
+
+  src = fetchFromGitHub {
+    owner = "crossbario";
+    repo = "autobahn-python";
+    rev = "v${version}";
+    sha256 = "1gl2m18s77hlpiglh44plv3k6b965n66ylnxbzgvzcdl9jf3l3q3";
+  };
+
+  propagatedBuildInputs = [ six txaio twisted zope_interface cffi cryptography ] ++
+    (lib.optionals (!isPy3k) [ trollius futures ]);
+
+  checkInputs = [ mock pytest ];
+  checkPhase = ''
+    runHook preCheck
+    USE_TWISTED=true py.test $out
+    runHook postCheck
+  '';
+
+  # XXX The test suite fails for an as-yet-undiagnosed reason; checks are
+  doCheck = false;
+
+  meta = with lib; {
+    description = "WebSocket and WAMP in Python for Twisted and asyncio.";
+    homepage    = "https://crossbar.io/autobahn";
+    license     = licenses.mit;
+    maintainers = with maintainers; [ nand0p ];
+  };
+}
diff --git a/cryptography.nix b/cryptography.nix
new file mode 100644
index 0000000000000000000000000000000000000000..bfa6d30208387b334af63b072b3e409b6d39a063
--- /dev/null
+++ b/cryptography.nix
@@ -0,0 +1,75 @@
+{ stdenv
+, buildPythonPackage
+, fetchFromGitHub
+, openssl
+, cryptography_vectors
+, darwin
+, asn1crypto
+, packaging
+, six
+, pythonOlder
+, enum34
+, ipaddress
+, isPyPy
+, cffi
+, pytest
+, pretend
+, iso8601
+, pytz
+, hypothesis
+}:
+
+buildPythonPackage rec {
+  pname = "cryptography";
+  version = "2.7"; # Also update the hash in cryptography_vectors.nix
+
+  src = fetchFromGitHub {
+    owner = "pyca";
+    repo = "cryptography";
+    rev = "2.7";
+    sha256 = "145byri5c3b8m6dbhwb6yxrv9jrr652l3z1w16mz205z8dz38qja";
+  };
+
+  outputs = [ "out" "dev" ];
+
+  buildInputs = [ openssl ]
+             ++ stdenv.lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
+  propagatedBuildInputs = [
+    asn1crypto
+    packaging
+    six
+  ] ++ stdenv.lib.optional (pythonOlder "3.4") enum34
+  ++ stdenv.lib.optional (pythonOlder "3.3") ipaddress
+  ++ stdenv.lib.optional (!isPyPy) cffi;
+
+  checkInputs = [
+    cryptography_vectors
+    hypothesis
+    iso8601
+    pretend
+    pytest
+    pytz
+  ];
+
+  checkPhase = ''
+    py.test --disable-pytest-warnings tests
+  '';
+
+  # IOKit's dependencies are inconsistent between OSX versions, so this is the best we
+  # can do until nix 1.11's release
+  __impureHostDeps = [ "/usr/lib" ];
+
+  meta = with stdenv.lib; {
+    description = "A package which provides cryptographic recipes and primitives";
+    longDescription = ''
+      Cryptography includes both high level recipes and low level interfaces to
+      common cryptographic algorithms such as symmetric ciphers, message
+      digests, and key derivation functions.
+      Our goal is for it to be your "cryptographic standard library". It
+      supports Python 2.7, Python 3.4+, and PyPy 5.3+.
+    '';
+    homepage = https://github.com/pyca/cryptography;
+    license = with licenses; [ asl20 bsd3 psfl ];
+    maintainers = with maintainers; [ primeos ];
+  };
+}
diff --git a/default.nix b/default.nix
index 3e33118f25b1e31833674edef9e90f8450146c1e..06e185c4d85697fa06bb7f119d5b306c76e12bb7 100644
--- a/default.nix
+++ b/default.nix
@@ -1,2 +1,2 @@
-{ pkgs ? import <nixpkgs> { } }:
-pkgs.python37Packages.callPackage ./secure-access-token-authorizer.nix { }
+{ pkgs ? import <nixpkgs> { overlays = [ (import ./overlays.nix) ]; } }:
+pkgs.python27Packages.callPackage ./secure-access-token-authorizer.nix { }
diff --git a/eliot.nix b/eliot.nix
new file mode 100644
index 0000000000000000000000000000000000000000..f6d6b3061b1ea635bac0e694be407ca8d1b6befb
--- /dev/null
+++ b/eliot.nix
@@ -0,0 +1,27 @@
+{ lib, buildPythonPackage, fetchPypi, zope_interface, pyrsistent, boltons
+, hypothesis, testtools, pytest }:
+buildPythonPackage rec {
+  pname = "eliot";
+  version = "1.7.0";
+
+  src = fetchPypi {
+    inherit pname version;
+    sha256 = "0ylyycf717s5qsrx8b9n6m38vyj2k8328lfhn8y6r31824991wv8";
+  };
+
+  postPatch = ''
+    substituteInPlace setup.py \
+      --replace "boltons >= 19.0.1" boltons
+    # depends on eliot.prettyprint._main which we don't have here.
+    rm eliot/tests/test_prettyprint.py
+  '';
+
+  checkInputs = [ testtools pytest hypothesis ];
+  propagatedBuildInputs = [ zope_interface pyrsistent boltons ];
+
+  meta = with lib; {
+    homepage = https://github.com/itamarst/eliot/;
+    description = "Logging library that tells you why it happened";
+    license = licenses.asl20;
+  };
+}
diff --git a/overlays.nix b/overlays.nix
new file mode 100644
index 0000000000000000000000000000000000000000..7e503de7cb5f4c38b5b9d2b13a25987089b74eeb
--- /dev/null
+++ b/overlays.nix
@@ -0,0 +1,19 @@
+self: super: {
+  python27 = super.python27.override {
+    packageOverrides = python-self: python-super: {
+      # new tahoe-lafs dependency
+      eliot = python-super.callPackage ./eliot.nix { };
+      # new autobahn requires a newer cryptography
+      cryptography = python-super.callPackage ./cryptography.nix { };
+      # new cryptography requires a newer cryptography_vectors
+      cryptography_vectors = python-super.callPackage ./cryptography_vectors.nix { };
+      # new tahoe-lafs depends on a very recent autobahn for better
+      # websocket testing features.
+      autobahn = python-super.callPackage ./autobahn.nix { };
+
+      # tahoe-lafs in nixpkgs is packaged as an application, so we have to
+      # re-package it ourselves as a library.
+      tahoe-lafs = python-super.callPackage ./tahoe-lafs.nix { };
+    };
+  };
+}
diff --git a/requirements-tests.in b/requirements-tests.in
index 41b5c5778d13e356a8188bfd7452b615deb6d480..eb7961896371c2ea899ea51bd04f514eba1fb4d9 100644
--- a/requirements-tests.in
+++ b/requirements-tests.in
@@ -1,2 +1,6 @@
 codecov
 twisted
+attrs
+hypothesis
+testtools
+fixtures
diff --git a/requirements-tests.txt b/requirements-tests.txt
index 8d0fbd4f4371289112e35f7a5a083726187dbc5c..6f1599c6585731861f897f41cdfe76a0f3b3aa29 100644
--- a/requirements-tests.txt
+++ b/requirements-tests.txt
@@ -4,10 +4,13 @@
 #
 #    pip-compile --allow-unsafe --generate-hashes --output-file=requirements-tests.txt requirements-tests.in
 #
+argparse==1.4.0 \
+    --hash=sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4 \
+    --hash=sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314 \
+    # via unittest2
 attrs==19.1.0 \
     --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \
-    --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399 \
-    # via automat, twisted
+    --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399
 automat==0.7.0 \
     --hash=sha256:cbd78b83fa2d81fe2a4d23d258e1661dd7493c9a50ee2f1a5b2cac61c1793b0e \
     --hash=sha256:fdccab66b68498af9ecfa1fa43693abe546014dd25cf28543cbe9d1334916a58 \
@@ -60,10 +63,26 @@ coverage==4.5.3 \
     --hash=sha256:f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260 \
     --hash=sha256:fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a \
     # via codecov
+enum34==1.1.6 \
+    --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+    --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+    --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+    --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
+    # via hypothesis
+extras==1.0.0 \
+    --hash=sha256:132e36de10b9c91d5d4cc620160a476e0468a88f16c9431817a6729611a81b4e \
+    --hash=sha256:f689f08df47e2decf76aa6208c081306e7bd472630eb1ec8a875c67de2366e87 \
+    # via testtools
+fixtures==3.0.0 \
+    --hash=sha256:2a551b0421101de112d9497fb5f6fd25e5019391c0fbec9bad591ecae981420d \
+    --hash=sha256:fcf0d60234f1544da717a9738325812de1f42c2fa085e2d9252d8fff5712b2ef
 hyperlink==19.0.0 \
     --hash=sha256:4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654 \
     --hash=sha256:ab4a308feb039b04f855a020a6eda3b18ca5a68e6d8f8c899cbe9e653721d04f \
     # via twisted
+hypothesis==4.27.0 \
+    --hash=sha256:ad01a2a4ba7da42372bc78a024b1d7e0f7ae886feca32779a36ca7dd0f6a3c45 \
+    --hash=sha256:bec25879a635b590098e6683e6740ef84b412405e47859d2410788f9567d74c3
 idna==2.8 \
     --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
     --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
@@ -72,10 +91,22 @@ incremental==17.5.0 \
     --hash=sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f \
     --hash=sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3 \
     # via twisted
+linecache2==1.0.0 \
+    --hash=sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c \
+    --hash=sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef \
+    # via traceback2
+pbr==5.4.0 \
+    --hash=sha256:36ebd78196e8c9588c972f5571230a059ff83783fabbbbedecc07be263ccd7e6 \
+    --hash=sha256:5a03f59455ad54f01a94c15829b8b70065462b7bd8d5d7e983306b59127fc841 \
+    # via fixtures, testtools
 pyhamcrest==1.9.0 \
     --hash=sha256:6b672c02fdf7470df9674ab82263841ce8333fb143f32f021f6cb26f0e512420 \
     --hash=sha256:8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd \
     # via twisted
+python-mimeparse==1.6.0 \
+    --hash=sha256:76e4b03d700a641fd7761d3cd4fdbbdcd787eade1ebfac43f877016328334f78 \
+    --hash=sha256:a295f03ff20341491bfe4717a39cd0a8cc9afad619ba44b77e86b0ab8a2b8282 \
+    # via testtools
 requests==2.22.0 \
     --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \
     --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 \
@@ -83,9 +114,20 @@ requests==2.22.0 \
 six==1.12.0 \
     --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
     --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
-    # via automat, pyhamcrest
+    # via automat, fixtures, pyhamcrest, testtools, unittest2
+testtools==2.3.0 \
+    --hash=sha256:5827ec6cf8233e0f29f51025addd713ca010061204fdea77484a2934690a0559 \
+    --hash=sha256:a2be448869171b6e0f26d9544088b8b98439ec180ce272040236d570a40bcbed
+traceback2==1.4.0 \
+    --hash=sha256:05acc67a09980c2ecfedd3423f7ae0104839eccb55fc645773e1caa0951c3030 \
+    --hash=sha256:8253cebec4b19094d67cc5ed5af99bf1dba1285292226e98a31929f87a5d6b23 \
+    # via testtools, unittest2
 twisted==19.2.1 \
     --hash=sha256:fa2c04c2d68a9be7fc3975ba4947f653a57a656776f24be58ff0fe4b9aaf3e52
+unittest2==1.1.0 \
+    --hash=sha256:13f77d0875db6d9b435e1d4f41e74ad4cc2eb6e1d5c824996092b3430f088bb8 \
+    --hash=sha256:22882a0e418c284e1f718a822b3b022944d53d2d908e1690b319a9d3eb2c0579 \
+    # via testtools
 urllib3==1.25.3 \
     --hash=sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1 \
     --hash=sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232 \
diff --git a/requirements.in b/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..a65a2be8d945c9391c604f71ab21c8e36ec0fe07
--- /dev/null
+++ b/requirements.in
@@ -0,0 +1,4 @@
+attrs
+zope.interface
+twisted
+https://github.com/tahoe-lafs/tahoe-lafs/archive/48bd16a8d9109910122cc2e2c85eb4f378390135.zip#egg=tahoe-lafs
diff --git a/requirements.txt b/requirements.txt
index 9b137ccd23c3958a7e6a3bc3d7342f6983e7a786..de68811252e40ec68f47411fa725f3114e410428 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,5 +2,275 @@
 # This file is autogenerated by pip-compile
 # To update, run:
 #
-#    pip-compile --allow-unsafe --generate-hashes --output-file=requirements.txt
+#    pip-compile --allow-unsafe --generate-hashes --output-file=requirements.txt requirements.in
 #
+appdirs==1.4.3 \
+    --hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92 \
+    --hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \
+    # via twisted
+argparse==1.4.0 \
+    --hash=sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4 \
+    --hash=sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314 \
+    # via zfec
+asn1crypto==0.24.0 \
+    --hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \
+    --hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \
+    # via cryptography
+attrs==19.1.0 \
+    --hash=sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79 \
+    --hash=sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399
+autobahn[twisted]==19.7.1 \
+    --hash=sha256:70f0cfb8005b5429df5709acf5d66a8eba00669765547029371648dffd4a0470 \
+    --hash=sha256:89f94a1535673b1655df28ef91e96b7f34faea76da04a5e56441c9ac779a2f9f \
+    # via magic-wormhole
+automat==0.7.0 \
+    --hash=sha256:cbd78b83fa2d81fe2a4d23d258e1661dd7493c9a50ee2f1a5b2cac61c1793b0e \
+    --hash=sha256:fdccab66b68498af9ecfa1fa43693abe546014dd25cf28543cbe9d1334916a58 \
+    # via magic-wormhole, twisted, txtorcon
+bcrypt==3.1.7 \
+    --hash=sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89 \
+    --hash=sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42 \
+    --hash=sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294 \
+    --hash=sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161 \
+    --hash=sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31 \
+    --hash=sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5 \
+    --hash=sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c \
+    --hash=sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0 \
+    --hash=sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de \
+    --hash=sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e \
+    --hash=sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052 \
+    --hash=sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09 \
+    --hash=sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105 \
+    --hash=sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133 \
+    --hash=sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7 \
+    --hash=sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc \
+    # via twisted
+boltons==19.1.0 \
+    --hash=sha256:7aa10b0f5b015678458a7d0422961fc0c7e823c05e644094c0e564931ce0b0df \
+    --hash=sha256:c32b2d121331a9bc7c220050d4273f3aa359b7569cb4794188e71524603113dc \
+    # via eliot
+cffi==1.12.3 \
+    --hash=sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774 \
+    --hash=sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d \
+    --hash=sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90 \
+    --hash=sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b \
+    --hash=sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63 \
+    --hash=sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45 \
+    --hash=sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25 \
+    --hash=sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3 \
+    --hash=sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b \
+    --hash=sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647 \
+    --hash=sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016 \
+    --hash=sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4 \
+    --hash=sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb \
+    --hash=sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753 \
+    --hash=sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7 \
+    --hash=sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9 \
+    --hash=sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f \
+    --hash=sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8 \
+    --hash=sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f \
+    --hash=sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc \
+    --hash=sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42 \
+    --hash=sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3 \
+    --hash=sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909 \
+    --hash=sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45 \
+    --hash=sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d \
+    --hash=sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512 \
+    --hash=sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff \
+    --hash=sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201 \
+    # via bcrypt, cryptography, pynacl
+characteristic==14.3.0 \
+    --hash=sha256:5324ca333fc9705bf0f1a38d36021baa37888590ccfe8c3738c68adfa609bbbb \
+    --hash=sha256:ded68d4e424115ed44e5c83c2a901a0b6157a959079d7591d92106ffd3ada380
+click==7.0 \
+    --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
+    --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \
+    # via magic-wormhole
+constantly==15.1.0 \
+    --hash=sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35 \
+    --hash=sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d \
+    # via twisted
+cryptography==2.7 \
+    --hash=sha256:24b61e5fcb506424d3ec4e18bca995833839bf13c59fc43e530e488f28d46b8c \
+    --hash=sha256:25dd1581a183e9e7a806fe0543f485103232f940fcfc301db65e630512cce643 \
+    --hash=sha256:3452bba7c21c69f2df772762be0066c7ed5dc65df494a1d53a58b683a83e1216 \
+    --hash=sha256:41a0be220dd1ed9e998f5891948306eb8c812b512dc398e5a01846d855050799 \
+    --hash=sha256:5751d8a11b956fbfa314f6553d186b94aa70fdb03d8a4d4f1c82dcacf0cbe28a \
+    --hash=sha256:5f61c7d749048fa6e3322258b4263463bfccefecb0dd731b6561cb617a1d9bb9 \
+    --hash=sha256:72e24c521fa2106f19623a3851e9f89ddfdeb9ac63871c7643790f872a305dfc \
+    --hash=sha256:7b97ae6ef5cba2e3bb14256625423413d5ce8d1abb91d4f29b6d1a081da765f8 \
+    --hash=sha256:961e886d8a3590fd2c723cf07be14e2a91cf53c25f02435c04d39e90780e3b53 \
+    --hash=sha256:96d8473848e984184b6728e2c9d391482008646276c3ff084a1bd89e15ff53a1 \
+    --hash=sha256:ae536da50c7ad1e002c3eee101871d93abdc90d9c5f651818450a0d3af718609 \
+    --hash=sha256:b0db0cecf396033abb4a93c95d1602f268b3a68bb0a9cc06a7cff587bb9a7292 \
+    --hash=sha256:cfee9164954c186b191b91d4193989ca994703b2fff406f71cf454a2d3c7327e \
+    --hash=sha256:e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6 \
+    --hash=sha256:f27d93f0139a3c056172ebb5d4f9056e770fdf0206c2f422ff2ebbad142e09ed \
+    --hash=sha256:f57b76e46a58b63d1c6375017f4564a28f19a5ca912691fd2e4261b3414b618d \
+    # via autobahn, pyopenssl, service-identity, twisted
+eliot==1.7.0 \
+    --hash=sha256:68f3901211288c6c3cb2d05124069a42fa8d4635362dd4b3c6459f701cf39e7a \
+    --hash=sha256:816d2fd04f8b935c008f3cbeb75e1645e3fdf2ef58f1d16cf8cfc568a84a4173
+enum34==1.1.6 \
+    --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
+    --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
+    --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
+    --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
+    # via cryptography
+foolscap==0.13.1 \
+    --hash=sha256:4e400e377683f328a70fd5b808202576397c30e2b5a0b8773d22608518fb9f95 \
+    --hash=sha256:e2773b4901430b8852da9d691e91984a5e2118da0448c192d9ec5aa81db91d6b
+hkdf==0.0.3 \
+    --hash=sha256:622a31c634bc185581530a4b44ffb731ed208acf4614f9c795bdd70e77991dca \
+    # via magic-wormhole, spake2
+humanize==0.5.1 \
+    --hash=sha256:a43f57115831ac7c70de098e6ac46ac13be00d69abbf60bdcac251344785bb19 \
+    # via magic-wormhole
+hyperlink==19.0.0 \
+    --hash=sha256:4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654 \
+    --hash=sha256:ab4a308feb039b04f855a020a6eda3b18ca5a68e6d8f8c899cbe9e653721d04f \
+    # via twisted
+idna==2.8 \
+    --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \
+    --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \
+    # via hyperlink, twisted
+incremental==17.5.0 \
+    --hash=sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f \
+    --hash=sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3 \
+    # via twisted, txtorcon
+ipaddress==1.0.22 \
+    --hash=sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794 \
+    --hash=sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c \
+    # via cryptography, service-identity, txtorcon
+magic-wormhole==0.11.2 \
+    --hash=sha256:40184aac0ea1b978726a509156400754d3c7a89da0724a8cf90df87d73217193 \
+    --hash=sha256:ae79667bdbb39fba7d315e36718db383651b45421813366cfaceb069e222d905
+nevow==0.14.4 \
+    --hash=sha256:0c6f3d897403216619a6eb8402f1248fbbade7b29c91a70a89092a06bbcb09be \
+    --hash=sha256:2299a0d2a0c1312040705599d5d571acfea74df82b968c0b9264f6f45266cf6e
+pyasn1-modules==0.2.5 \
+    --hash=sha256:ef721f68f7951fab9b0404d42590f479e30d9005daccb1699b0a51bb4177db96 \
+    --hash=sha256:f309b6c94724aeaf7ca583feb1cc70430e10d7551de5e36edfc1ae6909bcfb3c \
+    # via service-identity
+pyasn1==0.4.5 \
+    --hash=sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7 \
+    --hash=sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e \
+    # via pyasn1-modules, service-identity, twisted
+pycparser==2.19 \
+    --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
+    # via cffi
+pycryptopp==0.7.1.869544967005693312591928092448767568728501330214 \
+    --hash=sha256:08ad57a1a39b7ed23c173692281da0b8d49d98ad3dcc09f8cca6d901e142699f
+pyhamcrest==1.9.0 \
+    --hash=sha256:6b672c02fdf7470df9674ab82263841ce8333fb143f32f021f6cb26f0e512420 \
+    --hash=sha256:8ffaa0a53da57e89de14ced7185ac746227a8894dbd5a3c718bf05ddbd1d56cd \
+    # via twisted
+pynacl==1.3.0 \
+    --hash=sha256:05c26f93964373fc0abe332676cb6735f0ecad27711035b9472751faa8521255 \
+    --hash=sha256:0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c \
+    --hash=sha256:0d0a8171a68edf51add1e73d2159c4bc19fc0718e79dec51166e940856c2f28e \
+    --hash=sha256:1c780712b206317a746ace34c209b8c29dbfd841dfbc02aa27f2084dd3db77ae \
+    --hash=sha256:2424c8b9f41aa65bbdbd7a64e73a7450ebb4aa9ddedc6a081e7afcc4c97f7621 \
+    --hash=sha256:2d23c04e8d709444220557ae48ed01f3f1086439f12dbf11976e849a4926db56 \
+    --hash=sha256:30f36a9c70450c7878053fa1344aca0145fd47d845270b43a7ee9192a051bf39 \
+    --hash=sha256:37aa336a317209f1bb099ad177fef0da45be36a2aa664507c5d72015f956c310 \
+    --hash=sha256:4943decfc5b905748f0756fdd99d4f9498d7064815c4cf3643820c9028b711d1 \
+    --hash=sha256:57ef38a65056e7800859e5ba9e6091053cd06e1038983016effaffe0efcd594a \
+    --hash=sha256:5bd61e9b44c543016ce1f6aef48606280e45f892a928ca7068fba30021e9b786 \
+    --hash=sha256:6482d3017a0c0327a49dddc8bd1074cc730d45db2ccb09c3bac1f8f32d1eb61b \
+    --hash=sha256:7d3ce02c0784b7cbcc771a2da6ea51f87e8716004512493a2b69016326301c3b \
+    --hash=sha256:a14e499c0f5955dcc3991f785f3f8e2130ed504fa3a7f44009ff458ad6bdd17f \
+    --hash=sha256:a39f54ccbcd2757d1d63b0ec00a00980c0b382c62865b61a505163943624ab20 \
+    --hash=sha256:aabb0c5232910a20eec8563503c153a8e78bbf5459490c49ab31f6adf3f3a415 \
+    --hash=sha256:bd4ecb473a96ad0f90c20acba4f0bf0df91a4e03a1f4dd6a4bdc9ca75aa3a715 \
+    --hash=sha256:e2da3c13307eac601f3de04887624939aca8ee3c9488a0bb0eca4fb9401fc6b1 \
+    --hash=sha256:f67814c38162f4deb31f68d590771a29d5ae3b1bd64b75cf232308e5c74777e0 \
+    # via magic-wormhole
+pyopenssl==19.0.0 \
+    --hash=sha256:aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200 \
+    --hash=sha256:c727930ad54b10fc157015014b666f2d8b41f70c0d03e83ab67624fd3dd5d1e6 \
+    # via foolscap, twisted
+pyrsistent==0.15.3 \
+    --hash=sha256:50cffebc87ca91b9d4be2dcc2e479272bcb466b5a0487b6c271f7ddea6917e14 \
+    # via eliot
+pyutil==3.3.0 \
+    --hash=sha256:8c4d4bf668c559186389bb9bce99e4b1b871c09ba252a756ccaacd2b8f401848 \
+    # via zfec
+pyyaml==5.1.1 \
+    --hash=sha256:57acc1d8533cbe51f6662a55434f0dbecfa2b9eaf115bede8f6fd00115a0c0d3 \
+    --hash=sha256:588c94b3d16b76cfed8e0be54932e5729cc185caffaa5a451e7ad2f7ed8b4043 \
+    --hash=sha256:68c8dd247f29f9a0d09375c9c6b8fdc64b60810ebf07ba4cdd64ceee3a58c7b7 \
+    --hash=sha256:70d9818f1c9cd5c48bb87804f2efc8692f1023dac7f1a1a5c61d454043c1d265 \
+    --hash=sha256:86a93cccd50f8c125286e637328ff4eef108400dd7089b46a7be3445eecfa391 \
+    --hash=sha256:a0f329125a926876f647c9fa0ef32801587a12328b4a3c741270464e3e4fa778 \
+    --hash=sha256:a3c252ab0fa1bb0d5a3f6449a4826732f3eb6c0270925548cac342bc9b22c225 \
+    --hash=sha256:b4bb4d3f5e232425e25dda21c070ce05168a786ac9eda43768ab7f3ac2770955 \
+    --hash=sha256:cd0618c5ba5bda5f4039b9398bb7fb6a317bb8298218c3de25c47c4740e4b95e \
+    --hash=sha256:ceacb9e5f8474dcf45b940578591c7f3d960e82f926c707788a570b51ba59190 \
+    --hash=sha256:fe6a88094b64132c4bb3b631412e90032e8cfe9745a58370462240b8cb7553cd
+service-identity==18.1.0 \
+    --hash=sha256:001c0707759cb3de7e49c078a7c0c9cd12594161d3bf06b9c254fdcb1a60dc36 \
+    --hash=sha256:0858a54aabc5b459d1aafa8a518ed2081a285087f349fe3e55197989232e2e2d \
+    # via twisted
+six==1.12.0 \
+    --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+    --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+    # via autobahn, automat, bcrypt, cryptography, eliot, magic-wormhole, pyhamcrest, pynacl, pyopenssl, pyrsistent, txaio
+spake2==0.8 \
+    --hash=sha256:c17a614b29ee4126206e22181f70a406c618d3c6c62ca6d6779bce95e9c926f4 \
+    --hash=sha256:ce80705f8516c54364931f3b2c9a917ba001500d7f2fc76a0e8cf3bcaf0e30f7 \
+    # via magic-wormhole
+https://github.com/tahoe-lafs/tahoe-lafs/archive/48bd16a8d9109910122cc2e2c85eb4f378390135.zip#egg=tahoe-lafs \
+    --hash=sha256:982e67ee3e515f400ab88b7ce02dba8a7c0dd897eb07f3111a80a06778e2b6e4
+tqdm==4.32.2 \
+    --hash=sha256:14a285392c32b6f8222ecfbcd217838f88e11630affe9006cd0e94c7eff3cb61 \
+    --hash=sha256:25d4c0ea02a305a688e7e9c2cdc8f862f989ef2a4701ab28ee963295f5b109ab \
+    # via magic-wormhole
+twisted[conch,tls]==19.2.1 \
+    --hash=sha256:fa2c04c2d68a9be7fc3975ba4947f653a57a656776f24be58ff0fe4b9aaf3e52
+txaio==18.8.1 \
+    --hash=sha256:67e360ac73b12c52058219bb5f8b3ed4105d2636707a36a7cdafb56fe06db7fe \
+    --hash=sha256:b6b235d432cc58ffe111b43e337db71a5caa5d3eaa88f0eacf60b431c7626ef5 \
+    # via autobahn
+txtorcon==19.0.0 \
+    --hash=sha256:196b7b5726e3c69a602071295368da9205c0cd1e26aba37536d3b8fb3b08ac9d \
+    --hash=sha256:3731b740653e3f551412744f1fcd7fa6f04aa9fa37c90dc6c9152e619886bf3b \
+    # via magic-wormhole
+zfec==1.5.3 \
+    --hash=sha256:b41bd4b0af9c6b3a78bd6734e1e4511475944164375e6241b53df518a366922b
+zope.interface==4.6.0 \
+    --hash=sha256:086707e0f413ff8800d9c4bc26e174f7ee4c9c8b0302fbad68d083071822316c \
+    --hash=sha256:1157b1ec2a1f5bf45668421e3955c60c610e31913cc695b407a574efdbae1f7b \
+    --hash=sha256:11ebddf765bff3bbe8dbce10c86884d87f90ed66ee410a7e6c392086e2c63d02 \
+    --hash=sha256:14b242d53f6f35c2d07aa2c0e13ccb710392bcd203e1b82a1828d216f6f6b11f \
+    --hash=sha256:1b3d0dcabc7c90b470e59e38a9acaa361be43b3a6ea644c0063951964717f0e5 \
+    --hash=sha256:20a12ab46a7e72b89ce0671e7d7a6c3c1ca2c2766ac98112f78c5bddaa6e4375 \
+    --hash=sha256:298f82c0ab1b182bd1f34f347ea97dde0fffb9ecf850ecf7f8904b8442a07487 \
+    --hash=sha256:2f6175722da6f23dbfc76c26c241b67b020e1e83ec7fe93c9e5d3dd18667ada2 \
+    --hash=sha256:3b877de633a0f6d81b600624ff9137312d8b1d0f517064dfc39999352ab659f0 \
+    --hash=sha256:4265681e77f5ac5bac0905812b828c9fe1ce80c6f3e3f8574acfb5643aeabc5b \
+    --hash=sha256:550695c4e7313555549aa1cdb978dc9413d61307531f123558e438871a883d63 \
+    --hash=sha256:5f4d42baed3a14c290a078e2696c5f565501abde1b2f3f1a1c0a94fbf6fbcc39 \
+    --hash=sha256:62dd71dbed8cc6a18379700701d959307823b3b2451bdc018594c48956ace745 \
+    --hash=sha256:7040547e5b882349c0a2cc9b50674b1745db551f330746af434aad4f09fba2cc \
+    --hash=sha256:7e099fde2cce8b29434684f82977db4e24f0efa8b0508179fce1602d103296a2 \
+    --hash=sha256:7e5c9a5012b2b33e87980cee7d1c82412b2ebabcb5862d53413ba1a2cfde23aa \
+    --hash=sha256:81295629128f929e73be4ccfdd943a0906e5fe3cdb0d43ff1e5144d16fbb52b1 \
+    --hash=sha256:95cc574b0b83b85be9917d37cd2fad0ce5a0d21b024e1a5804d044aabea636fc \
+    --hash=sha256:968d5c5702da15c5bf8e4a6e4b67a4d92164e334e9c0b6acf080106678230b98 \
+    --hash=sha256:9e998ba87df77a85c7bed53240a7257afe51a07ee6bc3445a0bf841886da0b97 \
+    --hash=sha256:a0c39e2535a7e9c195af956610dba5a1073071d2d85e9d2e5d789463f63e52ab \
+    --hash=sha256:a15e75d284178afe529a536b0e8b28b7e107ef39626a7809b4ee64ff3abc9127 \
+    --hash=sha256:a6a6ff82f5f9b9702478035d8f6fb6903885653bff7ec3a1e011edc9b1a7168d \
+    --hash=sha256:b639f72b95389620c1f881d94739c614d385406ab1d6926a9ffe1c8abbea23fe \
+    --hash=sha256:bad44274b151d46619a7567010f7cde23a908c6faa84b97598fd2f474a0c6891 \
+    --hash=sha256:bbcef00d09a30948756c5968863316c949d9cedbc7aabac5e8f0ffbdb632e5f1 \
+    --hash=sha256:d788a3999014ddf416f2dc454efa4a5dbeda657c6aba031cf363741273804c6b \
+    --hash=sha256:eed88ae03e1ef3a75a0e96a55a99d7937ed03e53d0cffc2451c208db445a2966 \
+    --hash=sha256:f99451f3a579e73b5dd58b1b08d1179791d49084371d9a47baad3b22417f0317
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==41.0.1 \
+    --hash=sha256:a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613 \
+    --hash=sha256:c7769ce668c7a333d84e17fe8b524b1c45e7ee9f7908ad0a73e1eda7e6a5aebf \
+    # via pycryptopp, pyhamcrest, zope.interface
diff --git a/secure-access-token-authorizer.nix b/secure-access-token-authorizer.nix
index 3b916b34997f45552dd8a4cacdad917935accdf6..b76b884c87c66730b85cd8b1aa9eda2aa4e6339f 100644
--- a/secure-access-token-authorizer.nix
+++ b/secure-access-token-authorizer.nix
@@ -1,7 +1,31 @@
-{ buildPythonPackage, sphinx }:
+{ buildPythonPackage, sphinx, circleci-cli
+, attrs, zope_interface, twisted, tahoe-lafs
+, fixtures, testtools, hypothesis, pyflakes
+}:
 buildPythonPackage rec {
   version = "0.0";
   name = "secure-access-token-authorizer-${version}";
   src = ./.;
-  depsBuildBuild = [ sphinx ];
+  depsBuildBuild = [
+    sphinx
+    circleci-cli
+  ];
+
+  propagatedBuildInputs = [
+    attrs
+    zope_interface
+    twisted
+    tahoe-lafs
+  ];
+
+  checkInputs = [
+    fixtures
+    testtools
+    hypothesis
+  ];
+
+  checkPhase = ''
+    ${pyflakes}/bin/pyflakes src/_secureaccesstokenauthorizer
+    ${twisted}/bin/trial _secureaccesstokenauthorizer
+  '';
 }
diff --git a/setup.cfg b/setup.cfg
index b26f51c7d2a99651edd8bf75fe8a5ced8069c170..ea683eefb824023d09d2cdcc42afef4d5c9803b7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -27,4 +27,5 @@ package_dir =
 # the plugins package we want to ship.
 packages =
     _secureaccesstokenauthorizer
+    _secureaccesstokenauthorizer.tests
     twisted.plugins
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 0000000000000000000000000000000000000000..5dcc7a8b4dc3eb06fef0a879f02209e866a65ae1
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,14 @@
+{ pkgs ? import <nixpkgs> { overlays = [ (import ./overlays.nix) ]; } }:
+let
+  satauthorizer = pkgs.callPackage ./default.nix { };
+in
+  (pkgs.python27.buildEnv.override {
+    extraLibs = with pkgs.python27Packages; [
+      fixtures
+      testtools
+      hypothesis
+      pyhamcrest
+      satauthorizer
+    ];
+    ignoreCollisions = true;
+  }).env
diff --git a/src/_secureaccesstokenauthorizer/__init__.py b/src/_secureaccesstokenauthorizer/__init__.py
index 4990b680dc807e489646b6ae427a5e4973214bd0..b796f605da7b7b0d29a9851188b71a79f69c1feb 100644
--- a/src/_secureaccesstokenauthorizer/__init__.py
+++ b/src/_secureaccesstokenauthorizer/__init__.py
@@ -12,4 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+__all__ = [
+    "__version__",
+]
+
 __version__ = "0.0"
diff --git a/src/_secureaccesstokenauthorizer/_plugin.py b/src/_secureaccesstokenauthorizer/_plugin.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba09200c97c053c641466f71da01fd93f39290ac
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/_plugin.py
@@ -0,0 +1,80 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The Twisted plugin that glues the Secure Access Token system into
+Tahoe-LAFS.
+"""
+
+import attr
+
+from zope.interface import (
+    implementer,
+)
+
+from twisted.internet.defer import (
+    succeed,
+)
+
+from allmydata.interfaces import (
+    IFoolscapStoragePlugin,
+    IAnnounceableStorageServer,
+)
+
+from .api import (
+    SecureAccessTokenAuthorizerStorageServer,
+    SecureAccessTokenAuthorizerStorageClient,
+)
+
+from ._storage_server import (
+    TOKEN_LENGTH,
+)
+
+@implementer(IAnnounceableStorageServer)
+@attr.s
+class AnnounceableStorageServer(object):
+    announcement = attr.ib()
+    storage_server = attr.ib()
+
+
+
+@implementer(IFoolscapStoragePlugin)
+class SecureAccessTokenAuthorizer(object):
+    """
+    A storage plugin which provides a token-based access control mechanism on
+    top of the Tahoe-LAFS built-in storage server interface.
+    """
+    name = u"privatestorageio-satauthz-v1"
+
+    def get_storage_server(self, configuration, get_anonymous_storage_server):
+        announcement = {}
+        storage_server = SecureAccessTokenAuthorizerStorageServer(
+            get_anonymous_storage_server(),
+            **configuration
+        )
+        return succeed(
+            AnnounceableStorageServer(
+                announcement,
+                storage_server,
+            ),
+        )
+
+
+    def get_storage_client(self, configuration, announcement, get_rref):
+        return succeed(
+            SecureAccessTokenAuthorizerStorageClient(
+                get_rref,
+                lambda: [b"x" * TOKEN_LENGTH],
+            )
+        )
diff --git a/src/_secureaccesstokenauthorizer/_storage_client.py b/src/_secureaccesstokenauthorizer/_storage_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ba11b25c0a29409c95d44743b4737524147f627
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/_storage_client.py
@@ -0,0 +1,164 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A Tahoe-LAFS ``IStorageServer`` implementation which presents tokens
+per-call to prove authorization for writes and lease updates.
+
+This is the client part of a storage access protocol.  The server part is
+implemented in ``_storage_server.py``.
+"""
+
+import attr
+
+from zope.interface import (
+    implementer,
+)
+
+from allmydata.interfaces import (
+    IStorageServer,
+)
+
+@implementer(IStorageServer)
+@attr.s
+class SecureAccessTokenAuthorizerStorageClient(object):
+    """
+    An implementation of the client portion of an access-token-based
+    authorization scheme on top of the basic Tahoe-LAFS storage protocol.
+
+    This ``IStorageServer`` implementation aims to offer the same storage
+    functionality as Tahoe-LAFS' built-in storage server but with an added
+    layer of token-based authorization for some operations.  The Python
+    interface exposed to application code is the same but the network protocol
+    is augmented with tokens which are automatically inserted by this class.
+    The tokens are interpreted by the corresponding server-side implementation
+    of this scheme.
+
+    :ivar _get_rref: A no-argument callable which retrieves the most recently
+        valid ``RemoteReference`` corresponding to the server-side object for
+        this scheme.
+
+    :ivar _get_tokens: A no-argument callable which retrieves some tokens
+        which can be used to authorize an operation.
+    """
+    _get_rref = attr.ib()
+    _get_tokens = attr.ib()
+
+    @property
+    def _rref(self):
+        return self._get_rref()
+
+    def get_version(self):
+        return self._rref.callRemote(
+            "get_version",
+        )
+
+    def allocate_buckets(
+            self,
+            storage_index,
+            renew_secret,
+            cancel_secret,
+            sharenums,
+            allocated_size,
+            canary,
+    ):
+        return self._rref.callRemote(
+            "allocate_buckets",
+            self._get_tokens(),
+            storage_index,
+            renew_secret,
+            cancel_secret,
+            sharenums,
+            allocated_size,
+            canary,
+        )
+
+    def get_buckets(
+            self,
+            storage_index,
+    ):
+        return self._rref.callRemote(
+            "get_buckets",
+            storage_index,
+        )
+
+    def add_lease(
+            self,
+            storage_index,
+            renew_secret,
+            cancel_secret,
+    ):
+        return self._rref.callRemote(
+            "add_lease",
+            self._get_tokens(),
+            storage_index,
+            renew_secret,
+            cancel_secret,
+        )
+
+    def renew_lease(
+            self,
+            storage_index,
+            renew_secret,
+    ):
+        return self._rref.callRemote(
+            "renew_lease",
+            self._get_tokens(),
+            storage_index,
+            renew_secret,
+        )
+
+    def advise_corrupt_share(
+            self,
+            share_type,
+            storage_index,
+            shnum,
+            reason,
+    ):
+        return self._rref.callRemote(
+            "advise_corrupt_share",
+            share_type,
+            storage_index,
+            shnum,
+            reason,
+        )
+
+    def slot_testv_and_readv_and_writev(
+            self,
+            storage_index,
+            secrets,
+            tw_vectors,
+            r_vector,
+    ):
+        return self._rref.callRemote(
+            "slot_testv_and_readv_and_writev",
+            self._get_tokens(),
+            storage_index,
+            secrets,
+            tw_vectors,
+            r_vector,
+        )
+
+    def slot_readv(
+            self,
+            storage_index,
+            shares,
+            r_vector,
+    ):
+        return self._rref.callRemote(
+            "slot_readv",
+            storage_index,
+            shares,
+            r_vector,
+        )
diff --git a/src/_secureaccesstokenauthorizer/_storage_server.py b/src/_secureaccesstokenauthorizer/_storage_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..3181f4e0008afb748257717e32d66d29264d6abe
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/_storage_server.py
@@ -0,0 +1,254 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A Tahoe-LAFS RIStorageServer-alike which authorizes writes and lease
+updates using a per-call token.
+
+This is the server part of a storage access protocol.  The client part is
+implemented in ``_storage_client.py``.
+"""
+
+import attr
+from attr.validators import (
+    provides,
+)
+
+from zope.interface import (
+    implementer_only,
+)
+
+from foolscap.constraint import (
+    ByteStringConstraint,
+)
+from foolscap.api import (
+    ListOf,
+    Referenceable,
+)
+from foolscap.ipb import (
+    IReferenceable,
+    IRemotelyCallable,
+)
+from foolscap.remoteinterface import (
+    RemoteMethodSchema,
+    RemoteInterface,
+)
+
+from allmydata.interfaces import (
+    RIStorageServer,
+)
+
+# The Foolscap convention seems to be to try to constrain inputs to valid
+# values.  So we'll try to limit the number of tokens a client can supply.
+# Foolscap may be moving away from this so we may eventually drop this as
+# well.  Though it may still make sense on a non-Foolscap protocol (eg HTTP)
+# which Tahoe-LAFS may eventually support.
+#
+# In any case, for now, pick some fairly arbitrary value.  I am deliberately
+# picking a small number here and expect to have to raise it.  However,
+# a client could accomplish a lot with a few tokens while also not wasting a
+# lot of value.
+MAXIMUM_TOKENS_PER_CALL = 10
+
+# This is the length of a serialized PrivacyPass pass (there's a lot of
+# confusion between "tokens" and "passes" here, sadly).
+TOKEN_LENGTH = 97
+
+# Take those values and turn them into the appropriate Foolscap constraint
+# objects.  Foolscap seems to have a convention of representing these as
+# CamelCase module-level values so I replicate that here.
+Token = ByteStringConstraint(maxLength=TOKEN_LENGTH, minLength=TOKEN_LENGTH)
+TokenList = ListOf(Token, maxLength=MAXIMUM_TOKENS_PER_CALL)
+
+
+def add_tokens(schema):
+    """
+    Add a ``tokens`` parameter to the given method schema.
+
+    :param foolscap.remoteinterface.RemoteMethodSchema schema: An existing
+        method schema to modify.
+
+    :return foolscap.remoteinterface.RemoteMethodSchema: A schema like
+        ``schema`` but with one additional required argument.
+    """
+    return add_arguments(schema, [(b"tokens", TokenList)])
+
+
+def add_arguments(schema, kwargs):
+    """
+    Create a new schema like ``schema`` but with the arguments given by
+    ``kwargs`` prepended to the signature.
+
+    :param foolscap.remoteinterface.RemoteMethodSchema schema: The existing
+        schema.
+
+    :param list[(bytes, foolscap.IConstraint)] kwargs: The arguments to
+        prepend to the signature of ``schema``.
+
+    :return foolscap.remoteinterface.RemoteMethodSchema: The new schema
+        object.
+    """
+    new_kwargs = dict(schema.argConstraints)
+    new_kwargs.update(kwargs)
+    modified_schema = RemoteMethodSchema(**new_kwargs)
+    # Initialized from **new_kwargs, RemoteMethodSchema.argumentNames is in
+    # some arbitrary, probably-incorrect order.  This breaks user code which
+    # tries to use positional arguments.  Put them back in the order they were
+    # in originally (in the input ``schema``), prepended with the newly added
+    # arguments.
+    modified_schema.argumentNames = (
+        # The new arguments
+        list(argName for (argName, _) in kwargs) +
+        # The original arguments in the original order
+        schema.argumentNames
+    )
+    return modified_schema
+
+
+
+class RITokenAuthorizedStorageServer(RemoteInterface):
+    """
+    An object which can store and retrieve shares, subject to token-based
+    authorization.
+
+    This is much the same as ``allmydata.interfaces.RIStorageServer`` but
+    several of its methods take an additional ``tokens`` parameter.  Clients
+    are expected to supply suitable tokens and only after the tokens have been
+    validated is service provided.
+    """
+    __remote_name__ = (
+        "RITokenAuthorizedStorageServer.tahoe.privatestorage.io"
+    )
+
+    get_version = RIStorageServer["get_version"]
+
+    allocate_buckets = add_tokens(RIStorageServer["allocate_buckets"])
+
+    add_lease = add_tokens(RIStorageServer["add_lease"])
+
+    renew_lease = add_tokens(RIStorageServer["renew_lease"])
+
+    get_buckets = RIStorageServer["get_buckets"]
+
+    slot_readv = RIStorageServer["slot_readv"]
+
+    slot_testv_and_readv_and_writev = add_tokens(
+        RIStorageServer["slot_testv_and_readv_and_writev"],
+    )
+
+    advise_corrupt_share = RIStorageServer["advise_corrupt_share"]
+
+
+
+@implementer_only(RITokenAuthorizedStorageServer, IReferenceable, IRemotelyCallable)
+# It would be great to use `frozen=True` (value-based hashing) instead of
+# `cmp=False` (identity based hashing) but Referenceable wants to set some
+# attributes on self and it's hard to avoid that.
+@attr.s(cmp=False)
+class SecureAccessTokenAuthorizerStorageServer(Referenceable):
+    """
+    A class which wraps an ``RIStorageServer`` to insert token validity checks
+    before allowing certain functionality.
+    """
+    _original = attr.ib(validator=provides(RIStorageServer))
+
+    def _validate_tokens(self, tokens):
+        """
+        Check that all of the given tokens are valid.
+
+        :raise InvalidToken: If any token in ``tokens`` is not valid.
+
+        :return NoneType: If all of the tokens in ``tokens`` are valid.
+
+        :note: This is yet to be implemented so it always returns ``None``.
+        """
+        return None
+
+    def remote_get_version(self):
+        """
+        Pass through without token check to allow clients to learn about our
+        version and configuration in case it helps them decide how to behave.
+        """
+        return self._original.remote_get_version()
+
+    def remote_allocate_buckets(self, tokens, *a, **kw):
+        """
+        Pass through after a token check to ensure that clients can only allocate
+        storage for immutable shares if they present valid tokens.
+        """
+        self._validate_tokens(tokens)
+        return self._original.remote_allocate_buckets(*a, **kw)
+
+    def remote_get_buckets(self, storage_index):
+        """
+        Pass through without token check to let clients read immutable shares as
+        long as those shares exist.
+        """
+        return self._original.remote_get_buckets(storage_index)
+
+    def remote_add_lease(self, tokens, *a, **kw):
+        """
+        Pass through after a token check to ensure clients can only extend the
+        duration of share storage if they present valid tokens.
+        """
+        self._validate_tokens(tokens)
+        return self._original.remote_add_lease(*a, **kw)
+
+    def remote_renew_lease(self, tokens, *a, **kw):
+        """
+        Pass through after a token check to ensure clients can only extend the
+        duration of share storage if they present valid tokens.
+        """
+        self._validate_tokens(tokens)
+        return self._original.remote_renew_lease(*a, **kw)
+
+    def remote_advise_corrupt_share(self, *a, **kw):
+        """
+        Pass through without a token check to let clients inform us of possible
+        issues with the system without incurring any cost to themselves.
+        """
+        return self._original.remote_advise_corrupt_share(*a, **kw)
+
+    def remote_slot_testv_and_readv_and_writev(self, tokens, *a, **kw):
+        """
+        Pass through after a token check to ensure clients can only allocate
+        storage for mutable shares if they present valid tokens.
+
+        :note: This method can be used both to allocate storage and to rewrite
+            data in already-allocated storage.  These cases may not be the
+            same from the perspective of token validation.
+        """
+        self._validate_tokens(tokens)
+        return self._original.remote_slot_testv_and_readv_and_writev(*a, **kw)
+
+    def remote_slot_readv(self, *a, **kw):
+        """
+        Pass through without a token check to let clients read mutable shares as
+        long as those shares exist.
+        """
+        return self._original.remote_slot_readv(*a, **kw)
+
+# I don't understand why this is required.
+# SecureAccessTokenAuthorizerStorageServer is-a Referenceable.  It seems like
+# the built-in adapter should take care of this case.
+from twisted.python.components import (
+    registerAdapter,
+)
+from foolscap.referenceable import (
+    ReferenceableSlicer,
+)
+from foolscap.ipb import (
+    ISlicer,
+)
+registerAdapter(ReferenceableSlicer, SecureAccessTokenAuthorizerStorageServer, ISlicer)
diff --git a/src/_secureaccesstokenauthorizer/api.py b/src/_secureaccesstokenauthorizer/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..53887349735138b3d8463c5d070361a422b3da72
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/api.py
@@ -0,0 +1,30 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__all__ = [
+    "SecureAccessTokenAuthorizerStorageServer",
+    "SecureAccessTokenAuthorizerStorageClient",
+    "SecureAccessTokenAuthorizer",
+]
+
+from ._storage_server import (
+    SecureAccessTokenAuthorizerStorageServer,
+)
+from ._storage_client import (
+    SecureAccessTokenAuthorizerStorageClient,
+)
+
+from ._plugin import (
+    SecureAccessTokenAuthorizer,
+)
diff --git a/src/_secureaccesstokenauthorizer/tests/__init__.py b/src/_secureaccesstokenauthorizer/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ddc6757790860f85c70d9c06d97bbf25baae8784
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/tests/__init__.py
@@ -0,0 +1,17 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The automated unit test suite.
+"""
diff --git a/src/_secureaccesstokenauthorizer/tests/matchers.py b/src/_secureaccesstokenauthorizer/tests/matchers.py
new file mode 100644
index 0000000000000000000000000000000000000000..16d967a512f1ee976ac845750fdf476fc95958a6
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/tests/matchers.py
@@ -0,0 +1,56 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Testtools matchers useful for the test suite.
+"""
+
+import attr
+
+from testtools.matchers import (
+    Mismatch,
+    ContainsDict,
+    Always,
+)
+
+@attr.s
+class Provides(object):
+    """
+    Match objects that provide one or more Zope Interface interfaces.
+    """
+    interfaces = attr.ib()
+
+    def match(self, obj):
+        missing = set()
+        for iface in self.interfaces:
+            if not iface.providedBy(obj):
+                missing.add(iface)
+        if missing:
+            return Mismatch("{} does not provide expected {}".format(
+                obj, ", ".join(str(iface) for iface in missing),
+            ))
+
+
+def matches_version_dictionary():
+    """
+    Match the dictionary returned by Tahoe-LAFS'
+    ``RIStorageServer.get_version`` which is also the dictionary returned by
+    our own ``RITokenAuthorizedStorageServer.get_version``.
+    """
+    return ContainsDict({
+        # It has these two top-level keys, at least.  Try not to be too
+        # fragile by asserting much more than that they are present.
+        b'application-version': Always(),
+        b'http://allmydata.org/tahoe/protocols/storage/v1': Always(),
+    })
diff --git a/src/_secureaccesstokenauthorizer/tests/strategies.py b/src/_secureaccesstokenauthorizer/tests/strategies.py
new file mode 100644
index 0000000000000000000000000000000000000000..c41f7cca88ebf7cf3d80984e7c26c9df96a9c0f6
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/tests/strategies.py
@@ -0,0 +1,238 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Hypothesis strategies for property testing.
+"""
+
+import attr
+
+from hypothesis.strategies import (
+    one_of,
+    just,
+    binary,
+    integers,
+    sets,
+    lists,
+    tuples,
+    dictionaries,
+    builds,
+)
+
+from allmydata.interfaces import (
+    StorageIndex,
+    LeaseRenewSecret,
+    LeaseCancelSecret,
+    WriteEnablerSecret,
+)
+
+def configurations():
+    """
+    Build configuration values for the plugin.
+    """
+    return just({})
+
+
+def storage_indexes():
+    """
+    Build Tahoe-LAFS storage indexes.
+    """
+    return binary(
+        min_size=StorageIndex.minLength,
+        max_size=StorageIndex.maxLength,
+    )
+
+
+def lease_renew_secrets():
+    """
+    Build Tahoe-LAFS lease renewal secrets.
+    """
+    return binary(
+        min_size=LeaseRenewSecret.minLength,
+        max_size=LeaseRenewSecret.maxLength,
+    )
+
+
+def lease_cancel_secrets():
+    """
+    Build Tahoe-LAFS lease cancellation secrets.
+    """
+    return binary(
+        min_size=LeaseCancelSecret.minLength,
+        max_size=LeaseCancelSecret.maxLength,
+    )
+
+
+def write_enabler_secrets():
+    """
+    Build Tahoe-LAFS write enabler secrets.
+    """
+    return binary(
+        min_size=WriteEnablerSecret.minLength,
+        max_size=WriteEnablerSecret.maxLength,
+    )
+
+
+def sharenums():
+    """
+    Build Tahoe-LAFS share numbers.
+    """
+    return integers(
+        min_value=0,
+        max_value=255,
+    )
+
+
+def sharenum_sets():
+    """
+    Build sets of Tahoe-LAFS share numbers.
+    """
+    return sets(
+        sharenums(),
+        min_size=1,
+        max_size=255,
+    )
+
+
+def sizes():
+    """
+    Build Tahoe-LAFS share sizes.
+    """
+    return integers(
+        # Size 0 data isn't data, it's nothing.
+        min_value=1,
+        # Just for practical purposes...
+        max_value=2 ** 16,
+    )
+
+
+def offsets():
+    """
+    Build Tahoe-LAFS share offsets.
+    """
+    return integers(
+        min_value=0,
+        # Just for practical purposes...
+        max_value=2 ** 16,
+    )
+
+
+def bytes_for_share(sharenum, size):
+    """
+    :return bytes: marginally distinctive bytes of a certain length for the
+        given share number
+    """
+    if 0 <= sharenum <= 255:
+        return (unichr(sharenum) * size).encode("latin-1")
+    raise ValueError("Sharenum must be between 0 and 255 inclusive.")
+
+
+def shares():
+    """
+    Build Tahoe-LAFS share data.
+    """
+    return tuples(
+        sharenums(),
+        sizes()
+    ).map(
+        lambda num_and_size: bytes_for_share(*num_and_size),
+    )
+
+
+def data_vectors():
+    """
+    Build Tahoe-LAFS data vectors.
+    """
+    return lists(
+        tuples(
+            offsets(),
+            shares(),
+        ),
+        # An empty data vector doesn't make much sense.  If you have no data
+        # to write, you should probably use slot_readv instead.  Also,
+        # Tahoe-LAFS explodes if you pass an empty data vector -
+        # storage/server.py, OSError(ENOENT) from `os.listdir(bucketdir)`.
+        min_size=1,
+        # Just for practical purposes...
+        max_size=8,
+    )
+
+
+def test_vectors():
+    """
+    Build Tahoe-LAFS test vectors.
+    """
+    return lists(
+        # XXX TODO
+        just(None),
+        min_size=0,
+        max_size=0,
+    )
+
+
+@attr.s(frozen=True)
+class TestAndWriteVectors(object):
+    """
+    Provide an alternate structure for the values required by the
+    ``tw_vectors`` parameter accepted by
+    ``RIStorageServer.slot_testv_and_readv_and_writev``.
+    """
+    test_vector = attr.ib()
+    write_vector = attr.ib()
+    new_length = attr.ib()
+
+    def for_call(self):
+        """
+        Construct a value suitable to be passed as ``tw_vectors`` to
+        ``slot_testv_and_readv_and_writev``.
+        """
+        return (self.test_vector, self.write_vector, self.new_length)
+
+
+def test_and_write_vectors():
+    """
+    Build Tahoe-LAFS test and write vectors for a single share.
+    """
+    return builds(
+        TestAndWriteVectors,
+        test_vectors(),
+        data_vectors(),
+        one_of(
+            just(None),
+            sizes(),
+        ),
+    )
+
+
+def test_and_write_vectors_for_shares():
+    """
+    Build Tahoe-LAFS test and write vectors for a number of shares.
+    """
+    return dictionaries(
+        sharenums(),
+        test_and_write_vectors(),
+        # An empty dictionary wouldn't make much sense.  And it provokes a
+        # NameError from Tahoe, storage/server.py:479, `new_length` referenced
+        # before assignment.
+        min_size=1,
+        # Just for practical purposes...
+        max_size=8,
+    )
+
+
+def announcements():
+    """
+    Build announcements for the SecureAccessTokenAuthorizer plugin.
+    """
+    return just({})
diff --git a/src/_secureaccesstokenauthorizer/tests/test_plugin.py b/src/_secureaccesstokenauthorizer/tests/test_plugin.py
new file mode 100644
index 0000000000000000000000000000000000000000..14d544bfb361b8e0adcb5ad93d65cf528f62e64c
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/tests/test_plugin.py
@@ -0,0 +1,224 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Tests for the Tahoe-LAFS plugin.
+"""
+
+from zope.interface import (
+    implementer,
+)
+
+from testtools import (
+    TestCase,
+)
+from testtools.matchers import (
+    Always,
+    Contains,
+    AfterPreprocessing,
+)
+from testtools.twistedsupport import (
+    succeeded,
+)
+
+from hypothesis import (
+    given,
+)
+
+from foolscap.broker import (
+    Broker,
+)
+from foolscap.ipb import (
+    IReferenceable,
+    IRemotelyCallable,
+)
+
+from allmydata.interfaces import (
+    IFoolscapStoragePlugin,
+    IAnnounceableStorageServer,
+    IStorageServer,
+    RIStorageServer,
+)
+
+from twisted.plugin import (
+    getPlugins,
+)
+from twisted.test.proto_helpers import (
+    StringTransport,
+)
+from twisted.plugins.secureaccesstokenauthorizer import (
+    storage_server,
+)
+
+from .strategies import (
+    configurations,
+    announcements,
+)
+from .matchers import (
+    Provides,
+)
+
+
+@implementer(RIStorageServer)
+class StubStorageServer(object):
+    pass
+
+
+def get_anonymous_storage_server():
+    return StubStorageServer()
+
+
+def get_rref():
+    return None
+
+
+class PluginTests(TestCase):
+    """
+    Tests for ``twisted.plugins.secureaccesstokenauthorizer.storage_server``.
+    """
+    def test_discoverable(self):
+        """
+        The plugin can be discovered.
+        """
+        self.assertThat(
+            getPlugins(IFoolscapStoragePlugin),
+            Contains(storage_server),
+        )
+
+
+    def test_provides_interface(self):
+        """
+        ``storage_server`` provides ``IFoolscapStoragePlugin``.
+        """
+        self.assertThat(
+            storage_server,
+            Provides([IFoolscapStoragePlugin]),
+        )
+
+
+
+class ServerPluginTests(TestCase):
+    """
+    Tests for the plugin's implementation of
+    ``IFoolscapStoragePlugin.get_storage_server``.
+    """
+    @given(configurations())
+    def test_returns_announceable(self, configuration):
+        """
+        ``storage_server.get_storage_server`` returns an instance which provides
+        ``IAnnounceableStorageServer``.
+        """
+        storage_server_deferred = storage_server.get_storage_server(
+            configuration,
+            get_anonymous_storage_server,
+        )
+        self.assertThat(
+            storage_server_deferred,
+            succeeded(Provides([IAnnounceableStorageServer])),
+        )
+
+
+    @given(configurations())
+    def test_returns_referenceable(self, configuration):
+        """
+        The storage server attached to the result of
+        ``storage_server.get_storage_server`` provides ``IReferenceable`` and
+        ``IRemotelyCallable``.
+        """
+        storage_server_deferred = storage_server.get_storage_server(
+            configuration,
+            get_anonymous_storage_server,
+        )
+        self.assertThat(
+            storage_server_deferred,
+            succeeded(
+                AfterPreprocessing(
+                    lambda ann: ann.storage_server,
+                    Provides([IReferenceable, IRemotelyCallable]),
+                ),
+            ),
+        )
+
+    @given(configurations())
+    def test_returns_serializable(self, configuration):
+        """
+        The storage server attached to the result of
+        ``storage_server.get_storage_server`` can be serialized by a banana
+        Broker (for Foolscap).
+        """
+        storage_server_deferred = storage_server.get_storage_server(
+            configuration,
+            get_anonymous_storage_server,
+        )
+        broker = Broker(None)
+        broker.makeConnection(StringTransport())
+        self.expectThat(
+            storage_server_deferred,
+            succeeded(
+                AfterPreprocessing(
+                    lambda ann: broker.send(ann.storage_server),
+                    Always(),
+                ),
+            ),
+        )
+
+
+    @given(configurations())
+    def test_returns_hashable(self, configuration):
+        """
+        The storage server attached to the result of
+        ``storage_server.get_storage_server`` is hashable for use as a Python
+        dictionary key.
+
+        This is another requirement of Foolscap.
+        """
+        storage_server_deferred = storage_server.get_storage_server(
+            configuration,
+            get_anonymous_storage_server,
+        )
+        broker = Broker(None)
+        broker.makeConnection(StringTransport())
+        self.expectThat(
+            storage_server_deferred,
+            succeeded(
+                AfterPreprocessing(
+                    lambda ann: hash(ann.storage_server),
+                    Always(),
+                ),
+            ),
+        )
+
+
+
+class ClientPluginTests(TestCase):
+    """
+    Tests for the plugin's implementation of
+    ``IFoolscapStoragePlugin.get_storage_client``.
+    """
+    @given(configurations(), announcements())
+    def test_interface(self, configuration, announcement):
+        """
+        ``get_storage_client`` returns a ``Deferred`` that fires with an object
+        which provides ``IStorageServer``.
+        """
+        storage_client_deferred = storage_server.get_storage_client(
+            configuration,
+            announcement,
+            get_rref,
+        )
+
+        self.assertThat(
+            storage_client_deferred,
+            succeeded(Provides([IStorageServer])),
+        )
diff --git a/src/_secureaccesstokenauthorizer/tests/test_storage_protocol.py b/src/_secureaccesstokenauthorizer/tests/test_storage_protocol.py
new file mode 100644
index 0000000000000000000000000000000000000000..95abe60a5aa6721e4faf049c8b35481896062928
--- /dev/null
+++ b/src/_secureaccesstokenauthorizer/tests/test_storage_protocol.py
@@ -0,0 +1,519 @@
+# Copyright 2019 PrivateStorage.io, LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Tests for communication between the client and server components.
+"""
+
+import attr
+
+from fixtures import (
+    Fixture,
+    TempDir,
+    MonkeyPatch,
+)
+from testtools import (
+    TestCase,
+)
+from testtools.matchers import (
+    Equals,
+    HasLength,
+)
+from testtools.twistedsupport import (
+    succeeded,
+)
+from testtools.twistedsupport._deferred import (
+    # I'd rather use https://twistedmatrix.com/trac/ticket/8900 but efforts
+    # there appear to have stalled.
+    extract_result,
+)
+
+from hypothesis import (
+    given,
+    assume,
+    note,
+)
+from hypothesis.strategies import (
+    tuples,
+)
+
+from twisted.python.filepath import (
+    FilePath,
+)
+from twisted.internet.defer import (
+    execute,
+)
+
+from foolscap.referenceable import (
+    LocalReferenceable,
+)
+
+from allmydata.storage.server import (
+    StorageServer,
+)
+
+from .strategies import (
+    storage_indexes,
+    lease_renew_secrets,
+    lease_cancel_secrets,
+    write_enabler_secrets,
+    sharenums,
+    sharenum_sets,
+    sizes,
+    test_and_write_vectors_for_shares,
+    # Not really a strategy...
+    bytes_for_share,
+)
+from .matchers import (
+    matches_version_dictionary,
+)
+from ..api import (
+    SecureAccessTokenAuthorizerStorageServer,
+    SecureAccessTokenAuthorizerStorageClient,
+)
+from .._storage_server import (
+    TOKEN_LENGTH,
+)
+
class AnonymousStorageServer(Fixture):
    """
    Supply an instance of allmydata.storage.server.StorageServer which
    implements anonymous access to Tahoe-LAFS storage server functionality.

    :ivar FilePath tempdir: The path to the server's storage on the
        filesystem.

    :ivar allmydata.storage.server.StorageServer storage_server: The storage
        server.
    """
    def _setUp(self):
        # Give the server its own directory beneath a per-fixture temporary
        # directory so each fixture instance is isolated on disk.
        storage_path = self.useFixture(TempDir()).join(b"storage")
        self.tempdir = FilePath(storage_path)
        # The second argument is the server's node id; any 20 bytes will do
        # for these tests.
        self.storage_server = StorageServer(
            self.tempdir.asBytesMode().path,
            b"x" * 20,
        )
+
+
@attr.s
class LocalRemote(object):
    """
    Adapt a referenceable to behave as if it were a remote reference instead.

    This is only a partial implementation of ``IRemoteReference`` so it
    doesn't declare the interface.

    ``foolscap.referenceable.LocalReferenceable`` is in many ways a better
    adapter between these interfaces but it also uses ``eventually`` which
    complicates matters immensely for testing.

    :ivar foolscap.ipb.IReferenceable _referenceable: The object to which this
        provides a simulated remote interface.
    """
    _referenceable = attr.ib()

    def callRemote(self, methname, *args, **kwargs):
        """
        Call the given method on the wrapped object, passing the given arguments.

        Arguments are checked for conformance to the remote interface but the
        return value is not (because I don't know how -exarkun).

        :return Deferred: The result of the call on the wrapped object.
        """
        # Validate the outbound arguments against the remote interface's
        # schema before dispatching, just as a real remote reference would.
        method_schema = self._referenceable.getInterface()[methname]
        method_schema.checkAllArgs(args, kwargs, inbound=False)
        # TODO: Figure out how to call checkResults on the result.
        return execute(
            self._referenceable.doRemoteCall, methname, args, kwargs,
        )
+
+
+
class ShareTests(TestCase):
    """
    Tests for interaction with shares.

    These tests drive a ``SecureAccessTokenAuthorizerStorageClient`` through
    a simulated remote connection (``LocalRemote``) to a
    ``SecureAccessTokenAuthorizerStorageServer`` which wraps a real, on-disk
    anonymous storage server.
    """
    def setUp(self):
        super(ShareTests, self).setUp()
        # The storage API requires a canary object; its identity is never
        # examined by these tests.
        self.canary = LocalReferenceable(None)
        self.anonymous_storage_server = self.useFixture(AnonymousStorageServer()).storage_server

        def get_tokens():
            # Supply a single token of exactly the length the server-side
            # wrapper requires.
            return [b"x" * TOKEN_LENGTH]

        self.server = SecureAccessTokenAuthorizerStorageServer(
            self.anonymous_storage_server,
        )
        # Route client calls through the simulated remote reference so remote
        # argument schemas are exercised, without Foolscap's eventual-send
        # machinery.
        self.local_remote_server = LocalRemote(self.server)
        self.client = SecureAccessTokenAuthorizerStorageClient(
            get_rref=lambda: self.local_remote_server,
            get_tokens=get_tokens,
        )

    def test_get_version(self):
        """
        Version information about the storage server can be retrieved using
        *get_version*.
        """
        self.assertThat(
            self.client.get_version(),
            succeeded(matches_version_dictionary()),
        )

    @given(
        storage_index=storage_indexes(),
        renew_secret=lease_renew_secrets(),
        cancel_secret=lease_cancel_secrets(),
        sharenums=sharenum_sets(),
        size=sizes(),
    )
    def test_create_immutable(self, storage_index, renew_secret, cancel_secret, sharenums, size):
        """
        Immutable share data created using *allocate_buckets* and methods of the
        resulting buckets can be read back using *get_buckets* and methods of
        those resulting buckets.
        """
        # Hypothesis causes our storage server to be used many times.  Clean
        # up between iterations.
        cleanup_storage_server(self.anonymous_storage_server)

        alreadygot, allocated = extract_result(
            self.client.allocate_buckets(
                storage_index,
                renew_secret,
                cancel_secret,
                sharenums,
                size,
                canary=self.canary,
            ),
        )
        self.expectThat(
            alreadygot,
            Equals(set()),
            u"fresh server somehow already had shares",
        )
        self.expectThat(
            set(allocated.keys()),
            Equals(sharenums),
            u"fresh server refused to allocate all requested buckets",
        )

        # Fill and close every allocated bucket so the shares become visible
        # to subsequent reads.
        for sharenum, bucket in allocated.items():
            bucket.remote_write(0, bytes_for_share(sharenum, size))
            bucket.remote_close()

        readers = extract_result(self.client.get_buckets(storage_index))

        self.expectThat(
            set(readers.keys()),
            Equals(sharenums),
            u"server did not return all buckets we wrote",
        )
        for (sharenum, bucket) in readers.items():
            self.expectThat(
                bucket.remote_read(0, size),
                Equals(bytes_for_share(sharenum, size)),
                u"server returned wrong bytes for share number {}".format(
                    sharenum,
                ),
            )

    @given(
        storage_index=storage_indexes(),
        renew_secrets=tuples(lease_renew_secrets(), lease_renew_secrets()),
        cancel_secret=lease_cancel_secrets(),
        sharenum=sharenums(),
        size=sizes(),
    )
    def test_add_lease(self, storage_index, renew_secrets, cancel_secret, sharenum, size):
        """
        A lease can be added to an existing immutable share.
        """
        # Hypothesis causes our storage server to be used many times.  Clean
        # up between iterations.
        cleanup_storage_server(self.anonymous_storage_server)

        # Use a different secret so that it's a new lease and not an
        # implicit renewal.
        add_lease_secret, renew_lease_secret = renew_secrets
        assume(add_lease_secret != renew_lease_secret)

        # Create a share we can toy with.
        write_toy_shares(
            self.anonymous_storage_server,
            storage_index,
            add_lease_secret,
            cancel_secret,
            {sharenum},
            size,
            canary=self.canary,
        )

        extract_result(
            self.client.add_lease(
                storage_index,
                renew_lease_secret,
                cancel_secret,
            ),
        )
        # One lease from the original write plus one from add_lease.
        [(_, leases)] = get_leases(self.server, storage_index).items()
        self.assertThat(leases, HasLength(2))

    @given(
        storage_index=storage_indexes(),
        renew_secret=lease_renew_secrets(),
        cancel_secret=lease_cancel_secrets(),
        sharenum=sharenums(),
        size=sizes(),
    )
    def test_renew_lease(self, storage_index, renew_secret, cancel_secret, sharenum, size):
        """
        A lease on an immutable share can be updated to expire at a later time.
        """
        # Hypothesis causes our storage server to be used many times.  Clean
        # up between iterations.
        cleanup_storage_server(self.anonymous_storage_server)

        # Take control of time (in this hacky, fragile way) so we can verify
        # the expiration time gets bumped by the renewal.
        now = 1000000000.5
        self.useFixture(MonkeyPatch("time.time", lambda: now))

        # Create a share we can toy with.
        write_toy_shares(
            self.anonymous_storage_server,
            storage_index,
            renew_secret,
            cancel_secret,
            {sharenum},
            size,
            canary=self.canary,
        )

        # Advance the clock so the renewal produces a visibly later
        # expiration than the original lease.
        now += 100000
        extract_result(
            self.client.renew_lease(
                storage_index,
                renew_secret,
            ),
        )

        # Based on Tahoe-LAFS' hard-coded renew time.
        RENEW_INTERVAL = 60 * 60 * 24 * 31

        [(_, [lease])] = get_leases(self.server, storage_index).items()
        self.assertThat(
            lease.get_expiration_time(),
            Equals(int(now + RENEW_INTERVAL)),
        )

    @given(
        storage_index=storage_indexes(),
        renew_secret=lease_renew_secrets(),
        cancel_secret=lease_cancel_secrets(),
        sharenum=sharenums(),
        size=sizes(),
    )
    def test_advise_corrupt_share(self, storage_index, renew_secret, cancel_secret, sharenum, size):
        """
        An advisory of corruption in a share can be sent to the server.
        """
        # Hypothesis causes our storage server to be used many times.  Clean
        # up between iterations.
        cleanup_storage_server(self.anonymous_storage_server)

        # Create a share we can toy with.
        write_toy_shares(
            self.anonymous_storage_server,
            storage_index,
            renew_secret,
            cancel_secret,
            {sharenum},
            size,
            canary=self.canary,
        )

        extract_result(
            self.client.advise_corrupt_share(
                b"immutable",
                storage_index,
                sharenum,
                b"the bits look bad",
            ),
        )
        # The server records the advisory as a file in its corruption
        # advisory directory.
        self.assertThat(
            FilePath(self.anonymous_storage_server.corruption_advisory_dir).children(),
            HasLength(1),
        )

    @given(
        storage_index=storage_indexes(),
        secrets=tuples(
            write_enabler_secrets(),
            lease_renew_secrets(),
            lease_cancel_secrets(),
        ),
        test_and_write_vectors_for_shares=test_and_write_vectors_for_shares(),
    )
    def test_create_mutable(self, storage_index, secrets, test_and_write_vectors_for_shares):
        """
        Mutable share data written using *slot_testv_and_readv_and_writev* can be
        read back.
        """
        # Hypothesis causes our storage server to be used many times.  Clean
        # up between iterations.
        cleanup_storage_server(self.anonymous_storage_server)

        wrote, read = extract_result(
            self.client.slot_testv_and_readv_and_writev(
                storage_index,
                secrets=secrets,
                tw_vectors={
                    k: v.for_call()
                    for (k, v)
                    in test_and_write_vectors_for_shares.items()
                },
                r_vector=[],
            ),
        )

        self.assertThat(
            wrote,
            Equals(True),
            u"Server rejected a write to a new mutable storage index",
        )

        self.assertThat(
            read,
            Equals({}),
            u"Server gave back read results when we asked for none.",
        )

        for sharenum, vectors in test_and_write_vectors_for_shares.items():
            # Read back exactly the regions the write vectors covered.
            r_vector = list(map(write_vector_to_read_vector, vectors.write_vector))
            read = extract_result(
                self.client.slot_readv(
                    storage_index,
                    shares=[sharenum],
                    r_vector=r_vector,
                ),
            )
            note("read vector {}".format(r_vector))
            # Create a buffer and pile up all the write operations in it.
            # This lets us make correct assertions about overlapping writes.
            length = max(
                offset + len(data)
                for (offset, data)
                in vectors.write_vector
            )
            expected = b"\x00" * length
            for (offset, data) in vectors.write_vector:
                expected = expected[:offset] + data + expected[offset + len(data):]
            # A new_length shorter than the buffer truncates what the server
            # retains, so truncate our expectation to match.
            if vectors.new_length is not None and vectors.new_length < length:
                expected = expected[:vectors.new_length]
            self.assertThat(
                read,
                Equals({sharenum: list(
                    # Get the expected value out of our scratch buffer.
                    expected[offset:offset + len(data)]
                    for (offset, data)
                    in vectors.write_vector
                )}),
                u"Server didn't reliably read back data just written for share {}".format(
                    sharenum,
                ),
            )
+
+
def write_vector_to_read_vector(write_vector):
    """
    Create a read vector which will read back the data written by the given
    write vector.

    :param tuple write_vector: An ``(offset, data)`` pair.

    :return tuple: An ``(offset, length)`` pair covering the same region.
    """
    offset, data = write_vector
    return (offset, len(data))
+
+
def write_toy_shares(
        storage_server,
        storage_index,
        renew_secret,
        cancel_secret,
        sharenums,
        size,
        canary,
):
    """
    Write some immutable shares to the given storage server.

    :param allmydata.storage.server.StorageServer storage_server:
    :param bytes storage_index:
    :param bytes renew_secret:
    :param bytes cancel_secret:
    :param set[int] sharenums:
    :param int size:
    :param IRemoteReference canary:
    """
    # Ask the server for writers for all of the requested share numbers.
    allocation = storage_server.remote_allocate_buckets(
        storage_index,
        renew_secret,
        cancel_secret,
        sharenums,
        size,
        canary=canary,
    )
    _already_got, allocated = allocation
    # Fill each allocated share with deterministic data and close it so the
    # share becomes readable.
    for sharenum, writer in allocated.items():
        writer.remote_write(0, bytes_for_share(sharenum, size))
        writer.remote_close()
+
+
def get_leases(storage_server, storage_index):
    """
    Get all leases for all shares of the given storage index on the given
    server.

    :param StorageServer storage_server: The storage server on which to find
        the information.

    :param bytes storage_index: The storage index for which to look up shares.

    :return dict[int, list[LeaseInfo]]: The lease information for each share.
    """
    # It's hard to assert much about the lease without knowing about *some*
    # implementation details of the storage server.  I prefer to know Python
    # API details rather than on-disk format details.
    leases = {}
    for sharenum, reader in storage_server.remote_get_buckets(storage_index).items():
        leases[sharenum] = list(reader._share_file.get_leases())
    return leases
+
+
def cleanup_storage_server(storage_server):
    """
    Delete all of the shares held by the given storage server.

    :param allmydata.storage.server.StorageServer storage_server: The storage
        server with some on-disk shares to delete.
    """
    # Both the share directory and the corruption advisory directory
    # accumulate state across test iterations; empty them but keep the
    # directories themselves.
    roots = [
        FilePath(storage_server.sharedir),
        FilePath(storage_server.corruption_advisory_dir),
    ]
    for root in roots:
        for child in root.walk():
            if child is not root:
                child.remove()
diff --git a/src/twisted/plugins/secureaccesstokenauthorizer.py b/src/twisted/plugins/secureaccesstokenauthorizer.py
index 4fa53c24ef8fc0a8de39a99887d180b84bb541f0..908d022a1c29f8dab0bbe9cdf891bedab631325e 100644
--- a/src/twisted/plugins/secureaccesstokenauthorizer.py
+++ b/src/twisted/plugins/secureaccesstokenauthorizer.py
@@ -15,3 +15,9 @@
 """
 A drop-in to supply plugins to the Twisted plugin system.
 """
+
from _secureaccesstokenauthorizer.api import (
    SecureAccessTokenAuthorizer,
)

# Twisted's plugin system discovers plugins by scanning modules in
# ``twisted/plugins`` for module-level objects; this instance is what makes
# the plugin available to Tahoe-LAFS.
storage_server = SecureAccessTokenAuthorizer()
diff --git a/tahoe-lafs.nix b/tahoe-lafs.nix
new file mode 100644
index 0000000000000000000000000000000000000000..649d0cd237310dff41d59fa1fcf0b500e2c47655
--- /dev/null
+++ b/tahoe-lafs.nix
@@ -0,0 +1,43 @@
# Nix expression building Tahoe-LAFS from a development branch on GitHub.
{ fetchFromGitHub, nettools, pythonPackages, buildPythonPackage
, twisted, foolscap, nevow, simplejson, zfec, pycryptopp, darcsver
, setuptoolsTrial, setuptoolsDarcs, pycrypto, pyasn1, zope_interface
, service-identity, pyyaml, magic-wormhole, treq, appdirs
, eliot, autobahn
}:
buildPythonPackage rec {
  version = "1.14.0.dev";
  name = "tahoe-lafs-${version}";
  src = fetchFromGitHub {
    owner = "tahoe-lafs";
    repo = "tahoe-lafs";
    # HEAD of integration/storage-economics branch as of July 15th 2019.
    rev = "48bd16a8d9109910122cc2e2c85eb4f378390135";
    sha256 = "0i8k7zb4381vbblriciz1m33va0mxld6nrhpwvjqr9jk335b1a9q";
  };

  postPatch = ''
    # Point the hard-coded ifconfig path at the nettools package in the store.
    sed -i "src/allmydata/util/iputil.py" \
        -es"|_linux_path = '/sbin/ifconfig'|_linux_path = '${nettools}/bin/ifconfig'|g"

    # Chroots don't have /etc/hosts and /etc/resolv.conf, so work around
    # that.
    for i in $(find src/allmydata/test -type f)
    do
      sed -i "$i" -e"s/localhost/127.0.0.1/g"
    done

    # Relax pinned version specifiers so the nixpkgs-provided versions of
    # these dependencies are accepted.
    sed -i 's/"zope.interface.*"/"zope.interface"/' src/allmydata/_auto_deps.py
    sed -i 's/"pycrypto.*"/"pycrypto"/' src/allmydata/_auto_deps.py
  '';


  propagatedBuildInputs = with pythonPackages; [
    twisted foolscap nevow simplejson zfec pycryptopp darcsver
    setuptoolsTrial setuptoolsDarcs pycrypto pyasn1 zope_interface
    service-identity pyyaml magic-wormhole treq appdirs

    eliot autobahn
  ];

  # Tests are skipped in this build; presumably they run in CI elsewhere —
  # TODO confirm before relying on this derivation for releases.
  doCheck = false;
}