Skip to content
Snippets Groups Projects

Faster fingerprinting

Merged Jean-Paul Calderone requested to merge 63.faster-fingerprinting into master
2 files
+ 92
20
Compare changes
  • Side-by-side
  • Inline
Files
2
+ 86
16
#
# $ for n in $(seq 0 30); do sqlite3 vouchers.db "insert into vouchers (name) values ('aaa$n')"; done
#
# Then the test can be run as many times as necessary.
#
# The `redeemed` table must be cleared before each test run.  Random tokens
# are generated for each test run and the server will reject tokens from a new
# run if tokens from an old run are still present.
#
# $ sqlite3 vouchers.db "delete from redeemed"
#
# Originally written for https://github.com/PrivateStorageio/PaymentServer/issues/60
from __future__ import division
from __future__ import division
 
from os import (
 
urandom,
 
)
 
 
from base64 import (
 
b64encode,
 
)
 
from time import (
from time import (
time,
time,
)
)
from json import (
from json import (
dumps,
dumps,
 
loads,
)
)
from treq.client import (
from treq.client import (
@@ -45,29 +59,79 @@ from twisted.internet.defer import (
@@ -45,29 +59,79 @@ from twisted.internet.defer import (
returnValue,
returnValue,
)
)
PARALLELISM = 30
PARALLELISM = 50
 
ITERATIONS = 16
 
NUM_TOKENS = 5000
 
 
 
def a_random_token():
 
return b64encode(urandom(32))
 
 
 
def tokens_for_voucher(key, cache={}):
 
if key not in cache:
 
print("Generating tokens for {}".format(key))
 
cache[key] = list(
 
a_random_token()
 
for _
 
in range(NUM_TOKENS)
 
)
 
else:
 
print("Using cached tokens for {}".format(key))
 
return cache[key]
 
@inlineCallbacks
@inlineCallbacks
def redeem(client, index):
def redeem_with_retry(client, data, headers):
times = []
"""
for i in range(16):
Attempt a redemption. Retry if it fails.
 
 
:return: A ``Deferred`` that fires with (duration of successful request,
 
number of failed requests).
 
"""
 
errors = 0
 
while True:
before = time()
before = time()
response = yield client.post(
response = yield client.post(
url="http://127.0.0.1:8080/v1/redeem",
url="http://127.0.0.1:8080/v1/redeem",
 
data=data,
 
headers=headers,
 
)
 
after = time()
 
duration = int((after - before) * 1000)
 
body = yield readBody(response)
 
if response.code == 200:
 
print("Request complete in {}ms".format(duration))
 
returnValue((duration, errors))
 
 
errors += 1
 
try:
 
reason = loads(body)["reason"]
 
except ValueError:
 
reason = body
 
 
print("Request failed: {} {}".format(response.code, reason))
 
 
 
@inlineCallbacks
 
def redeem(client, index):
 
times = []
 
total_errors = 0
 
voucher = "aaa{}".format(index)
 
for i in range(ITERATIONS):
 
tokens = tokens_for_voucher((voucher, i))
 
duration, errors = yield redeem_with_retry(
 
client,
data=dumps({
data=dumps({
"redeemVoucher": "aaa{}".format(index),
"redeemVoucher": voucher,
"redeemTokens": ["foo-{}-{}".format(index, i)],
"redeemTokens": tokens,
"redeemCounter": i,
"redeemCounter": i,
}),
}),
headers={"content-type": "application/json"},
headers={"content-type": "application/json"},
)
)
after = time()
duration = int((after - before) * 1000)
print("Request complete in {}ms".format(duration))
body = yield readBody(response)
assert response.code == 200, (response.code, body)
times.append(duration)
times.append(duration)
returnValue(times)
total_errors += errors
 
returnValue((times, total_errors))
def mean(xs):
def mean(xs):
@@ -75,7 +139,7 @@ def mean(xs):
@@ -75,7 +139,7 @@ def mean(xs):
def percentile(n, xs):
def percentile(n, xs):
return sorted(xs)[int(len(xs) / 100 * 95)]
return sorted(xs)[int(len(xs) / 100 * n)]
def median(xs):
def median(xs):
@@ -94,11 +158,17 @@ def main(reactor):
@@ -94,11 +158,17 @@ def main(reactor):
)
)
times = []
times = []
for result in (yield gatherResults(ds)):
total_errors = 0
 
for (result, errors) in (yield gatherResults(ds)):
times.extend(result)
times.extend(result)
 
total_errors += errors
print("min: {}".format(min(times)))
print("min: {}".format(min(times)))
print("max: {}".format(max(times)))
print("max: {}".format(max(times)))
print("mean: {}".format(mean(times)))
print("mean: {}".format(mean(times)))
print("median: {}".format(median(times)))
print("median: {}".format(median(times)))
print("95th: {}".format(percentile(95, times)))
print("95th: {}".format(percentile(95, times)))
 
print("errors: {}".format(total_errors))
 
print("error rate: {}".format(
 
total_errors / (total_errors + PARALLELISM * ITERATIONS),
 
))
Loading