Changeset 18891d9e in trunk


Timestamp:
2021-12-02T21:44:59Z (3 years ago)
Author:
meejah <meejah@…>
Branches:
master
Children:
5bb6fbc
Parents:
531fe30b (diff), e605be3 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
Message:

Merge remote-tracking branch 'security/master'

Files:
16 added
23 edited

  • docs/frontends/FTP-and-SFTP.rst

    r531fe30b r18891d9e  
    4848might grant a particular user), and second to decide what directory cap
    4949should be used as the root directory for a log-in by the authenticated user.
    50 A username and password can be used; as of Tahoe-LAFS v1.11, RSA or DSA
    51 public key authentication is also supported.
     50As of Tahoe-LAFS v1.17,
     51RSA/DSA public key authentication is the only supported mechanism.
    5252
    5353Tahoe-LAFS provides two mechanisms to perform this user-to-cap mapping.
     
    6060To use the first form, create a file (for example ``BASEDIR/private/accounts``)
    6161in which each non-comment/non-blank line is a space-separated line of
    62 (USERNAME, PASSWORD, ROOTCAP), like so::
     62(USERNAME, KEY-TYPE, PUBLIC-KEY, ROOTCAP), like so::
    6363
    6464 % cat BASEDIR/private/accounts
    65  # This is a password line: username password cap
    66  alice password URI:DIR2:ioej8xmzrwilg772gzj4fhdg7a:wtiizszzz2rgmczv4wl6bqvbv33ag4kvbr6prz3u6w3geixa6m6a
    67  bob sekrit URI:DIR2:6bdmeitystckbl9yqlw7g56f4e:serp5ioqxnh34mlbmzwvkp3odehsyrr7eytt5f64we3k9hhcrcja
    68 
    6965 # This is a public key line: username keytype pubkey cap
    7066 # (Tahoe-LAFS v1.11 or later)
    7167 carol ssh-rsa AAAA... URI:DIR2:ovjy4yhylqlfoqg2vcze36dhde:4d4f47qko2xm5g7osgo2yyidi5m4muyo2vjjy53q4vjju2u55mfa
    7268
    73 For public key authentication, the keytype may be either "ssh-rsa" or "ssh-dsa".
    74 To avoid ambiguity between passwords and public key types, a password cannot
    75 start with "ssh-".
     69The key type may be either "ssh-rsa" or "ssh-dsa".
    7670
    7771Now add an ``accounts.file`` directive to your ``tahoe.cfg`` file, as described in
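
For illustration, the new format is easy to validate by hand; here is a minimal sketch (not code from this changeset) that splits one accounts line into its four fields and checks the key type. The real parser lives in src/allmydata/frontends/auth.py (see below) and additionally tolerates a trailing comment on the public key by treating the final field as the rootcap.

    # Hypothetical helper: validate one line of the key-only accounts format,
    # USERNAME KEY-TYPE PUBLIC-KEY ROOTCAP.
    def parse_account_line(line):
        name, keytype, pubkey, rootcap = line.split(None, 3)
        if keytype not in ("ssh-rsa", "ssh-dsa"):
            raise ValueError("unsupported key type: %r" % (keytype,))
        return name, keytype, pubkey, rootcap

    parse_account_line(
        "carol ssh-rsa AAAA... "
        "URI:DIR2:ovjy4yhylqlfoqg2vcze36dhde:4d4f47qko2xm5g7osgo2yyidi5m4muyo2vjjy53q4vjju2u55mfa"
    )
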
  • integration/conftest.py

    r531fe30b r18891d9e  
    354354    return nodes
    355355
     356@pytest.fixture(scope="session")
     357def alice_sftp_client_key_path(temp_dir):
     358    # The client SSH key path would typically live elsewhere (~/.ssh),
     359    # but for convenience in testing we put it inside the node.
     360    return join(temp_dir, "alice", "private", "ssh_client_rsa_key")
    356361
    357362@pytest.fixture(scope='session')
    358363@log_call(action_type=u"integration:alice", include_args=[], include_result=False)
    359 def alice(reactor, temp_dir, introducer_furl, flog_gatherer, storage_nodes, request):
     364def alice(
     365        reactor,
     366        temp_dir,
     367        introducer_furl,
     368        flog_gatherer,
     369        storage_nodes,
     370        alice_sftp_client_key_path,
     371        request,
     372):
    360373    process = pytest_twisted.blockon(
    361374        _create_node(
     
    388401    generate_ssh_key(host_ssh_key_path)
    389402
    390     # 3. Add a SFTP access file with username/password and SSH key auth.
    391 
    392     # The client SSH key path is typically going to be somewhere else (~/.ssh,
    393     # typically), but for convenience sake for testing we'll put it inside node.
    394     client_ssh_key_path = join(process.node_dir, "private", "ssh_client_rsa_key")
    395     generate_ssh_key(client_ssh_key_path)
     403    # 3. Add an SFTP access file with an SSH key for auth.
     404    generate_ssh_key(alice_sftp_client_key_path)
    396405    # Pub key format is "ssh-rsa <thekey> <username>". We want the key.
    397     ssh_public_key = open(client_ssh_key_path + ".pub").read().strip().split()[1]
     406    ssh_public_key = open(alice_sftp_client_key_path + ".pub").read().strip().split()[1]
    398407    with open(accounts_path, "w") as f:
    399408        f.write("""\
    400 alice password {rwcap}
    401 
    402 alice2 ssh-rsa {ssh_public_key} {rwcap}
     409alice-key ssh-rsa {ssh_public_key} {rwcap}
    403410""".format(rwcap=rwcap, ssh_public_key=ssh_public_key))
    404411
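
Pulling the key path into its own session-scoped fixture is what lets both the ``alice`` node fixture and individual tests agree on one location. A self-contained sketch of the pattern (hypothetical names, using pytest's built-in ``tmp_path_factory`` rather than this project's ``temp_dir``):

    import os.path
    import pytest

    @pytest.fixture(scope="session")
    def client_key_path(tmp_path_factory):
        # Computed once per session; every dependent sees the same path.
        return str(tmp_path_factory.mktemp("keys").joinpath("ssh_client_rsa_key"))

    @pytest.fixture(scope="session")
    def node(client_key_path):
        # Stand-in for node creation: write key material where tests expect it.
        with open(client_key_path, "w") as f:
            f.write("dummy key material")
        return client_key_path

    def test_key_exists(node, client_key_path):
        assert os.path.exists(client_key_path)
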
  • integration/test_sftp.py

    r531fe30b r18891d9e  
    2020    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
    2121
     22import os.path
    2223from posixpath import join
    2324from stat import S_ISDIR
     
    3435
    3536
    36 def connect_sftp(connect_args={"username": "alice", "password": "password"}):
     37def connect_sftp(connect_args):
    3738    """Create an SFTP client."""
    3839    client = SSHClient()
     
    6162def test_bad_account_password_ssh_key(alice, tmpdir):
    6263    """
    63     Can't login with unknown username, wrong password, or wrong SSH pub key.
     64    Can't login with unknown username, any password, or wrong SSH pub key.
    6465    """
    65     # Wrong password, wrong username:
    66     for u, p in [("alice", "wrong"), ("someuser", "password")]:
     66    # Any password, wrong username:
     67    for u, p in [("alice-key", "wrong"), ("someuser", "password")]:
    6768        with pytest.raises(AuthenticationException):
    6869            connect_sftp(connect_args={
     
    7071            })
    7172
    72     another_key = join(str(tmpdir), "ssh_key")
     73    another_key = os.path.join(str(tmpdir), "ssh_key")
    7374    generate_ssh_key(another_key)
    74     good_key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))
     75    good_key = RSAKey(filename=os.path.join(alice.node_dir, "private", "ssh_client_rsa_key"))
    7576    bad_key = RSAKey(filename=another_key)
    7677
     
    7879    with pytest.raises(AuthenticationException):
    7980        connect_sftp(connect_args={
    80             "username": "alice2", "pkey": bad_key,
     81            "username": "alice-key", "pkey": bad_key,
    8182        })
    8283
     
    8788        })
    8889
     90def sftp_client_key(node):
     91    return RSAKey(
     92        filename=os.path.join(node.node_dir, "private", "ssh_client_rsa_key"),
     93    )
     94
     95def test_sftp_client_key_exists(alice, alice_sftp_client_key_path):
     96    """
     97    Weakly validate the sftp client key fixture by asserting that *something*
     98    exists at the supposed key path.
     99    """
     100    assert os.path.exists(alice_sftp_client_key_path)
    89101
    90102@run_in_thread
    91103def test_ssh_key_auth(alice):
    92104    """It's possible to login authenticating with SSH public key."""
    93     key = RSAKey(filename=join(alice.node_dir, "private", "ssh_client_rsa_key"))
     105    key = sftp_client_key(alice)
    94106    sftp = connect_sftp(connect_args={
    95         "username": "alice2", "pkey": key
     107        "username": "alice-key", "pkey": key
    96108    })
    97109    assert sftp.listdir() == []
     
    101113def test_read_write_files(alice):
    102114    """It's possible to upload and download files."""
    103     sftp = connect_sftp()
     115    sftp = connect_sftp(connect_args={
     116        "username": "alice-key",
     117        "pkey": sftp_client_key(alice),
     118    })
    104119    with sftp.file("myfile", "wb") as f:
    105120        f.write(b"abc")
     
    118133    them.
    119134    """
    120     sftp = connect_sftp()
     135    sftp = connect_sftp(connect_args={
     136        "username": "alice-key",
     137        "pkey": sftp_client_key(alice),
     138    })
    121139    assert sftp.listdir() == []
    122140
     
    149167def test_rename(alice):
    150168    """Directories and files can be renamed."""
    151     sftp = connect_sftp()
     169    sftp = connect_sftp(connect_args={
     170        "username": "alice-key",
     171        "pkey": sftp_client_key(alice),
     172    })
    152173    sftp.mkdir("dir")
    153174
  • src/allmydata/frontends/auth.py

    r531fe30b r18891d9e  
    1313from zope.interface import implementer
    1414from twisted.internet import defer
    15 from twisted.cred import error, checkers, credentials
     15from twisted.cred import checkers, credentials
    1616from twisted.conch.ssh import keys
    1717from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
     
    3333@implementer(checkers.ICredentialsChecker)
    3434class AccountFileChecker(object):
    35     credentialInterfaces = (credentials.IUsernamePassword,
    36                             credentials.IUsernameHashedPassword,
    37                             credentials.ISSHPrivateKey)
     35    credentialInterfaces = (credentials.ISSHPrivateKey,)
     36
    3837    def __init__(self, client, accountfile):
    3938        self.client = client
    40         self.passwords = BytesKeyDict()
    41         pubkeys = BytesKeyDict()
    42         self.rootcaps = BytesKeyDict()
    43         with open(abspath_expanduser_unicode(accountfile), "rb") as f:
    44             for line in f:
    45                 line = line.strip()
    46                 if line.startswith(b"#") or not line:
    47                     continue
    48                 name, passwd, rest = line.split(None, 2)
    49                 if passwd.startswith(b"ssh-"):
    50                     bits = rest.split()
    51                     keystring = b" ".join([passwd] + bits[:-1])
    52                     key = keys.Key.fromString(keystring)
    53                     rootcap = bits[-1]
    54                     pubkeys[name] = [key]
    55                 else:
    56                     self.passwords[name] = passwd
    57                     rootcap = rest
    58                 self.rootcaps[name] = rootcap
     39        path = abspath_expanduser_unicode(accountfile)
     40        with open_account_file(path) as f:
     41            self.rootcaps, pubkeys = load_account_file(f)
    5942        self._pubkeychecker = SSHPublicKeyChecker(InMemorySSHKeyDB(pubkeys))
    6043
    6144    def _avatarId(self, username):
    6245        return FTPAvatarID(username, self.rootcaps[username])
    63 
    64     def _cbPasswordMatch(self, matched, username):
    65         if matched:
    66             return self._avatarId(username)
    67         raise error.UnauthorizedLogin
    6846
    6947    def requestAvatarId(self, creds):
     
    7250            d.addCallback(self._avatarId)
    7351            return d
    74         elif credentials.IUsernameHashedPassword.providedBy(creds):
    75             return self._checkPassword(creds)
    76         elif credentials.IUsernamePassword.providedBy(creds):
    77             return self._checkPassword(creds)
    78         else:
    79             raise NotImplementedError()
     52        raise NotImplementedError()
    8053
    81     def _checkPassword(self, creds):
    82         """
    83         Determine whether the password in the given credentials matches the
    84         password in the account file.
     54def open_account_file(path):
     55    """
     56    Open and return the accounts file at the given path.
     57    """
     58    return open(path, "rt", encoding="utf-8")
    8559
    86         Returns a Deferred that fires with the username if the password matches
    87         or with an UnauthorizedLogin failure otherwise.
    88         """
    89         try:
    90             correct = self.passwords[creds.username]
    91         except KeyError:
    92             return defer.fail(error.UnauthorizedLogin())
     60def load_account_file(lines):
     61    """
     62    Load credentials from an account file.
    9363
    94         d = defer.maybeDeferred(creds.checkPassword, correct)
    95         d.addCallback(self._cbPasswordMatch, creds.username)
    96         return d
     64    :param lines: An iterable of account lines to load.
     65
     66    :return: See ``create_account_maps``.
     67    """
     68    return create_account_maps(
     69        parse_accounts(
     70            content_lines(
     71                lines,
     72            ),
     73        ),
     74    )
     75
     76def content_lines(lines):
     77    """
     78    Drop empty and commented-out lines (``#``-prefixed) from an iterator of
     79    lines.
     80
     81    :param lines: An iterator of lines to process.
     82
     83    :return: An iterator of lines including only those from ``lines`` that
     84        include content intended to be loaded.
     85    """
     86    for line in lines:
     87        line = line.strip()
     88        if line and not line.startswith("#"):
     89            yield line
     90
     91def parse_accounts(lines):
     92    """
     93    Parse account lines into their components (name, key, rootcap).
     94    """
     95    for line in lines:
     96        name, passwd, rest = line.split(None, 2)
     97        if not passwd.startswith("ssh-"):
     98            raise ValueError(
     99                "Password-based authentication is not supported; "
     100                "configure key-based authentication instead."
     101            )
     102
     103        bits = rest.split()
     104        keystring = " ".join([passwd] + bits[:-1])
     105        key = keys.Key.fromString(keystring)
     106        rootcap = bits[-1]
     107        yield (name, key, rootcap)
     108
     109def create_account_maps(accounts):
     110    """
     111    Build mappings from account names to keys and rootcaps.
     112
      113    :param accounts: An iterator of (name, key, rootcap) tuples.
     114
     115    :return: A tuple of two dicts.  The first maps account names to rootcaps.
     116        The second maps account names to public keys.
     117    """
     118    rootcaps = BytesKeyDict()
     119    pubkeys = BytesKeyDict()
     120    for (name, key, rootcap) in accounts:
     121        name_bytes = name.encode("utf-8")
     122        rootcaps[name_bytes] = rootcap.encode("utf-8")
     123        pubkeys[name_bytes] = [key]
     124    return rootcaps, pubkeys
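
The pipeline above can be exercised end to end with an in-memory "file"; the only fussy part is that ``parse_accounts`` hands the key text to ``keys.Key.fromString``, so the public key must be a real OpenSSH blob. A sketch that generates one on the fly (this assumes modern Twisted, where ``Key`` wraps a ``cryptography`` key object directly):

    from cryptography.hazmat.primitives.asymmetric import rsa
    from twisted.conch.ssh.keys import Key
    from allmydata.frontends.auth import load_account_file

    # Build a syntactically valid "ssh-rsa AAAA..." public key string.
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    pub_line = Key(private_key).public().toString("openssh").decode("ascii")

    rootcaps, pubkeys = load_account_file([
        "# comments and blank lines are dropped by content_lines()",
        "",
        "carol {} URI:DIR2:aaaa:bbbb".format(pub_line),
    ])
    assert rootcaps[b"carol"] == b"URI:DIR2:aaaa:bbbb"
    assert len(pubkeys[b"carol"]) == 1
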
  • src/allmydata/interfaces.py

    r531fe30b r18891d9e  
    5353LeaseCancelSecret = Hash # was used to protect lease cancellation requests
    5454
     55class NoSpace(Exception):
     56    """Storage space was not available for a space-allocating operation."""
    5557
    5658class DataTooLargeError(Exception):
  • src/allmydata/introducer/server.py

    r531fe30b r18891d9e  
    134134        furl = self.tub.registerReference(introducerservice,
    135135                                          furlFile=private_fn)
    136         self.log(" introducer is at %s" % furl, umid="qF2L9A")
     136        self.log(" introducer can be found in {!r}".format(private_fn), umid="qF2L9A")
    137137        self.introducer_url = furl # for tests
    138138
  • src/allmydata/scripts/admin.py

    r531fe30b r18891d9e  
    1919
    2020from twisted.python import usage
    21 from allmydata.scripts.common import BaseOptions
     21from twisted.python.filepath import (
     22    FilePath,
     23)
     24from allmydata.scripts.common import (
     25    BaseOptions,
     26    BasedirOptions,
     27)
     28from allmydata.storage import (
     29    crawler,
     30    expirer,
     31)
    2232
    2333class GenerateKeypairOptions(BaseOptions):
     
    6676    return 0
    6777
     78class MigrateCrawlerOptions(BasedirOptions):
     79
     80    def getSynopsis(self):
     81        return "Usage: tahoe [global-options] admin migrate-crawler"
     82
     83    def getUsage(self, width=None):
     84        t = BasedirOptions.getUsage(self, width)
     85        t += (
     86            "The crawler data is now stored as JSON to avoid"
     87            " potential security issues with pickle files.\n\nIf"
     88            " you are confident the state files in the 'storage/'"
     89            " subdirectory of your node are trustworthy, run this"
     90            " command to upgrade them to JSON.\n\nThe files are:"
     91            " lease_checker.history, lease_checker.state, and"
     92            " bucket_counter.state"
     93        )
     94        return t
     95
     96
     97def migrate_crawler(options):
     98    out = options.stdout
     99    storage = FilePath(options['basedir']).child("storage")
     100
     101    conversions = [
     102        (storage.child("lease_checker.state"), crawler._convert_pickle_state_to_json),
     103        (storage.child("bucket_counter.state"), crawler._convert_pickle_state_to_json),
     104        (storage.child("lease_checker.history"), expirer._convert_pickle_state_to_json),
     105    ]
     106
     107    for fp, converter in conversions:
     108        existed = fp.exists()
     109        newfp = crawler._upgrade_pickle_to_json(fp, converter)
     110        if existed:
     111            print("Converted '{}' to '{}'".format(fp.path, newfp.path), file=out)
     112        else:
     113            if newfp.exists():
     114                print("Already converted: '{}'".format(newfp.path), file=out)
     115            else:
     116                print("Not found: '{}'".format(fp.path), file=out)
     117
     118
    68119class AdminCommand(BaseOptions):
    69120    subCommands = [
     
    72123        ("derive-pubkey", None, DerivePubkeyOptions,
    73124         "Derive a public key from a private key."),
     125        ("migrate-crawler", None, MigrateCrawlerOptions,
     126         "Write the crawler-history data as JSON."),
    74127        ]
    75128    def postOptions(self):
     
    89142    "generate-keypair": print_keypair,
    90143    "derive-pubkey": derive_pubkey,
     144    "migrate-crawler": migrate_crawler,
    91145    }
    92146
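
Invocation follows the synopsis above, and the possible output lines come straight from ``migrate_crawler`` (paths elided here):

    % tahoe admin migrate-crawler --basedir BASEDIR
    Converted '.../storage/lease_checker.state' to '.../storage/lease_checker.state.json'
    Not found: '.../storage/bucket_counter.state'
    Already converted: '.../storage/lease_checker.history.json'
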
  • src/allmydata/scripts/debug.py

    r531fe30b r18891d9e  
    231231            when = format_expiration_time(lease.get_expiration_time())
    232232            print("  expires in %s" % when, file=out)
    233             print("  renew_secret: %s" % str(base32.b2a(lease.renew_secret), "utf-8"), file=out)
    234             print("  cancel_secret: %s" % str(base32.b2a(lease.cancel_secret), "utf-8"), file=out)
     233            print("  renew_secret: %s" % lease.present_renew_secret(), file=out)
     234            print("  cancel_secret: %s" % lease.present_cancel_secret(), file=out)
    235235            print("  secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid), file=out)
    236236    else:
  • src/allmydata/scripts/tahoe_run.py

    r531fe30b r18891d9e  
    2828from allmydata.util.configutil import UnknownConfigError
    2929from allmydata.util.deferredutil import HookMixin
    30 
     30from allmydata.storage.crawler import (
     31    MigratePickleFileError,
     32)
    3133from allmydata.node import (
    3234    PortAssignmentRequired,
     
    165167                elif reason.check(PrivacyError):
    166168                    self.stderr.write("\n{}\n\n".format(reason.value))
     169                elif reason.check(MigratePickleFileError):
     170                    self.stderr.write(
     171                        "Error\nAt least one 'pickle' format file exists.\n"
     172                        "The file is {}\n"
     173                        "You must either delete the pickle-format files"
     174                        " or migrate them using the command:\n"
     175                        "    tahoe admin migrate-crawler --basedir {}\n\n"
     176                        .format(
     177                            reason.value.args[0].path,
     178                            self.basedir,
     179                        )
     180                    )
    167181                else:
    168182                    self.stderr.write("\nUnknown error\n")
  • src/allmydata/storage/common.py

    r531fe30b r18891d9e  
    1717from allmydata.interfaces import DataTooLargeError  # noqa: F401
    1818
    19 class UnknownMutableContainerVersionError(Exception):
    20     pass
    21 class UnknownImmutableContainerVersionError(Exception):
     19class UnknownContainerVersionError(Exception):
     20    def __init__(self, filename, version):
     21        self.filename = filename
     22        self.version = version
     23
     24    def __str__(self):
     25        return "sharefile {!r} had unexpected version {!r}".format(
     26            self.filename,
     27            self.version,
     28        )
     29
     30class UnknownMutableContainerVersionError(UnknownContainerVersionError):
    2231    pass
    2332
     33class UnknownImmutableContainerVersionError(UnknownContainerVersionError):
     34    pass
    2435
    2536def si_b2a(storageindex):
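
Both subclasses now carry structured data instead of a preformatted message, so callers can inspect ``filename`` and ``version`` while ``str()`` still yields a readable report:

    err = UnknownImmutableContainerVersionError("/path/to/share", 7)
    assert str(err) == "sharefile '/path/to/share' had unexpected version 7"
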
  • src/allmydata/storage/crawler.py

    r531fe30b r18891d9e  
    1212from future.utils import PY2, PY3
    1313if PY2:
    14     # We don't import bytes, object, dict, and list just in case they're used,
    15     # so as not to create brittle pickles with random magic objects.
    16     from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min  # noqa: F401
    17 
    18 import os, time, struct
    19 try:
    20     import cPickle as pickle
    21 except ImportError:
    22     import pickle  # type: ignore
     14    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
     15
     16import os
     17import time
     18import json
     19import struct
    2320from twisted.internet import reactor
    2421from twisted.application import service
     22from twisted.python.filepath import FilePath
    2523from allmydata.storage.common import si_b2a
    2624from allmydata.util import fileutil
     
    2826class TimeSliceExceeded(Exception):
    2927    pass
     28
     29
     30class MigratePickleFileError(Exception):
     31    """
     32    A pickle-format file exists (the FilePath to the file will be the
     33    single arg).
     34    """
     35    pass
     36
     37
     38def _convert_cycle_data(state):
     39    """
     40    :param dict state: cycle-to-date or history-item state
     41
     42    :return dict: the state in the JSON form
     43    """
     44
     45    def _convert_expiration_mode(value):
     46        # original is a 4-tuple, with the last element being a 2-tuple
     47        # .. convert both to lists
     48        return [
     49            value[0],
     50            value[1],
     51            value[2],
     52            list(value[3]),
     53        ]
     54
     55    def _convert_lease_age(value):
     56        # if we're in cycle-to-date, this is a dict
     57        if isinstance(value, dict):
     58            return {
     59                "{},{}".format(k[0], k[1]): v
     60                for k, v in value.items()
     61            }
     62        # otherwise, it's a history-item and they're 3-tuples
     63        return [
     64            list(v)
     65            for v in value
     66        ]
     67
     68    converters = {
     69        "configured-expiration-mode": _convert_expiration_mode,
     70        "cycle-start-finish-times": list,
     71        "lease-age-histogram": _convert_lease_age,
     72        "corrupt-shares": lambda value: [
     73            list(x)
     74            for x in value
     75        ],
     76        "leases-per-share-histogram": lambda value: {
     77            str(k): v
     78            for k, v in value.items()
     79        },
     80    }
     81    return {
     82            k: converters.get(k, lambda z: z)(v)
     83            for k, v in state.items()
     84    }
     85
     86
     87def _convert_pickle_state_to_json(state):
     88    """
     89    :param dict state: the pickled state
     90
     91    :return dict: the state in the JSON form
     92    """
     93    assert state["version"] == 1, "Only known version is 1"
     94
     95    converters = {
     96        "cycle-to-date": _convert_cycle_data,
     97    }
     98    return {
     99        k: converters.get(k, lambda x: x)(v)
     100        for k, v in state.items()
     101    }
     102
     103
     104def _upgrade_pickle_to_json(state_path, convert_pickle):
     105    """
     106    :param FilePath state_path: the filepath to ensure is json
     107
     108    :param Callable[dict] convert_pickle: function to change
     109        pickle-style state into JSON-style state
     110
     111    :returns FilePath: the local path where the state is stored
     112
     113    If this state is pickle, convert to the JSON format and return the
     114    JSON path.
     115    """
     116    json_state_path = state_path.siblingExtension(".json")
     117
     118    # if there's no file there at all, we're done because there's
     119    # nothing to upgrade
     120    if not state_path.exists():
     121        return json_state_path
     122
     123    # upgrade the pickle data to JSON
     124    import pickle
     125    with state_path.open("rb") as f:
     126        state = pickle.load(f)
     127    new_state = convert_pickle(state)
     128    _dump_json_to_file(new_state, json_state_path)
     129
     130    # we've written the JSON, delete the pickle
     131    state_path.remove()
     132    return json_state_path
     133
     134
     135def _confirm_json_format(fp):
     136    """
     137    :param FilePath fp: the original (pickle) name of a state file
     138
     139    This confirms that we do _not_ have the pickle-version of a
     140    state-file and _do_ either have nothing, or the JSON version. If
     141    the pickle-version exists, an exception is raised.
     142
     143    :returns FilePath: the JSON name of a state file
     144    """
     145    if fp.path.endswith(".json"):
     146        return fp
     147    jsonfp = fp.siblingExtension(".json")
     148    if fp.exists():
     149        raise MigratePickleFileError(fp)
     150    return jsonfp
     151
     152
     153def _dump_json_to_file(js, afile):
     154    """
     155    Dump the JSON object `js` to the FilePath `afile`
     156    """
     157    with afile.open("wb") as f:
     158        data = json.dumps(js)
     159        if PY2:
     160            f.write(data)
     161        else:
     162            f.write(data.encode("utf8"))
     163
     164
     165class _LeaseStateSerializer(object):
     166    """
     167    Read and write state for LeaseCheckingCrawler. This understands
     168    how to read the legacy pickle format files and upgrade them to the
     169    new JSON format (which will occur automatically).
     170    """
     171
     172    def __init__(self, state_path):
     173        self._path = _confirm_json_format(FilePath(state_path))
     174
     175    def load(self):
     176        """
     177        :returns: deserialized JSON state
     178        """
     179        with self._path.open("rb") as f:
     180            return json.load(f)
     181
     182    def save(self, data):
     183        """
     184        Serialize the given data as JSON into the state-path
     185        :returns: None
     186        """
     187        tmpfile = self._path.siblingExtension(".tmp")
     188        _dump_json_to_file(data, tmpfile)
     189        fileutil.move_into_place(tmpfile.path, self._path.path)
     190        return None
     191
    30192
    31193class ShareCrawler(service.MultiService):
     
    91253        self.server = server
    92254        self.sharedir = server.sharedir
    93         self.statefile = statefile
     255        self._state_serializer = _LeaseStateSerializer(statefile)
    94256        self.prefixes = [si_b2a(struct.pack(">H", i << (16-10)))[:2]
    95257                         for i in range(2**10)]
     
    214376        #                            None if we are sleeping between cycles
    215377        try:
    216             with open(self.statefile, "rb") as f:
    217                 state = pickle.load(f)
     378            state = self._state_serializer.load()
    218379        except Exception:
    219380            state = {"version": 1,
     
    251412            last_complete_prefix = self.prefixes[lcpi]
    252413        self.state["last-complete-prefix"] = last_complete_prefix
    253         tmpfile = self.statefile + ".tmp"
    254         with open(tmpfile, "wb") as f:
    255             # Newer protocols won't work in Python 2; when it is dropped,
    256             # protocol v4 can be used (added in Python 3.4).
    257             pickle.dump(self.state, f, protocol=2)
    258         fileutil.move_into_place(tmpfile, self.statefile)
     414        self._state_serializer.save(self.get_state())
    259415
    260416    def startService(self):
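
A minimal round-trip of the upgrade path added above, using a throwaway file (the helper names are the ones introduced in this changeset):

    import pickle
    from twisted.python.filepath import FilePath
    from allmydata.storage.crawler import (
        _convert_pickle_state_to_json,
        _upgrade_pickle_to_json,
    )

    state_fp = FilePath("lease_checker.state")
    with state_fp.open("wb") as f:
        # protocol=2 matches what the old Python 2-compatible code wrote
        pickle.dump({"version": 1, "cycle-to-date": {}}, f, protocol=2)

    json_fp = _upgrade_pickle_to_json(state_fp, _convert_pickle_state_to_json)
    assert not state_fp.exists()       # the pickle is deleted after conversion
    assert json_fp.basename() == "lease_checker.state.json"
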
  • src/allmydata/storage/expirer.py

    r531fe30b r18891d9e  
    66from future.utils import PY2
    77if PY2:
    8     # We omit anything that might end up in pickle, just in case.
    9     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, range, str, max, min  # noqa: F401
    10 
    11 import time, os, pickle, struct
    12 from allmydata.storage.crawler import ShareCrawler
     8    from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
     9import json
     10import time
     11import os
     12import struct
     13from allmydata.storage.crawler import (
     14    ShareCrawler,
     15    _confirm_json_format,
     16    _convert_cycle_data,
     17    _dump_json_to_file,
     18)
    1319from allmydata.storage.shares import get_share_file
    1420from allmydata.storage.common import UnknownMutableContainerVersionError, \
    1521     UnknownImmutableContainerVersionError
    1622from twisted.python import log as twlog
     23from twisted.python.filepath import FilePath
     24
     25
     26def _convert_pickle_state_to_json(state):
     27    """
     28    Convert a pickle-serialized crawler-history state to the new JSON
     29    format.
     30
     31    :param dict state: the pickled state
     32
     33    :return dict: the state in the JSON form
     34    """
     35    return {
     36        str(k): _convert_cycle_data(v)
     37        for k, v in state.items()
     38    }
     39
     40
     41class _HistorySerializer(object):
     42    """
     43    Serialize the 'history' file of the lease-crawler state. This is
     44    "storage/lease_checker.history" for the pickle or
     45    "storage/lease_checker.history.json" for the new JSON format.
     46    """
     47
     48    def __init__(self, history_path):
     49        self._path = _confirm_json_format(FilePath(history_path))
     50
     51        if not self._path.exists():
     52            _dump_json_to_file({}, self._path)
     53
     54    def load(self):
     55        """
     56        Deserialize the existing data.
     57
     58        :return dict: the existing history state
     59        """
     60        with self._path.open("rb") as f:
     61            history = json.load(f)
     62        return history
     63
     64    def save(self, new_history):
     65        """
     66        Serialize the existing data as JSON.
     67        """
     68        _dump_json_to_file(new_history, self._path)
     69        return None
     70
    1771
    1872class LeaseCheckingCrawler(ShareCrawler):
     
    64118                 cutoff_date, # used if expiration_mode=="cutoff-date"
    65119                 sharetypes):
    66         self.historyfile = historyfile
     120        self._history_serializer = _HistorySerializer(historyfile)
    67121        self.expiration_enabled = expiration_enabled
    68122        self.mode = mode
     
    92146            self.state["cycle-to-date"].setdefault(k, so_far[k])
    93147
    94         # initialize history
    95         if not os.path.exists(self.historyfile):
    96             history = {} # cyclenum -> dict
    97             with open(self.historyfile, "wb") as f:
    98                 # Newer protocols won't work in Python 2; when it is dropped,
    99                 # protocol v4 can be used (added in Python 3.4).
    100                 pickle.dump(history, f, protocol=2)
    101 
    102148    def create_empty_cycle_dict(self):
    103149        recovered = self.create_empty_recovered_dict()
     
    143189                twlog.msg("lease-checker error processing %s" % sharefile)
    144190                twlog.err()
    145                 which = (storage_index_b32, shnum)
     191                which = [storage_index_b32, shnum]
    146192                self.state["cycle-to-date"]["corrupt-shares"].append(which)
    147193                wks = (1, 1, 1, "unknown")
     
    213259
    214260        so_far = self.state["cycle-to-date"]
    215         self.increment(so_far["leases-per-share-histogram"], num_leases, 1)
     261        self.increment(so_far["leases-per-share-histogram"], str(num_leases), 1)
    216262        self.increment_space("examined", s, sharetype)
    217263
     
    292338        start = self.state["current-cycle-start-time"]
    293339        now = time.time()
    294         h["cycle-start-finish-times"] = (start, now)
     340        h["cycle-start-finish-times"] = [start, now]
    295341        h["expiration-enabled"] = self.expiration_enabled
    296         h["configured-expiration-mode"] = (self.mode,
    297                                            self.override_lease_duration,
    298                                            self.cutoff_date,
    299                                            self.sharetypes_to_expire)
     342        h["configured-expiration-mode"] = [
     343            self.mode,
     344            self.override_lease_duration,
     345            self.cutoff_date,
     346            self.sharetypes_to_expire,
     347        ]
    300348
    301349        s = self.state["cycle-to-date"]
     
    315363        h["space-recovered"] = s["space-recovered"].copy()
    316364
    317         with open(self.historyfile, "rb") as f:
    318             history = pickle.load(f)
    319         history[cycle] = h
     365        history = self._history_serializer.load()
     366        history[str(cycle)] = h
    320367        while len(history) > 10:
    321             oldcycles = sorted(history.keys())
    322             del history[oldcycles[0]]
    323         with open(self.historyfile, "wb") as f:
    324             # Newer protocols won't work in Python 2; when it is dropped,
    325             # protocol v4 can be used (added in Python 3.4).
    326             pickle.dump(history, f, protocol=2)
     368            oldcycles = sorted(int(k) for k in history.keys())
     369            del history[str(oldcycles[0])]
     370        self._history_serializer.save(history)
    327371
    328372    def get_state(self):
     
    393437
    394438        state = ShareCrawler.get_state(self) # does a shallow copy
    395         with open(self.historyfile, "rb") as f:
    396             history = pickle.load(f)
    397         state["history"] = history
     439        state["history"] = self._history_serializer.load()
    398440
    399441        if not progress["cycle-in-progress"]:
     
    407449        so_far["lease-age-histogram"] = self.convert_lease_age_histogram(lah)
    408450        so_far["expiration-enabled"] = self.expiration_enabled
    409         so_far["configured-expiration-mode"] = (self.mode,
    410                                                 self.override_lease_duration,
    411                                                 self.cutoff_date,
    412                                                 self.sharetypes_to_expire)
     451        so_far["configured-expiration-mode"] = [
     452            self.mode,
     453            self.override_lease_duration,
     454            self.cutoff_date,
     455            self.sharetypes_to_expire,
     456        ]
    413457
    414458        so_far_sr = so_far["space-recovered"]
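
The switch from tuples and integer keys to lists and string keys is forced by JSON itself; a round-trip would otherwise silently change the types:

    import json

    history = {1: {"cycle-start-finish-times": (0.0, 1.0)}}
    restored = json.loads(json.dumps(history))
    assert list(restored) == ["1"]                                  # int key -> str
    assert restored["1"]["cycle-start-finish-times"] == [0.0, 1.0]  # tuple -> list

Storing ``str(cycle)`` keys and list values up front keeps ``load`` and ``save`` symmetric instead of mutating the state on every cycle.
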
  • src/allmydata/storage/immutable.py

    r531fe30b r18891d9e  
    2222    RIBucketWriter, RIBucketReader, ConflictingWriteError,
    2323    DataTooLargeError,
     24    NoSpace,
    2425)
    2526from allmydata.util import base32, fileutil, log
    2627from allmydata.util.assertutil import precondition
    27 from allmydata.storage.lease import LeaseInfo
    2828from allmydata.storage.common import UnknownImmutableContainerVersionError
     29
     30from .immutable_schema import (
     31    NEWEST_SCHEMA_VERSION,
     32    schema_from_version,
     33)
     34
    2935
    3036# each share file (in storage/shares/$SI/$SHNUM) contains lease information
     
    3440
    3541# The share file has the following layout:
    36 #  0x00: share file version number, four bytes, current version is 1
     42#  0x00: share file version number, four bytes, current version is 2
    3743#  0x04: share data length, four bytes big-endian = A # See Footnote 1 below.
    3844#  0x08: number of leases, four bytes big-endian
     
    4046#  A+0x0c = B: first lease. Lease format is:
    4147#   B+0x00: owner number, 4 bytes big-endian, 0 is reserved for no-owner
    42 #   B+0x04: renew secret, 32 bytes (SHA256)
    43 #   B+0x24: cancel secret, 32 bytes (SHA256)
     48#   B+0x04: renew secret, 32 bytes (SHA256 + blake2b) # See Footnote 2 below.
     49#   B+0x24: cancel secret, 32 bytes (SHA256 + blake2b)
    4450#   B+0x44: expiration time, 4 bytes big-endian seconds-since-epoch
    4551#   B+0x48: next lease, or end of record
     
    5359# modulo 2**32.
    5460
     61# Footnote 2: The change between share file version number 1 and 2 is that
     62# storage of lease secrets is changed from plaintext to hashed.  This change
     63# protects the secrets from compromises of local storage on the server: if a
     64# plaintext cancel secret is somehow exfiltrated from the storage server, an
     65# attacker could use it to cancel that lease and potentially cause user data
     66# to be discarded before intended by the real owner.  As of this comment,
     67# lease cancellation is disabled because there have been at least two bugs
     68# which leak the persisted value of the cancellation secret.  If lease secrets
     69# were stored hashed instead of plaintext then neither of these bugs would
     70# have allowed an attacker to learn a usable cancel secret.
     71#
     72# Clients are free to construct these secrets however they like.  The
     73# Tahoe-LAFS client uses a SHA256-based construction.  The server then uses
     74# blake2b to hash these values for storage so that it retains no persistent
     75# copy of the original secret.
     76#
     77
     78def _fix_lease_count_format(lease_count_format):
     79    """
     80    Turn a single character struct format string into a format string suitable
     81    for use in encoding and decoding the lease count value inside a share
     82    file, if possible.
     83
     84    :param str lease_count_format: A single character format string like
     85        ``"B"`` or ``"L"``.
     86
     87    :raise ValueError: If the given format string is not suitable for use
     88        encoding and decoding a lease count.
     89
     90    :return str: A complete format string which can safely be used to encode
     91        and decode lease counts in a share file.
     92    """
     93    if len(lease_count_format) != 1:
     94        raise ValueError(
     95            "Cannot construct ShareFile with lease_count_format={!r}; "
     96            "format must accept a single value".format(
     97                lease_count_format,
     98            ),
     99        )
     100    # Make it big-endian with standard size so all platforms agree on the
     101    # result.
     102    fixed = ">" + lease_count_format
     103    if struct.calcsize(fixed) > 4:
     104        # There is only room for at most 4 bytes in the share file format so
     105        # we can't allow any larger formats.
     106        raise ValueError(
     107            "Cannot construct ShareFile with lease_count_format={!r}; "
     108            "size must be smaller than size of '>L'".format(
     109                lease_count_format,
     110            ),
     111        )
     112    return fixed
     113
     114
    55115class ShareFile(object):
     116    """
     117    Support interaction with persistent storage of a share.
     118
     119    :ivar str _lease_count_format: The format string which is used to encode
     120        and decode the lease count inside the share file.  As stated in the
     121        comment in this module there is room for at most 4 bytes in this part
     122        of the file.  A format string that works on fewer bytes is allowed to
     123        restrict the number of leases allowed in the share file to a smaller
     124        number than could be supported by using the full 4 bytes.  This is
     125        mostly of interest for testing.
     126    """
    56127    LEASE_SIZE = struct.calcsize(">L32s32sL")
    57128    sharetype = "immutable"
     
    70141        """
    71142        (version,) = struct.unpack(">L", header[:4])
    72         return version == 1
    73 
    74     def __init__(self, filename, max_size=None, create=False):
    75         """ If max_size is not None then I won't allow more than max_size to be written to me. If create=True and max_size must not be None. """
     143        return schema_from_version(version) is not None
     144
     145    def __init__(
     146            self,
     147            filename,
     148            max_size=None,
     149            create=False,
     150            lease_count_format="L",
     151            schema=NEWEST_SCHEMA_VERSION,
     152    ):
     153        """
     154        Initialize a ``ShareFile``.
     155
     156        :param Optional[int] max_size: If given, the maximum number of bytes
     157           that this ``ShareFile`` will accept to be stored.
     158
     159        :param bool create: If ``True``, create the file (and fail if it
     160            exists already).  ``max_size`` must not be ``None`` in this case.
     161            If ``False``, open an existing file for reading.
     162
     163        :param str lease_count_format: A format character to use to encode and
     164            decode the number of leases in the share file.  There are only 4
     165            bytes available in the file so the format must be 4 bytes or
     166            smaller.  If different formats are used at different times with
     167            the same share file, the result will likely be nonsense.
     168
     169            This parameter is intended for the test suite to use to be able to
     170            exercise values near the maximum encodeable value without having
     171            to create billions of leases.
     172
     173        :raise ValueError: If the encoding of ``lease_count_format`` is too
     174            large or if it is not a single format character.
     175        """
     176
    76177        precondition((max_size is not None) or (not create), max_size, create)
     178
     179        self._lease_count_format = _fix_lease_count_format(lease_count_format)
     180        self._lease_count_size = struct.calcsize(self._lease_count_format)
    77181        self.home = filename
    78182        self._max_size = max_size
     
    82186            assert not os.path.exists(self.home)
    83187            fileutil.make_dirs(os.path.dirname(self.home))
    84             # The second field -- the four-byte share data length -- is no
    85             # longer used as of Tahoe v1.3.0, but we continue to write it in
    86             # there in case someone downgrades a storage server from >=
    87             # Tahoe-1.3.0 to < Tahoe-1.3.0, or moves a share file from one
    88             # server to another, etc. We do saturation -- a share data length
    89             # larger than 2**32-1 (what can fit into the field) is marked as
    90             # the largest length that can fit into the field. That way, even
    91             # if this does happen, the old < v1.3.0 server will still allow
    92             # clients to read the first part of the share.
     188            self._schema = schema
    93189            with open(self.home, 'wb') as f:
    94                 f.write(struct.pack(">LLL", 1, min(2**32-1, max_size), 0))
     190                f.write(self._schema.header(max_size))
    95191            self._lease_offset = max_size + 0x0c
    96192            self._num_leases = 0
     
    99195                filesize = os.path.getsize(self.home)
    100196                (version, unused, num_leases) = struct.unpack(">LLL", f.read(0xc))
    101             if version != 1:
    102                 msg = "sharefile %s had version %d but we wanted 1" % \
    103                       (filename, version)
    104                 raise UnknownImmutableContainerVersionError(msg)
     197            self._schema = schema_from_version(version)
     198            if self._schema is None:
     199                raise UnknownImmutableContainerVersionError(filename, version)
    105200            self._num_leases = num_leases
    106201            self._lease_offset = filesize - (num_leases * self.LEASE_SIZE)
     
    137232        f.seek(offset)
    138233        assert f.tell() == offset
    139         f.write(lease_info.to_immutable_data())
     234        f.write(self._schema.lease_serializer.serialize(lease_info))
    140235
    141236    def _read_num_leases(self, f):
    142237        f.seek(0x08)
    143         (num_leases,) = struct.unpack(">L", f.read(4))
     238        (num_leases,) = struct.unpack(
     239            self._lease_count_format,
     240            f.read(self._lease_count_size),
     241        )
    144242        return num_leases
    145243
    146244    def _write_num_leases(self, f, num_leases):
     245        self._write_encoded_num_leases(
     246            f,
     247            struct.pack(self._lease_count_format, num_leases),
     248        )
     249
     250    def _write_encoded_num_leases(self, f, encoded_num_leases):
    147251        f.seek(0x08)
    148         f.write(struct.pack(">L", num_leases))
     252        f.write(encoded_num_leases)
    149253
    150254    def _truncate_leases(self, f, num_leases):
     
    159263                data = f.read(self.LEASE_SIZE)
    160264                if data:
    161                     yield LeaseInfo.from_immutable_data(data)
     265                    yield self._schema.lease_serializer.unserialize(data)
    162266
    163267    def add_lease(self, lease_info):
    164268        with open(self.home, 'rb+') as f:
    165269            num_leases = self._read_num_leases(f)
     270            # Before we write the new lease record, make sure we can encode
     271            # the new lease count.
     272            new_lease_count = struct.pack(self._lease_count_format, num_leases + 1)
    166273            self._write_lease_record(f, num_leases, lease_info)
    167             self._write_num_leases(f, num_leases+1)
     274            self._write_encoded_num_leases(f, new_lease_count)
    168275
    169276    def renew_lease(self, renew_secret, new_expire_time, allow_backdate=False):
     
    190297        raise IndexError("unable to renew non-existent lease")
    191298
    192     def add_or_renew_lease(self, lease_info):
     299    def add_or_renew_lease(self, available_space, lease_info):
     300        """
     301        Renew an existing lease if possible, otherwise allocate a new one.
     302
     303        :param int available_space: The maximum number of bytes of storage to
     304            commit in this operation.  If more than this number of bytes is
     305            required, raise ``NoSpace`` instead.
     306
     307        :param LeaseInfo lease_info: The details of the lease to renew or add.
     308
     309        :raise NoSpace: If more than ``available_space`` bytes is required to
     310            complete the operation.  In this case, no lease is added.
     311
     312        :return: ``None``
     313        """
    193314        try:
    194315            self.renew_lease(lease_info.renew_secret,
    195316                             lease_info.get_expiration_time())
    196317        except IndexError:
     318            if lease_info.immutable_size() > available_space:
     319                raise NoSpace()
    197320            self.add_lease(lease_info)
    198 
    199321
    200322    def cancel_lease(self, cancel_secret):
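
The four-byte budget that ``_fix_lease_count_format`` enforces, and the pack-time overflow check that ``add_lease`` now performs before writing anything, in miniature:

    import struct

    assert struct.calcsize(">L") == 4    # the default: up to 2**32 - 1 leases
    assert struct.calcsize(">B") == 1    # test-friendly: at most 255 leases
    try:
        struct.pack(">B", 256)           # encoding the new count fails first,
    except struct.error:                 # so no partial lease record is written
        pass
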
  • src/allmydata/storage/lease.py

    r531fe30b r18891d9e  
    1616import attr
    1717
     18from zope.interface import (
     19    Interface,
     20    implementer,
     21)
     22
     23from twisted.python.components import (
     24    proxyForInterface,
     25)
     26
    1827from allmydata.util.hashutil import timing_safe_compare
    19 
     28from allmydata.util import base32
     29
     30# struct format for representation of a lease in an immutable share
     31IMMUTABLE_FORMAT = ">L32s32sL"
     32
     33# struct format for representation of a lease in a mutable share
     34MUTABLE_FORMAT = ">LL32s32s20s"
     35
     36
     37class ILeaseInfo(Interface):
     38    """
     39    Represent a marker attached to a share that indicates that share should be
     40    retained for some amount of time.
     41
     42    Typically clients will create and renew leases on their shares as a way to
     43    inform storage servers that there is still interest in those shares.  A
     44    share may have more than one lease.  If all leases on a share have
     45    expiration times in the past then the storage server may take this as a
     46    strong hint that no one is interested in the share anymore and therefore
     47    the share may be deleted to reclaim the space.
     48    """
     49    def renew(new_expire_time):
     50        """
     51        Create a new ``ILeaseInfo`` with the given expiration time.
     52
     53        :param Union[int, float] new_expire_time: The expiration time the new
     54            ``ILeaseInfo`` will have.
     55
     56        :return: The new ``ILeaseInfo`` provider with the new expiration time.
     57        """
     58
     59    def get_expiration_time():
     60        """
     61        :return Union[int, float]: this lease's expiration time
     62        """
     63
     64    def get_grant_renew_time_time():
     65        """
     66        :return Union[int, float]: a guess about the last time this lease was
     67            renewed
     68        """
     69
     70    def get_age():
     71        """
     72        :return Union[int, float]: a guess about how long it has been since this
     73            lease was renewed
     74        """
     75
     76    def to_immutable_data():
     77        """
     78        :return bytes: a serialized representation of this lease suitable for
     79            inclusion in an immutable container
     80        """
     81
     82    def to_mutable_data():
     83        """
     84        :return bytes: a serialized representation of this lease suitable for
     85            inclusion in a mutable container
     86        """
     87
     88    def immutable_size():
     89        """
     90        :return int: the size of the serialized representation of this lease in an
     91            immutable container
     92        """
     93
     94    def mutable_size():
     95        """
     96        :return int: the size of the serialized representation of this lease in a
     97            mutable container
     98        """
     99
     100    def is_renew_secret(candidate_secret):
     101        """
     102        :return bool: ``True`` if the given byte string is this lease's renew
     103            secret, ``False`` otherwise
     104        """
     105
     106    def present_renew_secret():
     107        """
     108        :return str: Text which could reasonably be shown to a person representing
     109            this lease's renew secret.
     110        """
     111
     112    def is_cancel_secret(candidate_secret):
     113        """
     114        :return bool: ``True`` if the given byte string is this lease's cancel
     115            secret, ``False`` otherwise
     116        """
     117
     118    def present_cancel_secret():
     119        """
     120        :return str: Text which could reasonably be shown to a person representing
     121            this lease's cancel secret.
     122        """
     123
     124
     125@implementer(ILeaseInfo)
    20126@attr.s(frozen=True)
    21127class LeaseInfo(object):
     
    81187        return timing_safe_compare(self.renew_secret, candidate_secret)
    82188
     189    def present_renew_secret(self):
     190        # type: () -> str
     191        """
     192        Return the renew secret, base32-encoded.
     193        """
     194        return str(base32.b2a(self.renew_secret), "utf-8")
     195
    83196    def is_cancel_secret(self, candidate_secret):
    84197        # type: (bytes) -> bool
     
    90203        """
    91204        return timing_safe_compare(self.cancel_secret, candidate_secret)
     205
     206    def present_cancel_secret(self):
     207        # type: () -> str
     208        """
     209        Return the cancel secret, base32-encoded.
     210        """
     211        return str(base32.b2a(self.cancel_secret), "utf-8")
    92212
    93213    def get_grant_renew_time_time(self):
     
    111231            "expiration_time",
    112232        ]
    113         values = struct.unpack(">L32s32sL", data)
     233        values = struct.unpack(IMMUTABLE_FORMAT, data)
    114234        return cls(nodeid=None, **dict(zip(names, values)))
    115235
     236    def immutable_size(self):
     237        """
     238        :return int: The size, in bytes, of the representation of this lease in an
     239            immutable share file.
     240        """
     241        return struct.calcsize(IMMUTABLE_FORMAT)
     242
     243    def mutable_size(self):
     244        """
     245        :return int: The size, in bytes, of the representation of this lease in a
     246            mutable share file.
     247        """
     248        return struct.calcsize(MUTABLE_FORMAT)
     249
    116250    def to_immutable_data(self):
    117         return struct.pack(">L32s32sL",
     251        return struct.pack(IMMUTABLE_FORMAT,
    118252                           self.owner_num,
    119253                           self.renew_secret, self.cancel_secret,
     
    121255
    122256    def to_mutable_data(self):
    123         return struct.pack(">LL32s32s20s",
     257        return struct.pack(MUTABLE_FORMAT,
    124258                           self.owner_num,
    125259                           int(self._expiration_time),
     
    141275            "nodeid",
    142276        ]
    143         values = struct.unpack(">LL32s32s20s", data)
     277        values = struct.unpack(MUTABLE_FORMAT, data)
    144278        return cls(**dict(zip(names, values)))
     279
     280
     281@attr.s(frozen=True)
     282class HashedLeaseInfo(proxyForInterface(ILeaseInfo, "_lease_info")): # type: ignore # unsupported dynamic base class
     283    """
     284    A ``HashedLeaseInfo`` wraps lease information in which the secrets have
     285    been hashed.
     286    """
     287    _lease_info = attr.ib()
     288    _hash = attr.ib()
     289
     290    # proxyForInterface will take care of forwarding all methods on ILeaseInfo
     291    # to `_lease_info`.  Here we override a few of those methods to adjust
     292    # their behavior to make them suitable for use with hashed secrets.
     293
     294    def renew(self, new_expire_time):
     295        # Preserve the HashedLeaseInfo wrapper around the renewed LeaseInfo.
     296        return attr.assoc(
     297            self,
     298            _lease_info=super(HashedLeaseInfo, self).renew(new_expire_time),
     299        )
     300
     301    def is_renew_secret(self, candidate_secret):
     302        # type: (bytes) -> bool
     303        """
     304        Hash the candidate secret and compare the result to the stored hashed
     305        secret.
     306        """
     307        return super(HashedLeaseInfo, self).is_renew_secret(self._hash(candidate_secret))
     308
     309    def present_renew_secret(self):
     310        # type: () -> str
     311        """
     312        Present the hash of the secret with a marker indicating it is a hash.
     313        """
     314        return u"hash:" + super(HashedLeaseInfo, self).present_renew_secret()
     315
     316    def is_cancel_secret(self, candidate_secret):
     317        # type: (bytes) -> bool
     318        """
     319        Hash the candidate secret and compare the result to the stored hashed
     320        secret.
     321        """
     322        if isinstance(candidate_secret, _HashedCancelSecret):
     323            # Someone read it off of this object in this project - probably
     324            # the lease crawler - and is just trying to use it to identify
     325            # which lease it wants to operate on.  Avoid re-hashing the value.
     326            #
      327            # It is important that this codepath is only available internally
     328            # for this process to talk to itself.  If it were to be exposed to
     329            # clients over the network, they could just provide the hashed
     330            # value to avoid having to ever learn the original value.
     331            hashed_candidate = candidate_secret.hashed_value
     332        else:
     333            # It is not yet hashed so hash it.
     334            hashed_candidate = self._hash(candidate_secret)
     335
     336        return super(HashedLeaseInfo, self).is_cancel_secret(hashed_candidate)
     337
     338    def present_cancel_secret(self):
     339        # type: () -> str
     340        """
     341        Present the hash of the secret with a marker indicating it is a hash.
     342        """
     343        return u"hash:" + super(HashedLeaseInfo, self).present_cancel_secret()
     344
     345    @property
     346    def owner_num(self):
     347        return self._lease_info.owner_num
     348
     349    @property
     350    def nodeid(self):
     351        return self._lease_info.nodeid
     352
     353    @property
     354    def cancel_secret(self):
     355        """
     356        Give back an opaque wrapper around the hashed cancel secret which can
      357        later be presented for a successful equality comparison.
     358        """
     359        # We don't *have* the cancel secret.  We hashed it and threw away the
     360        # original.  That's good.  It does mean that some code that runs
     361        # in-process with the storage service (LeaseCheckingCrawler) runs into
     362        # some difficulty.  That code wants to cancel leases and does so using
     363        # the same interface that faces storage clients (or would face them,
     364        # if lease cancellation were exposed).
     365        #
     366        # Since it can't use the hashed secret to cancel a lease (that's the
     367        # point of the hashing) and we don't have the unhashed secret to give
     368        # it, instead we give it a marker that `cancel_lease` will recognize.
     369        # On recognizing it, if the hashed value given matches the hashed
     370        # value stored it is considered a match and the lease can be
     371        # cancelled.
     372        #
     373        # This isn't great.  Maybe the internal and external consumers of
     374        # cancellation should use different interfaces.
     375        return _HashedCancelSecret(self._lease_info.cancel_secret)
     376
     377
     378@attr.s(frozen=True)
     379class _HashedCancelSecret(object):
     380    """
     381    ``_HashedCancelSecret`` is a marker type for an already-hashed lease
     382    cancel secret that lets internal lease cancellers bypass the hash-based
     383    protection that's imposed on external lease cancellers.
     384
     385    :ivar bytes hashed_value: The already-hashed secret.
     386    """
     387    hashed_value = attr.ib()
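
A standalone sketch may make the internal/external split above easier to follow. Here ``_hash`` is a hypothetical stand-in for the one-way function the storage server applies to secrets, and plain ``==`` stands in for the timing-safe comparison the real code uses:

    import hashlib

    def _hash(secret):
        # Hypothetical stand-in for the server's secret-hashing function.
        return hashlib.sha256(secret).digest()

    def is_cancel_secret(stored_hashed_secret, candidate_secret):
        # Mirrors HashedLeaseInfo.is_cancel_secret: an internal caller hands
        # back the _HashedCancelSecret marker it read off the lease, so the
        # already-hashed value is compared directly; an external caller can
        # only supply raw bytes, which get hashed before comparison.
        if isinstance(candidate_secret, _HashedCancelSecret):
            hashed_candidate = candidate_secret.hashed_value
        else:
            hashed_candidate = _hash(candidate_secret)
        return hashed_candidate == stored_hashed_secret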
  • TabularUnified src/allmydata/storage/mutable.py

    r531fe30b r18891d9e  
    1414import os, stat, struct
    1515
    16 from allmydata.interfaces import BadWriteEnablerError
     16from allmydata.interfaces import (
     17    BadWriteEnablerError,
     18    NoSpace,
     19)
    1720from allmydata.util import idlib, log
    1821from allmydata.util.assertutil import precondition
     
    2225     DataTooLargeError
    2326from allmydata.mutable.layout import MAX_MUTABLE_SHARE_SIZE
    24 
     27from .mutable_schema import (
     28    NEWEST_SCHEMA_VERSION,
     29    schema_from_header,
     30)
    2531
    2632# the MutableShareFile is like the ShareFile, but used for mutable data. It
     
    6268    # binary data to reduce the chance that a regular text file will look
    6369    # like a sharefile.
    64     MAGIC = b"Tahoe mutable container v1\n" + b"\x75\x09\x44\x03\x8e"
    65     assert len(MAGIC) == 32
    66     assert isinstance(MAGIC, bytes)
    6770    MAX_SIZE = MAX_MUTABLE_SHARE_SIZE
    6871    # TODO: decide upon a policy for max share size
     
    8083            ``False`` otherwise.
    8184        """
    82         return header.startswith(cls.MAGIC)
    83 
    84     def __init__(self, filename, parent=None):
     85        return schema_from_header(header) is not None
     86
     87    def __init__(self, filename, parent=None, schema=NEWEST_SCHEMA_VERSION):
    8588        self.home = filename
    8689        if os.path.exists(self.home):
    8790            # we don't cache anything, just check the magic
    8891            with open(self.home, 'rb') as f:
    89                 data = f.read(self.HEADER_SIZE)
    90             (magic,
    91              write_enabler_nodeid, write_enabler,
    92              data_length, extra_least_offset) = \
    93              struct.unpack(">32s20s32sQQ", data)
    94             if not self.is_valid_header(data):
    95                 msg = "sharefile %s had magic '%r' but we wanted '%r'" % \
    96                       (filename, magic, self.MAGIC)
    97                 raise UnknownMutableContainerVersionError(msg)
     92                header = f.read(self.HEADER_SIZE)
     93            self._schema = schema_from_header(header)
     94            if self._schema is None:
     95                raise UnknownMutableContainerVersionError(filename, header)
     96        else:
     97            self._schema = schema
    9898        self.parent = parent # for logging
    9999
     
    103103    def create(self, my_nodeid, write_enabler):
    104104        assert not os.path.exists(self.home)
    105         data_length = 0
    106         extra_lease_offset = (self.HEADER_SIZE
    107                               + 4 * self.LEASE_SIZE
    108                               + data_length)
    109         assert extra_lease_offset == self.DATA_OFFSET # true at creation
    110         num_extra_leases = 0
    111105        with open(self.home, 'wb') as f:
    112             header = struct.pack(
    113                 ">32s20s32sQQ",
    114                 self.MAGIC, my_nodeid, write_enabler,
    115                 data_length, extra_lease_offset,
    116             )
    117             leases = (b"\x00" * self.LEASE_SIZE) * 4
    118             f.write(header + leases)
    119             # data goes here, empty after creation
    120             f.write(struct.pack(">L", num_extra_leases))
    121             # extra leases go here, none at creation
     106            f.write(self._schema.header(my_nodeid, write_enabler))
    122107
    123108    def unlink(self):
     
    135120    def _read_share_data(self, f, offset, length):
    136121        precondition(offset >= 0)
     122        precondition(length >= 0)
    137123        data_length = self._read_data_length(f)
    138124        if offset+length > data_length:
     
    251237        f.seek(offset)
    252238        assert f.tell() == offset
    253         f.write(lease_info.to_mutable_data())
     239        f.write(self._schema.lease_serializer.serialize(lease_info))
    254240
    255241    def _read_lease_record(self, f, lease_number):
     
    268254        assert f.tell() == offset
    269255        data = f.read(self.LEASE_SIZE)
    270         lease_info = LeaseInfo.from_mutable_data(data)
     256        lease_info = self._schema.lease_serializer.unserialize(data)
    271257        if lease_info.owner_num == 0:
    272258            return None
     
    303289                return
    304290
    305     def add_lease(self, lease_info):
     291    def add_lease(self, available_space, lease_info):
     292        """
     293        Add a new lease to this share.
     294
     295        :param int available_space: The maximum number of bytes of storage to
     296            commit in this operation.  If more than this number of bytes is
     297            required, raise ``NoSpace`` instead.
     298
     299        :raise NoSpace: If more than ``available_space`` bytes is required to
     300            complete the operation.  In this case, no lease is added.
     301
     302        :return: ``None``
     303        """
    306304        precondition(lease_info.owner_num != 0) # 0 means "no lease here"
    307305        with open(self.home, 'rb+') as f:
     
    311309                self._write_lease_record(f, empty_slot, lease_info)
    312310            else:
     311                if lease_info.mutable_size() > available_space:
     312                    raise NoSpace()
    313313                self._write_lease_record(f, num_lease_slots, lease_info)
    314314
     
    346346        raise IndexError(msg)
    347347
    348     def add_or_renew_lease(self, lease_info):
     348    def add_or_renew_lease(self, available_space, lease_info):
    349349        precondition(lease_info.owner_num != 0) # 0 means "no lease here"
    350350        try:
     
    352352                             lease_info.get_expiration_time())
    353353        except IndexError:
    354             self.add_lease(lease_info)
     354            self.add_lease(available_space, lease_info)
    355355
    356356    def cancel_lease(self, cancel_secret):
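
In practice a caller follows the pattern ``StorageServer._add_or_renew_leases`` uses later in this changeset: pass the server's current free space and treat ``NoSpace`` as "nothing was written". A hedged sketch (``try_add_lease`` is a hypothetical helper, not part of this changeset):

    from allmydata.interfaces import NoSpace

    def try_add_lease(share, available_space, lease_info):
        # A renewal rewrites an existing slot in place and never needs extra
        # space; a new lease must fit within available_space or add_lease
        # raises NoSpace before writing anything, leaving the share file
        # unchanged.
        try:
            share.add_or_renew_lease(available_space, lease_info)
            return True
        except NoSpace:
            return False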
  • TabularUnified src/allmydata/storage/server.py

    r531fe30b r18891d9e  
    6363    """
    6464    name = 'storage'
     65    # only the tests change this to anything else
    6566    LeaseCheckerClass = LeaseCheckingCrawler
    6667
     
    8384        fileutil.make_dirs(sharedir)
    8485        self.sharedir = sharedir
    85         # we don't actually create the corruption-advisory dir until necessary
    8686        self.corruption_advisory_dir = os.path.join(storedir,
    8787                                                    "corruption-advisories")
     88        fileutil.make_dirs(self.corruption_advisory_dir)
    8889        self.reserved_space = int(reserved_space)
    8990        self.no_storage = discard_storage
     
    302303        start = self._clock.seconds()
    303304        self.count("allocate")
    304         alreadygot = set()
     305        alreadygot = {}
    305306        bucketwriters = {} # k: shnum, v: BucketWriter
    306307        si_dir = storage_index_to_dir(storage_index)
     
    334335        # file, they'll want us to hold leases for this file.
    335336        for (shnum, fn) in self._get_bucket_shares(storage_index):
    336             alreadygot.add(shnum)
     337            alreadygot[shnum] = ShareFile(fn)
    337338            if renew_leases:
    338339                sf = ShareFile(fn)
     
    370371
    371372        self.add_latency("allocate", self._clock.seconds() - start)
    372         return alreadygot, bucketwriters
     373        return set(alreadygot), bucketwriters
    373374
    374375    def remote_allocate_buckets(self, storage_index,
     
    627628        Put the given lease onto the given shares.
    628629
    629         :param dict[int, MutableShareFile] shares: The shares to put the lease
    630             onto.
     630        :param Iterable[Union[MutableShareFile, ShareFile]] shares: The shares
     631            to put the lease onto.
    631632
    632633        :param LeaseInfo lease_info: The lease to put on the shares.
    633634        """
    634         for share in six.viewvalues(shares):
    635             share.add_or_renew_lease(lease_info)
     635        for share in shares:
     636            share.add_or_renew_lease(self.get_available_space(), lease_info)
    636637
    637638    def slot_testv_and_readv_and_writev(  # type: ignore # warner/foolscap#78
     
    693694            if renew_leases:
    694695                lease_info = self._make_lease_info(renew_secret, cancel_secret)
    695                 self._add_or_renew_leases(remaining_shares, lease_info)
     696                self._add_or_renew_leases(remaining_shares.values(), lease_info)
    696697
    697698        # all done
     
    748749        return datavs
    749750
     751    def _share_exists(self, storage_index, shnum):
     752        """
     753        Check local share storage to see if a matching share exists.
     754
     755        :param bytes storage_index: The storage index to inspect.
     756        :param int shnum: The share number to check for.
     757
     758        :return bool: ``True`` if a share with the given number exists at the
     759            given storage index, ``False`` otherwise.
     760        """
     761        for existing_sharenum, ignored in self._get_bucket_shares(storage_index):
     762            if existing_sharenum == shnum:
     763                return True
     764        return False
     765
    750766    def remote_advise_corrupt_share(self, share_type, storage_index, shnum,
    751767                                    reason):
     
    754770        assert isinstance(share_type, bytes)
    755771        assert isinstance(reason, bytes), "%r is not bytes" % (reason,)
    756         fileutil.make_dirs(self.corruption_advisory_dir)
    757         now = time_format.iso_utc(sep="T")
     772
    758773        si_s = si_b2a(storage_index)
    759         # windows can't handle colons in the filename
    760         fn = os.path.join(
    761             self.corruption_advisory_dir,
    762             ("%s--%s-%d" % (now, str(si_s, "utf-8"), shnum)).replace(":","")
    763         )
    764         with open(fn, "w") as f:
    765             f.write("report: Share Corruption\n")
    766             f.write("type: %s\n" % bytes_to_native_str(share_type))
    767             f.write("storage_index: %s\n" % bytes_to_native_str(si_s))
    768             f.write("share_number: %d\n" % shnum)
    769             f.write("\n")
    770             f.write(bytes_to_native_str(reason))
    771             f.write("\n")
     774
     775        if not self._share_exists(storage_index, shnum):
     776            log.msg(
     777                format=(
     778                    "discarding client corruption claim for %(si)s/%(shnum)d "
     779                    "which I do not have"
     780                ),
     781                si=si_s,
     782                shnum=shnum,
     783            )
     784            return
     785
    772786        log.msg(format=("client claims corruption in (%(share_type)s) " +
    773787                        "%(si)s-%(shnum)d: %(reason)s"),
    774788                share_type=share_type, si=si_s, shnum=shnum, reason=reason,
    775789                level=log.SCARY, umid="SGx2fA")
     790
     791        report = render_corruption_report(share_type, si_s, shnum, reason)
     792        if len(report) > self.get_available_space():
     793            return None
     794
     795        now = time_format.iso_utc(sep="T")
     796        report_path = get_corruption_report_path(
     797            self.corruption_advisory_dir,
     798            now,
     799            si_s,
     800            shnum,
     801        )
     802        with open(report_path, "w") as f:
     803            f.write(report)
     804
    776805        return None
     806
     807CORRUPTION_REPORT_FORMAT = """\
     808report: Share Corruption
     809type: {type}
     810storage_index: {storage_index}
     811share_number: {share_number}
     812
     813{reason}
     814
     815"""
     816
     817def render_corruption_report(share_type, si_s, shnum, reason):
     818    """
     819    Create a string that explains a corruption report using freeform text.
     820
     821    :param bytes share_type: The type of the share which the report is about.
     822
     823    :param bytes si_s: The encoded representation of the storage index which
     824        the report is about.
     825
     826    :param int shnum: The share number which the report is about.
     827
     828    :param bytes reason: The reason given by the client for the corruption
     829        report.
     830    """
     831    return CORRUPTION_REPORT_FORMAT.format(
     832        type=bytes_to_native_str(share_type),
     833        storage_index=bytes_to_native_str(si_s),
     834        share_number=shnum,
     835        reason=bytes_to_native_str(reason),
     836    )
     837
     838def get_corruption_report_path(base_dir, now, si_s, shnum):
     839    """
     840    Determine the path to which a certain corruption report should be written.
     841
     842    :param str base_dir: The directory beneath which to construct the path.
     843
     844    :param str now: The time of the report.
     845
     846    :param str si_s: The encoded representation of the storage index which the
     847        report is about.
     848
     849    :param int shnum: The share number which the report is about.
     850
     851    :return str: A path to which the report can be written.
     852    """
     853    # windows can't handle colons in the filename
     854    return os.path.join(
     855        base_dir,
     856        ("%s--%s-%d" % (now, str(si_s, "utf-8"), shnum)).replace(":","")
     857    )
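
A small usage example for the two helpers above (all inputs made up; ``si_s`` is the base32-encoded storage index as bytes):

    report = render_corruption_report(
        b"immutable", b"si0base32", 0, b"This share smells funny.\n")
    path = get_corruption_report_path(
        "storage/corruption-advisories",
        "2021-12-02T21:44:59",  # iso_utc(sep="T"); colons are stripped
        b"si0base32",
        0,
    )
    # path ends with "2021-12-02T214459--si0base32-0", with no colons, so
    # the filename is legal on Windows.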
  • TabularUnified src/allmydata/test/common.py

    r531fe30b r18891d9e  
    8989    MDMF_VERSION,
    9090    IAddressFamily,
     91    NoSpace,
    9192)
    9293from allmydata.check_results import CheckResults, CheckAndRepairResults, \
     
    140141    ""
    141142)
     143
     144@attr.s
     145class FakeDisk(object):
     146    """
     147    Just enough of a disk to be able to report free / used information.
     148    """
     149    total = attr.ib()
     150    used = attr.ib()
     151
     152    def use(self, num_bytes):
     153        """
     154        Mark some amount of available bytes as used (and no longer available).
     155
     156        :param int num_bytes: The number of bytes to use.
     157
     158        :raise NoSpace: If there are fewer bytes available than ``num_bytes``.
     159
     160        :return: ``None``
     161        """
     162        if num_bytes > self.total - self.used:
     163            raise NoSpace()
     164        self.used += num_bytes
     165
     166    @property
     167    def available(self):
     168        return self.total - self.used
     169
     170    def get_disk_stats(self, whichdir, reserved_space):
     171        avail = self.available
     172        return {
     173            'total': self.total,
     174            'free_for_root': avail,
     175            'free_for_nonroot': avail,
     176            'used': self.used,
     177            'avail': avail - reserved_space,
     178        }
    142179
    143180
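
The storage tests later in this changeset patch ``FakeDisk`` in for the real disk-stats function; trimmed from a ``TestCase`` body, the pattern looks like:

    disk = FakeDisk(total=1024, used=0)
    self.patch(fileutil, "get_disk_stats", disk.get_disk_stats)

    # Consume everything that's left so the next allocation must fail.
    disk.use(disk.available)
    assert disk.get_disk_stats("ignored", reserved_space=0)["avail"] == 0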
  • TabularUnified src/allmydata/test/strategies.py

    r531fe30b r18891d9e  
    1717    builds,
    1818    binary,
     19    integers,
    1920)
    2021
     
    120121        mdmf_capabilities(),
    121122    )
     123
     124def offsets(min_value=0, max_value=2 ** 16):
     125    """
     126    Build ``int`` values that could be used as valid offsets into a sequence
     127    (such as share data in a share file).
     128    """
     129    return integers(min_value, max_value)
     130
     131def lengths(min_value=1, max_value=2 ** 16):
     132    """
     133    Build ``int`` values that could be used as valid lengths of data (such as
     134    share data in a share file).
     135    """
     136    return integers(min_value, max_value)
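
Both builders plug straight into Hypothesis's ``@given``; a hypothetical property (not one from this changeset) using them:

    from hypothesis import given

    @given(offset=offsets(), length=lengths())
    def test_bounds(offset, length):
        # The defaults above bound offsets to [0, 2**16] and lengths to
        # [1, 2**16].
        assert 0 <= offset <= 2 ** 16
        assert 1 <= length <= 2 ** 16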
  • TabularUnified src/allmydata/test/test_auth.py

    r531fe30b r18891d9e  
    99from future.utils import PY2
    1010if PY2:
    11     from future.builtins import str  # noqa: F401
     11    from future.builtins import str, open  # noqa: F401
     12
     13from hypothesis import (
     14    given,
     15)
     16from hypothesis.strategies import (
     17    text,
     18    characters,
     19    lists,
     20)
    1221
    1322from twisted.trial import unittest
     
    3948""")
    4049
    41 DUMMY_ACCOUNTS = u"""\
    42 alice herpassword URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
    43 bob sekrit URI:DIR2:bbbbbbbbbbbbbbbbbbbbbbbbbb:2222222222222222222222222222222222222222222222222222
    44 
    45 # dennis password URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
     50DUMMY_KEY_DSA = keys.Key.fromString("""\
     51-----BEGIN OPENSSH PRIVATE KEY-----
     52b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABsQAAAAdzc2gtZH
     53NzAAAAgQDKMh/ELaiP21LYRBuPbUy7dUhv/XZwV7aS1LzxSP+KaJvtDOei8X76XEAfkqX+
     54aGh9eup+BLkezrV6LlpO9uPzhY8ChlKpkvw5PZKv/2agSrVxZyG7yEzHNtSBQXE6qNMwIk
     55N/ycXLGCqyAhQSzRhLz9ETNaslRDLo7YyVWkiuAQAAABUA5nTatFKux5EqZS4EarMWFRBU
     56i1UAAACAFpkkK+JsPixSTPyn0DNMoGKA0Klqy8h61Ds6pws+4+aJQptUBshpwNw1ypo7MO
     57+goDZy3wwdWtURTPGMgesNdEfxp8L2/kqE4vpMK0myoczCqOiWMeNB/x1AStbSkBI8WmHW
     582htgsC01xbaix/FrA3edK8WEyv+oIxlbV1FkrPkAAACANb0EpCc8uoR4/32rO2JLsbcLBw
     59H5wc2khe7AKkIa9kUknRIRvoCZUtXF5XuXXdRmnpVEm2KcsLdtZjip43asQcqgt0Kz3nuF
     60kAf7bI98G1waFUimcCSPsal4kCmW2HC11sg/BWOt5qczX/0/3xVxpo6juUeBq9ncnFTvPX
     615fOlEAAAHoJkFqHiZBah4AAAAHc3NoLWRzcwAAAIEAyjIfxC2oj9tS2EQbj21Mu3VIb/12
     62cFe2ktS88Uj/imib7QznovF++lxAH5Kl/mhofXrqfgS5Hs61ei5aTvbj84WPAoZSqZL8OT
     632Sr/9moEq1cWchu8hMxzbUgUFxOqjTMCJDf8nFyxgqsgIUEs0YS8/REzWrJUQy6O2MlVpI
     64rgEAAAAVAOZ02rRSrseRKmUuBGqzFhUQVItVAAAAgBaZJCvibD4sUkz8p9AzTKBigNCpas
     65vIetQ7OqcLPuPmiUKbVAbIacDcNcqaOzDvoKA2ct8MHVrVEUzxjIHrDXRH8afC9v5KhOL6
     66TCtJsqHMwqjoljHjQf8dQErW0pASPFph1tobYLAtNcW2osfxawN3nSvFhMr/qCMZW1dRZK
     67z5AAAAgDW9BKQnPLqEeP99qztiS7G3CwcB+cHNpIXuwCpCGvZFJJ0SEb6AmVLVxeV7l13U
     68Zp6VRJtinLC3bWY4qeN2rEHKoLdCs957hZAH+2yPfBtcGhVIpnAkj7GpeJAplthwtdbIPw
     69VjreanM1/9P98VcaaOo7lHgavZ3JxU7z1+XzpRAAAAFQC7360pZLbv7PFt4BPFJ8zAHxAe
     70QwAAAA5leGFya3VuQGJhcnlvbgECAwQ=
     71-----END OPENSSH PRIVATE KEY-----
     72""")
     73
     74ACCOUNTS = u"""\
     75# dennis {key} URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111
    4676carol {key} URI:DIR2:cccccccccccccccccccccccccc:3333333333333333333333333333333333333333333333333333
    4777""".format(key=str(DUMMY_KEY.public().toString("openssh"), "ascii")).encode("ascii")
    4878
     79# Python str.splitlines considers NEXT LINE, LINE SEPARATOR, and PARAGRAPH
      80# SEPARATOR to be line separators, too.  However, file.readlines() does not...
     81LINE_SEPARATORS = (
     82    '\x0a', # line feed
     83    '\x0b', # vertical tab
     84    '\x0c', # form feed
     85    '\x0d', # carriage return
     86)
     87
     88class AccountFileParserTests(unittest.TestCase):
     89    """
     90    Tests for ``load_account_file`` and its helper functions.
     91    """
     92    @given(lists(
     93        text(alphabet=characters(
     94            blacklist_categories=(
     95                # Surrogates are an encoding trick to help out UTF-16.
     96                # They're not necessary to represent any non-surrogate code
     97                # point in unicode.  They're also not legal individually but
     98                # only in pairs.
     99                'Cs',
     100            ),
     101            # Exclude all our line separators too.
     102            blacklist_characters=("\n", "\r"),
     103        )),
     104    ))
     105    def test_ignore_comments(self, lines):
     106        """
     107        ``auth.content_lines`` filters out lines beginning with `#` and empty
     108        lines.
     109        """
     110        expected = set()
     111
     112        # It's not clear that real files and StringIO behave sufficiently
     113        # similarly to use the latter instead of the former here.  In
     114        # particular, they seem to have distinct and incompatible
     115        # line-splitting rules.
     116        bufpath = self.mktemp()
     117        with open(bufpath, "wt", encoding="utf-8") as buf:
     118            for line in lines:
     119                stripped = line.strip()
     120                is_content = stripped and not stripped.startswith("#")
     121                if is_content:
     122                    expected.add(stripped)
     123                buf.write(line + "\n")
     124
     125        with auth.open_account_file(bufpath) as buf:
     126            actual = set(auth.content_lines(buf))
     127
     128        self.assertEqual(expected, actual)
     129
     130    def test_parse_accounts(self):
     131        """
     132        ``auth.parse_accounts`` accepts an iterator of account lines and returns
     133        an iterator of structured account data.
     134        """
     135        alice_key = DUMMY_KEY.public().toString("openssh").decode("utf-8")
     136        alice_cap = "URI:DIR2:aaaa:1111"
     137
     138        bob_key = DUMMY_KEY_DSA.public().toString("openssh").decode("utf-8")
     139        bob_cap = "URI:DIR2:aaaa:2222"
     140        self.assertEqual(
     141            list(auth.parse_accounts([
     142                "alice {} {}".format(alice_key, alice_cap),
     143                "bob {} {}".format(bob_key, bob_cap),
     144            ])),
     145            [
     146                ("alice", DUMMY_KEY.public(), alice_cap),
     147                ("bob", DUMMY_KEY_DSA.public(), bob_cap),
     148            ],
     149        )
     150
     151    def test_parse_accounts_rejects_passwords(self):
     152        """
     153        The iterator returned by ``auth.parse_accounts`` raises ``ValueError``
     154        when processing reaches a line that has what looks like a password
     155        instead of an ssh key.
     156        """
     157        with self.assertRaises(ValueError):
     158            list(auth.parse_accounts(["alice apassword URI:DIR2:aaaa:1111"]))
     159
     160    def test_create_account_maps(self):
     161        """
     162        ``auth.create_account_maps`` accepts an iterator of structured account
     163        data and returns two mappings: one from account name to rootcap, the
     164        other from account name to public keys.
     165        """
     166        alice_cap = "URI:DIR2:aaaa:1111"
     167        alice_key = DUMMY_KEY.public()
     168        bob_cap = "URI:DIR2:aaaa:2222"
     169        bob_key = DUMMY_KEY_DSA.public()
     170        accounts = [
     171            ("alice", alice_key, alice_cap),
     172            ("bob", bob_key, bob_cap),
     173        ]
     174        self.assertEqual(
     175            auth.create_account_maps(accounts),
     176            ({
     177                b"alice": alice_cap.encode("utf-8"),
     178                b"bob": bob_cap.encode("utf-8"),
     179            },
     180             {
     181                 b"alice": [alice_key],
     182                 b"bob": [bob_key],
     183             }),
     184        )
     185
     186    def test_load_account_file(self):
     187        """
     188        ``auth.load_account_file`` accepts an iterator of serialized account lines
     189        and returns two mappings: one from account name to rootcap, the other
     190        from account name to public keys.
     191        """
     192        alice_key = DUMMY_KEY.public().toString("openssh").decode("utf-8")
     193        alice_cap = "URI:DIR2:aaaa:1111"
     194
     195        bob_key = DUMMY_KEY_DSA.public().toString("openssh").decode("utf-8")
     196        bob_cap = "URI:DIR2:aaaa:2222"
     197
     198        accounts = [
     199            "alice {} {}".format(alice_key, alice_cap),
     200            "bob {} {}".format(bob_key, bob_cap),
     201            "# carol {} {}".format(alice_key, alice_cap),
     202        ]
     203
     204        self.assertEqual(
     205            auth.load_account_file(accounts),
     206            ({
     207                b"alice": alice_cap.encode("utf-8"),
     208                b"bob": bob_cap.encode("utf-8"),
     209            },
     210             {
     211                 b"alice": [DUMMY_KEY.public()],
     212                 b"bob": [DUMMY_KEY_DSA.public()],
     213             }),
     214        )
     215
     216
    49217class AccountFileCheckerKeyTests(unittest.TestCase):
    50218    """
     
    53221    def setUp(self):
    54222        self.account_file = filepath.FilePath(self.mktemp())
    55         self.account_file.setContent(DUMMY_ACCOUNTS)
     223        self.account_file.setContent(ACCOUNTS)
    56224        abspath = abspath_expanduser_unicode(str(self.account_file.path))
    57225        self.checker = auth.AccountFileChecker(None, abspath)
    58226
    59     def test_unknown_user_ssh(self):
     227    def test_unknown_user(self):
    60228        """
    61229        AccountFileChecker.requestAvatarId returns a Deferred that fires with
     
    65233        key_credentials = credentials.SSHPrivateKey(
    66234            b"dennis", b"md5", None, None, None)
    67         avatarId = self.checker.requestAvatarId(key_credentials)
    68         return self.assertFailure(avatarId, error.UnauthorizedLogin)
    69 
    70     def test_unknown_user_password(self):
    71         """
    72         AccountFileChecker.requestAvatarId returns a Deferred that fires with
    73         UnauthorizedLogin if called with an SSHPrivateKey object with a
    74         username not present in the account file.
    75 
    76         We use a commented out user, so we're also checking that comments are
    77         skipped.
    78         """
    79         key_credentials = credentials.UsernamePassword(b"dennis", b"password")
    80         d = self.checker.requestAvatarId(key_credentials)
    81         return self.assertFailure(d, error.UnauthorizedLogin)
    82 
    83     def test_password_auth_user_with_ssh_key(self):
    84         """
    85         AccountFileChecker.requestAvatarId returns a Deferred that fires with
    86         UnauthorizedLogin if called with an SSHPrivateKey object for a username
    87         only associated with a password in the account file.
    88         """
    89         key_credentials = credentials.SSHPrivateKey(
    90             b"alice", b"md5", None, None, None)
    91         avatarId = self.checker.requestAvatarId(key_credentials)
    92         return self.assertFailure(avatarId, error.UnauthorizedLogin)
    93 
    94     def test_password_auth_user_with_correct_password(self):
    95         """
    96         AccountFileChecker.requestAvatarId returns a Deferred that fires with
    97         the user if the correct password is given.
    98         """
    99         key_credentials = credentials.UsernamePassword(b"alice", b"herpassword")
    100         d = self.checker.requestAvatarId(key_credentials)
    101         def authenticated(avatarId):
    102             self.assertEqual(
    103                 (b"alice",
    104                  b"URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111"),
    105                 (avatarId.username, avatarId.rootcap))
    106         return d
    107 
    108     def test_password_auth_user_with_correct_hashed_password(self):
    109         """
    110         AccountFileChecker.requestAvatarId returns a Deferred that fires with
    111         the user if the correct password is given in hashed form.
    112         """
    113         key_credentials = credentials.UsernameHashedPassword(b"alice", b"herpassword")
    114         d = self.checker.requestAvatarId(key_credentials)
    115         def authenticated(avatarId):
    116             self.assertEqual(
    117                 (b"alice",
    118                  b"URI:DIR2:aaaaaaaaaaaaaaaaaaaaaaaaaa:1111111111111111111111111111111111111111111111111111"),
    119                 (avatarId.username, avatarId.rootcap))
    120         return d
    121 
    122     def test_password_auth_user_with_wrong_password(self):
    123         """
    124         AccountFileChecker.requestAvatarId returns a Deferred that fires with
    125         UnauthorizedLogin if the wrong password is given.
    126         """
    127         key_credentials = credentials.UsernamePassword(b"alice", b"WRONG")
    128235        avatarId = self.checker.requestAvatarId(key_credentials)
    129236        return self.assertFailure(avatarId, error.UnauthorizedLogin)
  • TabularUnified src/allmydata/test/test_download.py

    r531fe30b r18891d9e  
    499499        return d
    500500
    501     def test_simultaneous_onefails_onecancelled(self):
     501    def test_simul_1fail_1cancel(self):
    502502        # This exercises an mplayer behavior in ticket #1154. I believe that
    503503        # mplayer made two simultaneous webapi GET requests: first one for an
     
    11141114                d.addCallback(lambda ign: self.restore_all_shares(self.shares))
    11151115                d.addCallback(fireEventually)
    1116             corrupt_values = [(3, 2, "no-sh2"),
    1117                               (15, 2, "need-4th"), # share looks v2
    1118                               ]
     1116            corrupt_values = [
     1117                # Make the container version for share number 2 look
     1118                # unsupported.  If you add support for immutable share file
      1119                # version numbers much past 16 million then you will have to
     1120                # update this test.  Also maybe you have other problems.
     1121                (1, 255, "no-sh2"),
     1122                # Make the immutable share number 2 (not the container, the
     1123                # thing inside the container) look unsupported.  Ditto the
     1124                # above about version numbers in the ballpark of 16 million.
     1125                (13, 255, "need-4th"),
     1126            ]
    11191127            for i,newvalue,expected in corrupt_values:
    11201128                d.addCallback(self._corrupt_set, imm_uri, i, newvalue)
  • TabularUnified src/allmydata/test/test_storage.py

    r531fe30b r18891d9e  
    1414from six import ensure_str
    1515
     16from io import (
     17    BytesIO,
     18)
    1619import time
    1720import os.path
     
    2023import struct
    2124import shutil
     25from functools import partial
    2226from uuid import uuid4
     27
     28from testtools.matchers import (
     29    HasLength,
     30)
    2331
    2432from twisted.trial import unittest
     
    3543from allmydata.storage.shares import get_share_file
    3644from allmydata.storage.mutable import MutableShareFile
     45from allmydata.storage.mutable_schema import (
     46    ALL_SCHEMAS as ALL_MUTABLE_SCHEMAS,
     47)
    3748from allmydata.storage.immutable import BucketWriter, BucketReader, ShareFile
     49from allmydata.storage.immutable_schema import (
     50    ALL_SCHEMAS as ALL_IMMUTABLE_SCHEMAS,
     51)
    3852from allmydata.storage.common import storage_index_to_dir, \
    3953     UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError, \
     
    5771    _StorageServer,
    5872)
    59 from .common import LoggingServiceParent, ShouldFailMixin
     73from .common import (
     74    LoggingServiceParent,
     75    ShouldFailMixin,
     76    FakeDisk,
     77    SyncTestCase,
     78)
    6079from .common_util import FakeCanary
     80from .common_storage import (
     81    upload_immutable,
     82    upload_mutable,
     83)
     84from .strategies import (
     85    offsets,
     86    lengths,
     87)
    6188
    6289
     
    102129    def register_producer(self, producer):
    103130        pass
     131
    104132
    105133class Bucket(unittest.TestCase):
     
    750778        e = self.failUnlessRaises(UnknownImmutableContainerVersionError,
    751779                                  ss.remote_get_buckets, b"si1")
    752         self.failUnlessIn(" had version 0 but we wanted 1", str(e))
     780        self.assertEqual(e.filename, fn)
     781        self.assertEqual(e.version, 0)
     782        self.assertIn("had unexpected version 0", str(e))
    753783
    754784    def test_disconnect(self):
     
    778808        self.failUnlessEqual(set(writers.keys()), set([0,1,2]))
    779809
     810    def test_reserved_space_immutable_lease(self):
     811        """
     812        If there is not enough available space to store an additional lease on an
     813        immutable share then ``remote_add_lease`` fails with ``NoSpace`` when
     814        an attempt is made to use it to create a new lease.
     815        """
     816        disk = FakeDisk(total=1024, used=0)
     817        self.patch(fileutil, "get_disk_stats", disk.get_disk_stats)
     818
     819        ss = self.create("test_reserved_space_immutable_lease")
     820
     821        storage_index = b"x" * 16
     822        renew_secret = b"r" * 32
     823        cancel_secret = b"c" * 32
     824        shares = {0: b"y" * 500}
     825        upload_immutable(ss, storage_index, renew_secret, cancel_secret, shares)
     826
     827        # use up all the available space
     828        disk.use(disk.available)
     829
     830        # Different secrets to produce a different lease, not a renewal.
     831        renew_secret = b"R" * 32
     832        cancel_secret = b"C" * 32
     833        with self.assertRaises(interfaces.NoSpace):
     834            ss.remote_add_lease(storage_index, renew_secret, cancel_secret)
     835
     836    def test_reserved_space_mutable_lease(self):
     837        """
     838        If there is not enough available space to store an additional lease on a
     839        mutable share then ``remote_add_lease`` fails with ``NoSpace`` when an
     840        attempt is made to use it to create a new lease.
     841        """
     842        disk = FakeDisk(total=1024, used=0)
     843        self.patch(fileutil, "get_disk_stats", disk.get_disk_stats)
     844
     845        ss = self.create("test_reserved_space_mutable_lease")
     846
     847        renew_secrets = iter(
     848            "{}{}".format("r" * 31, i).encode("ascii")
     849            for i
     850            in range(5)
     851        )
     852
     853        storage_index = b"x" * 16
     854        write_enabler = b"w" * 32
     855        cancel_secret = b"c" * 32
     856        secrets = (write_enabler, next(renew_secrets), cancel_secret)
     857        shares = {0: b"y" * 500}
     858        upload_mutable(ss, storage_index, secrets, shares)
     859
     860        # use up all the available space
     861        disk.use(disk.available)
     862
     863        # The upload created one lease.  There is room for three more leases
     864        # in the share header.  Even if we're out of disk space, on a boring
     865        # enough filesystem we can write these.
     866        for i in range(3):
     867            ss.remote_add_lease(storage_index, next(renew_secrets), cancel_secret)
     868
     869        # Having used all of the space for leases in the header, we would have
     870        # to allocate storage for the next lease.  Since there is no space
     871        # available, this must fail instead.
     872        with self.assertRaises(interfaces.NoSpace):
     873            ss.remote_add_lease(storage_index, next(renew_secrets), cancel_secret)
     874
     875
    780876    def test_reserved_space(self):
    781877        reserved = 10000
     
    9061002        # Create a bucket:
    9071003        rs0, cs0 = self.create_bucket_5_shares(ss, b"si0")
     1004
     1005        # Upload of an immutable implies creation of a single lease with the
     1006        # supplied secrets.
    9081007        (lease,) = ss.get_leases(b"si0")
    9091008        self.assertTrue(lease.is_renew_secret(rs0))
     
    10371136        self.failUnlessEqual(b[0].remote_read(0, 25), b"\x00" * 25)
    10381137
     1138    def test_reserved_space_advise_corruption(self):
     1139        """
     1140        If there is no available space then ``remote_advise_corrupt_share`` does
     1141        not write a corruption report.
     1142        """
     1143        disk = FakeDisk(total=1024, used=1024)
     1144        self.patch(fileutil, "get_disk_stats", disk.get_disk_stats)
     1145
     1146        workdir = self.workdir("test_reserved_space_advise_corruption")
     1147        ss = StorageServer(workdir, b"\x00" * 20, discard_storage=True)
     1148        ss.setServiceParent(self.sparent)
     1149
     1150        upload_immutable(ss, b"si0", b"r" * 32, b"c" * 32, {0: b""})
     1151        ss.remote_advise_corrupt_share(b"immutable", b"si0", 0,
     1152                                       b"This share smells funny.\n")
     1153
     1154        self.assertEqual(
     1155            [],
     1156            os.listdir(ss.corruption_advisory_dir),
     1157        )
     1158
    10391159    def test_advise_corruption(self):
    10401160        workdir = self.workdir("test_advise_corruption")
     
    10431163
    10441164        si0_s = base32.b2a(b"si0")
     1165        upload_immutable(ss, b"si0", b"r" * 32, b"c" * 32, {0: b""})
    10451166        ss.remote_advise_corrupt_share(b"immutable", b"si0", 0,
    10461167                                       b"This share smells funny.\n")
     
    10811202        self.failUnlessIn(b"This share tastes like dust.", report)
    10821203
     1204    def test_advise_corruption_missing(self):
     1205        """
     1206        If a corruption advisory is received for a share that is not present on
     1207        this server then it is not persisted.
     1208        """
     1209        workdir = self.workdir("test_advise_corruption_missing")
     1210        ss = StorageServer(workdir, b"\x00" * 20, discard_storage=True)
     1211        ss.setServiceParent(self.sparent)
     1212
     1213        # Upload one share for this storage index
     1214        upload_immutable(ss, b"si0", b"r" * 32, b"c" * 32, {0: b""})
     1215
     1216        # And try to submit a corruption advisory about a different share
     1217        ss.remote_advise_corrupt_share(b"immutable", b"si0", 1,
     1218                                       b"This share smells funny.\n")
     1219
     1220        self.assertEqual(
     1221            [],
     1222            os.listdir(ss.corruption_advisory_dir),
     1223        )
    10831224
    10841225
     
    11501291        e = self.failUnlessRaises(UnknownMutableContainerVersionError,
    11511292                                  read, b"si1", [0], [(0,10)])
    1152         self.failUnlessIn(" had magic ", str(e))
    1153         self.failUnlessIn(" but we wanted ", str(e))
     1293        self.assertEqual(e.filename, fn)
     1294        self.assertTrue(e.version.startswith(b"BAD MAGIC"))
     1295        self.assertIn("had unexpected version", str(e))
     1296        self.assertIn("BAD MAGIC", str(e))
    11541297
    11551298    def test_container_size(self):
     
    13741517
    13751518    def compare_leases_without_timestamps(self, leases_a, leases_b):
    1376         self.failUnlessEqual(len(leases_a), len(leases_b))
    1377         for i in range(len(leases_a)):
    1378             a = leases_a[i]
    1379             b = leases_b[i]
    1380             self.failUnlessEqual(a.owner_num,       b.owner_num)
    1381             self.failUnlessEqual(a.renew_secret,    b.renew_secret)
    1382             self.failUnlessEqual(a.cancel_secret,   b.cancel_secret)
    1383             self.failUnlessEqual(a.nodeid,          b.nodeid)
     1519        """
     1520        Assert that, except for expiration times, ``leases_a`` contains the same
     1521        lease information as ``leases_b``.
     1522        """
     1523        for a, b in zip(leases_a, leases_b):
     1524            # The leases aren't always of the same type (though of course
     1525            # corresponding elements in the two lists should be of the same
     1526            # type as each other) so it's inconvenient to just reach in and
     1527            # normalize the expiration timestamp.  We don't want to call
     1528            # `renew` on both objects to normalize the expiration timestamp in
     1529            # case `renew` is broken and gives us back equal outputs from
     1530            # non-equal inputs (expiration timestamp aside).  It seems
     1531            # reasonably safe to use `renew` to make _one_ of the timestamps
     1532            # equal to the other though.
     1533            self.assertEqual(
     1534                a.renew(b.get_expiration_time()),
     1535                b,
     1536            )
     1537        self.assertEqual(len(leases_a), len(leases_b))
    13841538
    13851539    def test_leases(self):
     
    31473301        self.failUnless(output["get"]["99_9_percentile"] is None, output)
    31483302
     3303immutable_schemas = strategies.sampled_from(list(ALL_IMMUTABLE_SCHEMAS))
    31493304
    31503305class ShareFileTests(unittest.TestCase):
    31513306    """Tests for allmydata.storage.immutable.ShareFile."""
    31523307
    3153     def get_sharefile(self):
    3154         sf = ShareFile(self.mktemp(), max_size=1000, create=True)
     3308    def get_sharefile(self, **kwargs):
     3309        sf = ShareFile(self.mktemp(), max_size=1000, create=True, **kwargs)
    31553310        sf.write_share_data(0, b"abc")
    31563311        sf.write_share_data(2, b"DEF")
     
    31583313        return sf
    31593314
    3160     def test_read_write(self):
     3315    @given(immutable_schemas)
     3316    def test_read_write(self, schema):
    31613317        """Basic writes can be read."""
    3162         sf = self.get_sharefile()
     3318        sf = self.get_sharefile(schema=schema)
    31633319        self.assertEqual(sf.read_share_data(0, 3), b"abD")
    31643320        self.assertEqual(sf.read_share_data(1, 4), b"bDEF")
    31653321
    3166     def test_reads_beyond_file_end(self):
     3322    @given(immutable_schemas)
     3323    def test_reads_beyond_file_end(self, schema):
    31673324        """Reads beyond the file size are truncated."""
    3168         sf = self.get_sharefile()
     3325        sf = self.get_sharefile(schema=schema)
    31693326        self.assertEqual(sf.read_share_data(0, 10), b"abDEF")
    31703327        self.assertEqual(sf.read_share_data(5, 10), b"")
    31713328
    3172     def test_too_large_write(self):
     3329    @given(immutable_schemas)
     3330    def test_too_large_write(self, schema):
    31733331        """Can't do write larger than file size."""
    3174         sf = self.get_sharefile()
     3332        sf = self.get_sharefile(schema=schema)
    31753333        with self.assertRaises(DataTooLargeError):
    31763334            sf.write_share_data(0, b"x" * 3000)
    31773335
    3178     def test_no_leases_cancelled(self):
     3336    @given(immutable_schemas)
     3337    def test_no_leases_cancelled(self, schema):
    31793338        """If no leases were cancelled, IndexError is raised."""
    3180         sf = self.get_sharefile()
     3339        sf = self.get_sharefile(schema=schema)
    31813340        with self.assertRaises(IndexError):
    31823341            sf.cancel_lease(b"garbage")
    31833342
    3184     def test_renew_secret(self):
    3185         """
    3186         A lease loaded from a share file can have its renew secret verified.
     3343    @given(immutable_schemas)
     3344    def test_long_lease_count_format(self, schema):
     3345        """
     3346        ``ShareFile.__init__`` raises ``ValueError`` if the lease count format
     3347        given is longer than one character.
     3348        """
     3349        with self.assertRaises(ValueError):
     3350            self.get_sharefile(schema=schema, lease_count_format="BB")
     3351
     3352    @given(immutable_schemas)
     3353    def test_large_lease_count_format(self, schema):
     3354        """
     3355        ``ShareFile.__init__`` raises ``ValueError`` if the lease count format
     3356        encodes to a size larger than 8 bytes.
     3357        """
     3358        with self.assertRaises(ValueError):
     3359            self.get_sharefile(schema=schema, lease_count_format="Q")
     3360
     3361    @given(immutable_schemas)
     3362    def test_avoid_lease_overflow(self, schema):
     3363        """
     3364        If the share file already has the maximum number of leases supported then
     3365        ``ShareFile.add_lease`` raises ``struct.error`` and makes no changes
     3366        to the share file contents.
     3367        """
     3368        make_lease = partial(
     3369            LeaseInfo,
     3370            renew_secret=b"r" * 32,
     3371            cancel_secret=b"c" * 32,
     3372            expiration_time=2 ** 31,
     3373        )
     3374        # Make it a little easier to reach the condition by limiting the
     3375        # number of leases to only 255.
     3376        sf = self.get_sharefile(schema=schema, lease_count_format="B")
     3377
     3378        # Add the leases.
     3379        for i in range(2 ** 8 - 1):
     3380            lease = make_lease(owner_num=i)
     3381            sf.add_lease(lease)
     3382
     3383        # Capture the state of the share file at this point so we can
     3384        # determine whether the next operation modifies it or not.
     3385        with open(sf.home, "rb") as f:
     3386            before_data = f.read()
     3387
     3388        # It is not possible to add a 256th lease.
     3389        lease = make_lease(owner_num=256)
     3390        with self.assertRaises(struct.error):
     3391            sf.add_lease(lease)
     3392
     3393        # Compare the share file state to what we captured earlier.  Any
     3394        # change is a bug.
     3395        with open(sf.home, "rb") as f:
     3396            after_data = f.read()
     3397
     3398        self.assertEqual(before_data, after_data)
     3399
     3400    @given(immutable_schemas)
     3401    def test_renew_secret(self, schema):
     3402        """
     3403        A lease loaded from an immutable share file at any schema version can have
     3404        its renew secret verified.
    31873405        """
    31883406        renew_secret = b"r" * 32
     
    31903408        expiration_time = 2 ** 31
    31913409
    3192         sf = self.get_sharefile()
     3410        sf = self.get_sharefile(schema=schema)
    31933411        lease = LeaseInfo(
    31943412            owner_num=0,
     
    32013419        self.assertTrue(loaded_lease.is_renew_secret(renew_secret))
    32023420
    3203     def test_cancel_secret(self):
    3204         """
    3205         A lease loaded from a share file can have its cancel secret verified.
     3421    @given(immutable_schemas)
     3422    def test_cancel_secret(self, schema):
     3423        """
     3424        A lease loaded from an immutable share file at any schema version can have
     3425        its cancel secret verified.
    32063426        """
    32073427        renew_secret = b"r" * 32
     
    32093429        expiration_time = 2 ** 31
    32103430
    3211         sf = self.get_sharefile()
     3431        sf = self.get_sharefile(schema=schema)
    32123432        lease = LeaseInfo(
    32133433            owner_num=0,
     
    32203440        self.assertTrue(loaded_lease.is_cancel_secret(cancel_secret))
    32213441
    3222 
    3223 class LeaseInfoTests(unittest.TestCase):
     3442mutable_schemas = strategies.sampled_from(list(ALL_MUTABLE_SCHEMAS))
     3443
     3444class MutableShareFileTests(unittest.TestCase):
     3445    """
     3446    Tests for allmydata.storage.mutable.MutableShareFile.
     3447    """
     3448    def get_sharefile(self, **kwargs):
     3449        return MutableShareFile(self.mktemp(), **kwargs)
     3450
     3451    @given(
     3452        schema=mutable_schemas,
     3453        nodeid=strategies.just(b"x" * 20),
     3454        write_enabler=strategies.just(b"y" * 32),
     3455        datav=strategies.lists(
     3456            # Limit the max size of these so we don't write *crazy* amounts of
     3457            # data to disk.
     3458            strategies.tuples(offsets(), strategies.binary(max_size=2 ** 8)),
     3459            max_size=2 ** 8,
     3460        ),
     3461        new_length=offsets(),
     3462    )
     3463    def test_readv_reads_share_data(self, schema, nodeid, write_enabler, datav, new_length):
     3464        """
     3465        ``MutableShareFile.readv`` returns bytes from the share data portion
     3466        of the share file.
     3467        """
     3468        sf = self.get_sharefile(schema=schema)
     3469        sf.create(my_nodeid=nodeid, write_enabler=write_enabler)
     3470        sf.writev(datav=datav, new_length=new_length)
     3471
     3472        # Apply all of the writes to a simple in-memory buffer so we can
     3473        # resolve the final state of the share data.  In particular, this
     3474        # helps deal with overlapping writes which otherwise make it tricky to
     3475        # figure out what data to expect to be able to read back.
     3476        buf = BytesIO()
     3477        for (offset, data) in datav:
     3478            buf.seek(offset)
     3479            buf.write(data)
     3480        buf.truncate(new_length)
     3481
     3482        # Using that buffer, determine the expected result of a readv for all
     3483        # of the data just written.
     3484        def read_from_buf(offset, length):
     3485            buf.seek(offset)
     3486            return buf.read(length)
     3487        expected_data = list(
     3488            read_from_buf(offset, len(data))
     3489            for (offset, data)
     3490            in datav
     3491        )
     3492
     3493        # Perform a read that gives back all of the data written to the share
     3494        # file.
     3495        read_vectors = list((offset, len(data)) for (offset, data) in datav)
     3496        read_data = sf.readv(read_vectors)
     3497
     3498        # Make sure the read reproduces the value we computed using our local
     3499        # buffer.
     3500        self.assertEqual(expected_data, read_data)
     3501
     3502    @given(
     3503        schema=mutable_schemas,
     3504        nodeid=strategies.just(b"x" * 20),
     3505        write_enabler=strategies.just(b"y" * 32),
     3506        readv=strategies.lists(strategies.tuples(offsets(), lengths()), min_size=1),
     3507        random=strategies.randoms(),
     3508    )
     3509    def test_readv_rejects_negative_length(self, schema, nodeid, write_enabler, readv, random):
     3510        """
      3511        If a negative offset or length is given to ``MutableShareFile.readv``
      3512        in a read vector then ``AssertionError`` is raised.
     3513        """
     3514        # Pick a read vector to break with a negative value
     3515        readv_index = random.randrange(len(readv))
      3516        # Decide whether we're breaking the offset or the length
     3517        offset_or_length = random.randrange(2)
     3518
     3519        # A helper function that will take a valid offset and length and break
     3520        # one of them.
     3521        def corrupt(break_length, offset, length):
     3522            if break_length:
     3523                # length must not be 0 or flipping the sign does nothing
     3524                # length must not be negative or flipping the sign *fixes* it
     3525                assert length > 0
     3526                return (offset, -length)
     3527            else:
     3528                if offset > 0:
     3529                    # We can break offset just by flipping the sign.
     3530                    return (-offset, length)
     3531                else:
      3532                    # Otherwise it has to be zero.  A negative offset
      3533                    # here would mean the offsets() strategy is broken.
     3534                    assert offset == 0
     3535                    # Since we can't just flip the sign on 0 to break things,
     3536                    # replace a 0 offset with a simple negative value.  All
     3537                    # other negative values will be tested by the `offset > 0`
     3538                    # case above.
     3539                    return (-1, length)
     3540
     3541        # Break the read vector very slightly!
     3542        broken_readv = readv[:]
     3543        broken_readv[readv_index] = corrupt(
     3544            offset_or_length,
     3545            *broken_readv[readv_index]
     3546        )
     3547
     3548        sf = self.get_sharefile(schema=schema)
     3549        sf.create(my_nodeid=nodeid, write_enabler=write_enabler)
     3550
     3551        # A read with a broken read vector is an error.
     3552        with self.assertRaises(AssertionError):
     3553            sf.readv(broken_readv)
     3554
     3555
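
For context on why ``readv`` must assert rather than pass bad vectors
through: with naive Python slicing, a negative offset or length fails
silently instead of raising. A short sketch of the failure mode the
assertion guards against (``naive_read`` is hypothetical, not the real
implementation)::

    data = b"0123456789"

    def naive_read(offset, length):
        # A naive slice-based read with no validation at all.
        return data[offset:offset + length]

    assert naive_read(2, 3) == b"234"   # well-formed vector
    assert naive_read(4, -2) == b""     # negative length: silently empty
    assert naive_read(-3, 2) == b"78"   # negative offset: reads from the end
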
     3556class LeaseInfoTests(SyncTestCase):
    32243557    """
    32253558    Tests for ``allmydata.storage.lease.LeaseInfo``.
     
    32803613        )
    32813614        self.assertFalse(lease.is_cancel_secret(renew_secret))
     3615
     3616    @given(
     3617        strategies.tuples(
     3618            strategies.integers(min_value=0, max_value=2 ** 31 - 1),
     3619            strategies.binary(min_size=32, max_size=32),
     3620            strategies.binary(min_size=32, max_size=32),
     3621            strategies.integers(min_value=0, max_value=2 ** 31 - 1),
     3622            strategies.binary(min_size=20, max_size=20),
     3623        ),
     3624    )
     3625    def test_immutable_size(self, initializer_args):
     3626        """
     3627        ``LeaseInfo.immutable_size`` returns the length of the result of
     3628        ``LeaseInfo.to_immutable_data``.
     3629
     3630        ``LeaseInfo.mutable_size`` returns the length of the result of
     3631        ``LeaseInfo.to_mutable_data``.
     3632        """
     3633        info = LeaseInfo(*initializer_args)
     3634        self.expectThat(
     3635            info.to_immutable_data(),
     3636            HasLength(info.immutable_size()),
     3637        )
     3638        self.expectThat(
     3639            info.to_mutable_data(),
     3640            HasLength(info.mutable_size()),
     3641        )
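
The property above pins each size accessor to the length of the
corresponding serialization. One way to make that property hold by
construction is to derive both from a single ``struct`` format; a toy
sketch under that assumption (the field layout is illustrative, not
``LeaseInfo``'s actual wire format)::

    import struct

    _FORMAT = ">L32s32sL"  # expiration, renew secret, cancel secret, owner

    class ToyLease(object):
        def __init__(self, expiration, renew_secret, cancel_secret, owner):
            self._fields = (expiration, renew_secret, cancel_secret, owner)

        def to_immutable_data(self):
            return struct.pack(_FORMAT, *self._fields)

        def immutable_size(self):
            # Derived from the same format string used to serialize,
            # so the size can never drift from the data.
            return struct.calcsize(_FORMAT)

    lease = ToyLease(0, b"r" * 32, b"c" * 32, 1)
    assert len(lease.to_immutable_data()) == lease.immutable_size()
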
  • TabularUnified src/allmydata/test/test_storage_web.py

    r531fe30b r18891d9e  
    2020import re
    2121import json
     22from unittest import skipIf
     23from six.moves import StringIO
    2224
    2325from twisted.trial import unittest
    24 
    2526from twisted.internet import defer
    2627from twisted.application import service
    2728from twisted.web.template import flattenString
     29from twisted.python.filepath import FilePath
     30from twisted.python.runtime import platform
    2831
    2932from foolscap.api import fireEventually
     
    3235     UnknownMutableContainerVersionError, UnknownImmutableContainerVersionError
    3336from allmydata.storage.server import StorageServer
    34 from allmydata.storage.crawler import BucketCountingCrawler
    35 from allmydata.storage.expirer import LeaseCheckingCrawler
     37from allmydata.storage.crawler import (
     38    BucketCountingCrawler,
     39    _LeaseStateSerializer,
     40)
     41from allmydata.storage.expirer import (
     42    LeaseCheckingCrawler,
     43    _HistorySerializer,
     44)
    3645from allmydata.web.storage import (
    3746    StorageStatus,
    3847    StorageStatusElement,
    3948    remove_prefix
     49)
     50from allmydata.scripts.admin import (
     51    migrate_crawler,
     52)
     53from allmydata.scripts.runner import (
     54    Options,
    4055)
    4156from .common_util import FakeCanary
     
    377392            self.failUnlessEqual(len(lah), 1)
    378393            self.failUnlessEqual(lah, [ (0.0, DAY, 1) ] )
    379             self.failUnlessEqual(so_far["leases-per-share-histogram"], {1: 1})
     394            self.failUnlessEqual(so_far["leases-per-share-histogram"], {"1": 1})
    380395            self.failUnlessEqual(so_far["corrupt-shares"], [])
    381396            sr1 = so_far["space-recovered"]
     
    428443            self.failIf("estimated-remaining-cycle" in s)
    429444            self.failIf("estimated-current-cycle" in s)
    430             last = s["history"][0]
     445            last = s["history"]["0"]
    431446            self.failUnlessIn("cycle-start-finish-times", last)
    432             self.failUnlessEqual(type(last["cycle-start-finish-times"]), tuple)
     447            self.failUnlessEqual(type(last["cycle-start-finish-times"]), list)
    433448            self.failUnlessEqual(last["expiration-enabled"], False)
    434449            self.failUnlessIn("configured-expiration-mode", last)
     
    438453            self.failUnlessEqual(type(lah), list)
    439454            self.failUnlessEqual(len(lah), 1)
    440             self.failUnlessEqual(lah, [ (0.0, DAY, 6) ] )
    441 
    442             self.failUnlessEqual(last["leases-per-share-histogram"], {1: 2, 2: 2})
     455            self.failUnlessEqual(lah, [ [0.0, DAY, 6] ] )
     456
     457            self.failUnlessEqual(last["leases-per-share-histogram"], {"1": 2, "2": 2})
    443458            self.failUnlessEqual(last["corrupt-shares"], [])
    444459
     
    588603
    589604            s = lc.get_state()
    590             last = s["history"][0]
     605            last = s["history"]["0"]
    591606
    592607            self.failUnlessEqual(last["expiration-enabled"], True)
    593608            self.failUnlessEqual(last["configured-expiration-mode"],
    594                                  ("age", 2000, None, ("mutable", "immutable")))
    595             self.failUnlessEqual(last["leases-per-share-histogram"], {1: 2, 2: 2})
     609                                 ["age", 2000, None, ["mutable", "immutable"]])
     610            self.failUnlessEqual(last["leases-per-share-histogram"], {"1": 2, "2": 2})
    596611
    597612            rec = last["space-recovered"]
     
    732747
    733748            s = lc.get_state()
    734             last = s["history"][0]
     749            last = s["history"]["0"]
    735750
    736751            self.failUnlessEqual(last["expiration-enabled"], True)
    737752            self.failUnlessEqual(last["configured-expiration-mode"],
    738                                  ("cutoff-date", None, then,
    739                                   ("mutable", "immutable")))
     753                                 ["cutoff-date", None, then,
     754                                  ["mutable", "immutable"]])
    740755            self.failUnlessEqual(last["leases-per-share-histogram"],
    741                                  {1: 2, 2: 2})
     756                                 {"1": 2, "2": 2})
    742757
    743758            rec = last["space-recovered"]
     
    925940            h = s["history"]
    926941            self.failUnlessEqual(len(h), 10)
    927             self.failUnlessEqual(max(h.keys()), 15)
    928             self.failUnlessEqual(min(h.keys()), 6)
     942            self.failUnlessEqual(max(int(k) for k in h.keys()), 15)
     943            self.failUnlessEqual(min(int(k) for k in h.keys()), 6)
    929944        d.addCallback(_check)
    930945        return d
     
    10151030        def _check(ignored):
    10161031            s = lc.get_state()
    1017             last = s["history"][0]
     1032            last = s["history"]["0"]
    10181033            rec = last["space-recovered"]
    10191034            self.failUnlessEqual(rec["configured-buckets"], 4)
     
    11111126        def _after_first_cycle(ignored):
    11121127            s = lc.get_state()
    1113             last = s["history"][0]
     1128            last = s["history"]["0"]
    11141129            rec = last["space-recovered"]
    11151130            self.failUnlessEqual(rec["examined-buckets"], 5)
     
    11401155        return d
    11411156
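
The expectation changes in the hunks above, and the string-keyed
``"history"`` lookups throughout, all follow from one fact: crawler state
now round-trips through JSON rather than pickle, and JSON has neither
integer dict keys nor tuples. A two-assertion demonstration::

    import json

    state = {"history": {0: {"cycle-start-finish-times": (1.0, 2.0)}}}
    rt = json.loads(json.dumps(state))

    assert list(rt["history"]) == ["0"]  # integer key coerced to a string
    assert rt["history"]["0"]["cycle-start-finish-times"] == [1.0, 2.0]  # tuple -> list
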
     1157    @skipIf(platform.isWindows(), "pickle test-data can't be loaded on windows")
     1158    def test_deserialize_pickle(self):
     1159        """
     1160        The crawler can read existing state from the old pickle format
     1161        """
     1162        # this file came from an "in the wild" tahoe version 1.16.0
     1163        original_pickle = FilePath(__file__).parent().child("data").child("lease_checker.state.txt")
     1164        root = FilePath(self.mktemp())
     1165        storage = root.child("storage")
     1166        storage.makedirs()
     1167        test_pickle = storage.child("lease_checker.state")
     1168        with test_pickle.open("wb") as local, original_pickle.open("rb") as remote:
     1169            local.write(remote.read())
     1170
     1171        # convert from pickle format to JSON
     1172        top = Options()
     1173        top.parseOptions([
     1174            "admin", "migrate-crawler",
     1175            "--basedir", storage.parent().path,
     1176        ])
     1177        options = top.subOptions
     1178        while hasattr(options, "subOptions"):
     1179            options = options.subOptions
     1180        options.stdout = StringIO()
     1181        migrate_crawler(options)
     1182
     1183        # the (existing) state file should have been upgraded to JSON
     1184        self.assertFalse(test_pickle.exists())
     1185        self.assertTrue(test_pickle.siblingExtension(".json").exists())
     1186        serial = _LeaseStateSerializer(test_pickle.path)
     1187
     1188        self.assertEqual(
     1189            serial.load(),
     1190            {
     1191                u'last-complete-prefix': None,
     1192                u'version': 1,
     1193                u'current-cycle-start-time': 1635003106.611748,
     1194                u'last-cycle-finished': 312,
     1195                u'cycle-to-date': {
     1196                    u'leases-per-share-histogram': {
     1197                        u'1': 36793,
     1198                        u'2': 1,
     1199                    },
     1200                    u'space-recovered': {
     1201                        u'examined-buckets-immutable': 17183,
     1202                        u'configured-buckets-mutable': 0,
     1203                        u'examined-shares-mutable': 1796,
     1204                        u'original-shares-mutable': 1563,
     1205                        u'configured-buckets-immutable': 0,
     1206                        u'original-shares-immutable': 27926,
     1207                        u'original-diskbytes-immutable': 431149056,
     1208                        u'examined-shares-immutable': 34998,
     1209                        u'original-buckets': 14661,
     1210                        u'actual-shares-immutable': 0,
     1211                        u'configured-shares': 0,
     1212                        u'original-buckets-mutable': 899,
     1213                        u'actual-diskbytes': 4096,
     1214                        u'actual-shares-mutable': 0,
     1215                        u'configured-buckets': 1,
     1216                        u'examined-buckets-unknown': 14,
     1217                        u'actual-sharebytes': 0,
     1218                        u'original-shares': 29489,
     1219                        u'actual-buckets-immutable': 0,
     1220                        u'original-sharebytes': 312664812,
     1221                        u'examined-sharebytes-immutable': 383801602,
     1222                        u'actual-shares': 0,
     1223                        u'actual-sharebytes-immutable': 0,
     1224                        u'original-diskbytes': 441643008,
     1225                        u'configured-diskbytes-mutable': 0,
     1226                        u'configured-sharebytes-immutable': 0,
     1227                        u'configured-shares-mutable': 0,
     1228                        u'actual-diskbytes-immutable': 0,
     1229                        u'configured-diskbytes-immutable': 0,
     1230                        u'original-diskbytes-mutable': 10489856,
     1231                        u'actual-sharebytes-mutable': 0,
     1232                        u'configured-sharebytes': 0,
     1233                        u'examined-shares': 36794,
     1234                        u'actual-diskbytes-mutable': 0,
     1235                        u'actual-buckets': 1,
     1236                        u'original-buckets-immutable': 13761,
     1237                        u'configured-sharebytes-mutable': 0,
     1238                        u'examined-sharebytes': 390369660,
     1239                        u'original-sharebytes-immutable': 308125753,
     1240                        u'original-sharebytes-mutable': 4539059,
     1241                        u'actual-buckets-mutable': 0,
     1242                        u'examined-buckets-mutable': 1043,
     1243                        u'configured-shares-immutable': 0,
     1244                        u'examined-diskbytes': 476598272,
     1245                        u'examined-diskbytes-mutable': 9154560,
     1246                        u'examined-sharebytes-mutable': 6568058,
     1247                        u'examined-buckets': 18241,
     1248                        u'configured-diskbytes': 4096,
     1249                        u'examined-diskbytes-immutable': 467443712},
     1250                    u'corrupt-shares': [
     1251                        [u'2dn6xnlnsqwtnapwxfdivpm3s4', 4],
     1252                        [u'2dn6xnlnsqwtnapwxfdivpm3s4', 1],
     1253                        [u'2rrzthwsrrxolevmwdvbdy3rqi', 4],
     1254                        [u'2rrzthwsrrxolevmwdvbdy3rqi', 1],
     1255                        [u'2skfngcto6h7eqmn4uo7ntk3ne', 4],
     1256                        [u'2skfngcto6h7eqmn4uo7ntk3ne', 1],
     1257                        [u'32d5swqpqx2mwix7xmqzvhdwje', 4],
     1258                        [u'32d5swqpqx2mwix7xmqzvhdwje', 1],
     1259                        [u'5mmayp66yflmpon3o6unsnbaca', 4],
     1260                        [u'5mmayp66yflmpon3o6unsnbaca', 1],
     1261                        [u'6ixhpvbtre7fnrl6pehlrlflc4', 4],
     1262                        [u'6ixhpvbtre7fnrl6pehlrlflc4', 1],
     1263                        [u'ewzhvswjsz4vp2bqkb6mi3bz2u', 4],
     1264                        [u'ewzhvswjsz4vp2bqkb6mi3bz2u', 1],
     1265                        [u'fu7pazf6ogavkqj6z4q5qqex3u', 4],
     1266                        [u'fu7pazf6ogavkqj6z4q5qqex3u', 1],
     1267                        [u'hbyjtqvpcimwxiyqbcbbdn2i4a', 4],
     1268                        [u'hbyjtqvpcimwxiyqbcbbdn2i4a', 1],
     1269                        [u'pmcjbdkbjdl26k3e6yja77femq', 4],
     1270                        [u'pmcjbdkbjdl26k3e6yja77femq', 1],
     1271                        [u'r6swof4v2uttbiiqwj5pi32cm4', 4],
     1272                        [u'r6swof4v2uttbiiqwj5pi32cm4', 1],
     1273                        [u't45v5akoktf53evc2fi6gwnv6y', 4],
     1274                        [u't45v5akoktf53evc2fi6gwnv6y', 1],
     1275                        [u'y6zb4faar3rdvn3e6pfg4wlotm', 4],
     1276                        [u'y6zb4faar3rdvn3e6pfg4wlotm', 1],
     1277                        [u'z3yghutvqoqbchjao4lndnrh3a', 4],
     1278                        [u'z3yghutvqoqbchjao4lndnrh3a', 1],
     1279                    ],
     1280                    u'lease-age-histogram': {
     1281                        "1641600,1728000": 78,
     1282                        "12441600,12528000": 78,
     1283                        "8640000,8726400": 32,
     1284                        "1814400,1900800": 1860,
     1285                        "2764800,2851200": 76,
     1286                        "11491200,11577600": 20,
     1287                        "10713600,10800000": 183,
     1288                        "47865600,47952000": 7,
     1289                        "3110400,3196800": 328,
     1290                        "10627200,10713600": 43,
     1291                        "45619200,45705600": 4,
     1292                        "12873600,12960000": 5,
     1293                        "7430400,7516800": 7228,
     1294                        "1555200,1641600": 492,
     1295                        "38880000,38966400": 3,
     1296                        "12528000,12614400": 193,
     1297                        "7344000,7430400": 12689,
     1298                        "2678400,2764800": 278,
     1299                        "2332800,2419200": 12,
     1300                        "9244800,9331200": 73,
     1301                        "12787200,12873600": 218,
     1302                        "49075200,49161600": 19,
     1303                        "10368000,10454400": 117,
     1304                        "4665600,4752000": 256,
     1305                        "7516800,7603200": 993,
     1306                        "42336000,42422400": 33,
     1307                        "10972800,11059200": 122,
     1308                        "39052800,39139200": 51,
     1309                        "12614400,12700800": 210,
     1310                        "7603200,7689600": 2004,
     1311                        "10540800,10627200": 16,
     1312                        "950400,1036800": 4435,
     1313                        "42076800,42163200": 4,
     1314                        "8812800,8899200": 57,
     1315                        "5788800,5875200": 954,
     1316                        "36374400,36460800": 3,
     1317                        "9331200,9417600": 12,
     1318                        "30499200,30585600": 5,
     1319                        "12700800,12787200": 25,
     1320                        "2073600,2160000": 388,
     1321                        "12960000,13046400": 8,
     1322                        "11923200,12009600": 89,
     1323                        "3369600,3456000": 79,
     1324                        "3196800,3283200": 628,
     1325                        "37497600,37584000": 11,
     1326                        "33436800,33523200": 7,
     1327                        "44928000,45014400": 2,
     1328                        "37929600,38016000": 3,
     1329                        "38966400,39052800": 61,
     1330                        "3283200,3369600": 86,
     1331                        "11750400,11836800": 7,
     1332                        "3801600,3888000": 32,
     1333                        "46310400,46396800": 1,
     1334                        "4838400,4924800": 386,
     1335                        "8208000,8294400": 38,
     1336                        "37411200,37497600": 4,
     1337                        "12009600,12096000": 329,
     1338                        "10454400,10540800": 1239,
     1339                        "40176000,40262400": 1,
     1340                        "3715200,3801600": 104,
     1341                        "44409600,44496000": 13,
     1342                        "38361600,38448000": 5,
     1343                        "12268800,12355200": 2,
     1344                        "28771200,28857600": 6,
     1345                        "41990400,42076800": 10,
     1346                        "2592000,2678400": 40,
     1347                    },
     1348                },
     1349                'current-cycle': None,
     1350                'last-complete-bucket': None,
     1351            }
     1352        )
     1353        second_serial = _LeaseStateSerializer(serial._path.path)
     1354        self.assertEqual(
     1355            serial.load(),
     1356            second_serial.load(),
     1357        )
     1358
     1359    @skipIf(platform.isWindows(), "pickle test-data can't be loaded on windows")
     1360    def test_deserialize_history_pickle(self):
     1361        """
     1362        The crawler can read existing history state from the old pickle
     1363        format
     1364        """
     1365        # this file came from an "in the wild" tahoe version 1.16.0
     1366        original_pickle = FilePath(__file__).parent().child("data").child("lease_checker.history.txt")
     1367        root = FilePath(self.mktemp())
     1368        storage = root.child("storage")
     1369        storage.makedirs()
     1370        test_pickle = storage.child("lease_checker.history")
     1371        with test_pickle.open("wb") as local, original_pickle.open("rb") as remote:
     1372            local.write(remote.read())
     1373
     1374        # convert from pickle format to JSON
     1375        top = Options()
     1376        top.parseOptions([
     1377            "admin", "migrate-crawler",
     1378            "--basedir", storage.parent().path,
     1379        ])
     1380        options = top.subOptions
     1381        while hasattr(options, "subOptions"):
     1382            options = options.subOptions
     1383        options.stdout = StringIO()
     1384        migrate_crawler(options)
     1385
     1386        serial = _HistorySerializer(test_pickle.path)
     1387
     1388        self.maxDiff = None
     1389        self.assertEqual(
     1390            serial.load(),
     1391            {
     1392                "363": {
     1393                    'configured-expiration-mode': ['age', None, None, ['immutable', 'mutable']],
     1394                    'expiration-enabled': False,
     1395                    'leases-per-share-histogram': {
     1396                        '1': 39774,
     1397                    },
     1398                    'lease-age-histogram': [
     1399                        [0, 86400, 3125],
     1400                        [345600, 432000, 4175],
     1401                        [950400, 1036800, 141],
     1402                        [1036800, 1123200, 345],
     1403                        [1123200, 1209600, 81],
     1404                        [1296000, 1382400, 1832],
     1405                        [1555200, 1641600, 390],
     1406                        [1728000, 1814400, 12],
     1407                        [2073600, 2160000, 84],
     1408                        [2160000, 2246400, 228],
     1409                        [2246400, 2332800, 75],
     1410                        [2592000, 2678400, 644],
     1411                        [2678400, 2764800, 273],
     1412                        [2764800, 2851200, 94],
     1413                        [2851200, 2937600, 97],
     1414                        [3196800, 3283200, 143],
     1415                        [3283200, 3369600, 48],
     1416                        [4147200, 4233600, 374],
     1417                        [4320000, 4406400, 534],
     1418                        [5270400, 5356800, 1005],
     1419                        [6739200, 6825600, 8704],
     1420                        [6825600, 6912000, 3986],
     1421                        [6912000, 6998400, 7592],
     1422                        [6998400, 7084800, 2607],
     1423                        [7689600, 7776000, 35],
     1424                        [8035200, 8121600, 33],
     1425                        [8294400, 8380800, 54],
     1426                        [8640000, 8726400, 45],
     1427                        [8726400, 8812800, 27],
     1428                        [8812800, 8899200, 12],
     1429                        [9763200, 9849600, 77],
     1430                        [9849600, 9936000, 91],
     1431                        [9936000, 10022400, 1210],
     1432                        [10022400, 10108800, 45],
     1433                        [10108800, 10195200, 186],
     1434                        [10368000, 10454400, 113],
     1435                        [10972800, 11059200, 21],
     1436                        [11232000, 11318400, 5],
     1437                        [11318400, 11404800, 19],
     1438                        [11404800, 11491200, 238],
     1439                        [11491200, 11577600, 159],
     1440                        [11750400, 11836800, 1],
     1441                        [11836800, 11923200, 32],
     1442                        [11923200, 12009600, 192],
     1443                        [12009600, 12096000, 222],
     1444                        [12096000, 12182400, 18],
     1445                        [12182400, 12268800, 224],
     1446                        [12268800, 12355200, 9],
     1447                        [12355200, 12441600, 9],
     1448                        [12441600, 12528000, 10],
     1449                        [12528000, 12614400, 6],
     1450                        [12614400, 12700800, 6],
     1451                        [12700800, 12787200, 18],
     1452                        [12787200, 12873600, 6],
     1453                        [12873600, 12960000, 62],
     1454                    ],
     1455                    'cycle-start-finish-times': [1634446505.241972, 1634446666.055401],
     1456                    'space-recovered': {
     1457                        'examined-buckets-immutable': 17896,
     1458                        'configured-buckets-mutable': 0,
     1459                        'examined-shares-mutable': 2473,
     1460                        'original-shares-mutable': 1185,
     1461                        'configured-buckets-immutable': 0,
     1462                        'original-shares-immutable': 27457,
     1463                        'original-diskbytes-immutable': 2810982400,
     1464                        'examined-shares-immutable': 37301,
     1465                        'original-buckets': 14047,
     1466                        'actual-shares-immutable': 0,
     1467                        'configured-shares': 0,
     1468                        'original-buckets-mutable': 691,
     1469                        'actual-diskbytes': 4096,
     1470                        'actual-shares-mutable': 0,
     1471                        'configured-buckets': 1,
     1472                        'examined-buckets-unknown': 14,
     1473                        'actual-sharebytes': 0,
     1474                        'original-shares': 28642,
     1475                        'actual-buckets-immutable': 0,
     1476                        'original-sharebytes': 2695552941,
     1477                        'examined-sharebytes-immutable': 2754798505,
     1478                        'actual-shares': 0,
     1479                        'actual-sharebytes-immutable': 0,
     1480                        'original-diskbytes': 2818981888,
     1481                        'configured-diskbytes-mutable': 0,
     1482                        'configured-sharebytes-immutable': 0,
     1483                        'configured-shares-mutable': 0,
     1484                        'actual-diskbytes-immutable': 0,
     1485                        'configured-diskbytes-immutable': 0,
     1486                        'original-diskbytes-mutable': 7995392,
     1487                        'actual-sharebytes-mutable': 0,
     1488                        'configured-sharebytes': 0,
     1489                        'examined-shares': 39774,
     1490                        'actual-diskbytes-mutable': 0,
     1491                        'actual-buckets': 1,
     1492                        'original-buckets-immutable': 13355,
     1493                        'configured-sharebytes-mutable': 0,
     1494                        'examined-sharebytes': 2763646972,
     1495                        'original-sharebytes-immutable': 2692076909,
     1496                        'original-sharebytes-mutable': 3476032,
     1497                        'actual-buckets-mutable': 0,
     1498                        'examined-buckets-mutable': 1286,
     1499                        'configured-shares-immutable': 0,
     1500                        'examined-diskbytes': 2854801408,
     1501                        'examined-diskbytes-mutable': 12161024,
     1502                        'examined-sharebytes-mutable': 8848467,
     1503                        'examined-buckets': 19197,
     1504                        'configured-diskbytes': 4096,
     1505                        'examined-diskbytes-immutable': 2842640384
     1506                    },
     1507                    'corrupt-shares': [
     1508                        ['2dn6xnlnsqwtnapwxfdivpm3s4', 3],
     1509                        ['2dn6xnlnsqwtnapwxfdivpm3s4', 0],
     1510                        ['2rrzthwsrrxolevmwdvbdy3rqi', 3],
     1511                        ['2rrzthwsrrxolevmwdvbdy3rqi', 0],
     1512                        ['2skfngcto6h7eqmn4uo7ntk3ne', 3],
     1513                        ['2skfngcto6h7eqmn4uo7ntk3ne', 0],
     1514                        ['32d5swqpqx2mwix7xmqzvhdwje', 3],
     1515                        ['32d5swqpqx2mwix7xmqzvhdwje', 0],
     1516                        ['5mmayp66yflmpon3o6unsnbaca', 3],
     1517                        ['5mmayp66yflmpon3o6unsnbaca', 0],
     1518                        ['6ixhpvbtre7fnrl6pehlrlflc4', 3],
     1519                        ['6ixhpvbtre7fnrl6pehlrlflc4', 0],
     1520                        ['ewzhvswjsz4vp2bqkb6mi3bz2u', 3],
     1521                        ['ewzhvswjsz4vp2bqkb6mi3bz2u', 0],
     1522                        ['fu7pazf6ogavkqj6z4q5qqex3u', 3],
     1523                        ['fu7pazf6ogavkqj6z4q5qqex3u', 0],
     1524                        ['hbyjtqvpcimwxiyqbcbbdn2i4a', 3],
     1525                        ['hbyjtqvpcimwxiyqbcbbdn2i4a', 0],
     1526                        ['pmcjbdkbjdl26k3e6yja77femq', 3],
     1527                        ['pmcjbdkbjdl26k3e6yja77femq', 0],
     1528                        ['r6swof4v2uttbiiqwj5pi32cm4', 3],
     1529                        ['r6swof4v2uttbiiqwj5pi32cm4', 0],
     1530                        ['t45v5akoktf53evc2fi6gwnv6y', 3],
     1531                        ['t45v5akoktf53evc2fi6gwnv6y', 0],
     1532                        ['y6zb4faar3rdvn3e6pfg4wlotm', 3],
     1533                        ['y6zb4faar3rdvn3e6pfg4wlotm', 0],
     1534                        ['z3yghutvqoqbchjao4lndnrh3a', 3],
     1535                        ['z3yghutvqoqbchjao4lndnrh3a', 0],
     1536                    ]
     1537                }
     1538            }
     1539        )
     1540
    11421541
    11431542class WebStatus(unittest.TestCase, pollmixin.PollMixin):
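
Both tests above drive the conversion through the real CLI path,
``tahoe admin migrate-crawler --basedir <storage-dir>``. Stripped of the
option plumbing, the migration amounts to a load-pickle/dump-JSON/remove
step; a simplified sketch of that shape (``upgrade_pickle_to_json`` is a
hypothetical helper, not the real one)::

    import json
    import os
    import pickle

    def upgrade_pickle_to_json(path):
        # Read the legacy pickle state...
        with open(path, "rb") as f:
            state = pickle.load(f)
        # ...write it back out as JSON; integer keys and tuples are
        # normalized to strings and lists as a side effect...
        json_path = path + ".json"
        with open(json_path, "w") as f:
            json.dump(state, f)
        # ...and drop the old file, as the assertions above verify.
        os.remove(path)
        return json_path
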
  • TabularUnified src/allmydata/web/storage.py

    r531fe30b r18891d9e  
    257257        if so_far["corrupt-shares"]:
    258258            add("Corrupt shares:",
    259                 T.ul( (T.li( ["SI %s shnum %d" % corrupt_share
    260                               for corrupt_share in so_far["corrupt-shares"] ]
     259                T.ul( (T.li( ["SI %s shnum %d" % (si, shnum)
     260                              for si, shnum in so_far["corrupt-shares"] ]
    261261                             ))))
    262262        return tag("Current cycle:", p)
     
    268268        if not h:
    269269            return ""
    270         last = h[max(h.keys())]
     270        biggest = str(max(int(k) for k in h.keys()))
     271        last = h[biggest]
    271272
    272273        start, end = last["cycle-start-finish-times"]
     
    291292        if last["corrupt-shares"]:
    292293            add("Corrupt shares:",
    293                 T.ul( (T.li( ["SI %s shnum %d" % corrupt_share
    294                               for corrupt_share in last["corrupt-shares"] ]
     294                T.ul( (T.li( ["SI %s shnum %d" % (si, shnum)
     295                              for si, shnum in last["corrupt-shares"] ]
    295296                             ))))
    296297