Changeset 3668cb3 in trunk


Ignore:
Timestamp:
2011-08-01T22:43:17Z (14 years ago)
Author:
Brian Warner <warner@…>
Branches:
master
Children:
884df6f
Parents:
550d67f
Message:

remove nodeid from WriteBucketProxy classes and customers
refs #1363

Location:
src/allmydata
Files:
4 edited

Legend:

Unmodified
Added
Removed
  • TabularUnified src/allmydata/immutable/downloader/share.py

    r550d67f r3668cb3  
    124124        ht = IncompleteHashTree(N)
    125125        num_share_hashes = len(ht.needed_hashes(0, include_leaf=True))
    126         wbp = make_write_bucket_proxy(None, share_size, r["block_size"],
    127                                       r["num_segments"], num_share_hashes, 0,
    128                                       None)
     126        wbp = make_write_bucket_proxy(None, None, share_size, r["block_size"],
     127                                      r["num_segments"], num_share_hashes, 0)
    129128        self._fieldsize = wbp.fieldsize
    130129        self._fieldstruct = wbp.fieldstruct
  • TabularUnified src/allmydata/immutable/layout.py

    r550d67f r3668cb3  
    7777FORCE_V2 = False # set briefly by unit tests to make small-sized V2 shares
    7878
    79 def make_write_bucket_proxy(rref, data_size, block_size, num_segments,
    80                             num_share_hashes, uri_extension_size_max, nodeid):
     79def make_write_bucket_proxy(rref, server,
     80                            data_size, block_size, num_segments,
     81                            num_share_hashes, uri_extension_size_max):
    8182    # Use layout v1 for small files, so they'll be readable by older versions
    8283    # (<tahoe-1.3.0). Use layout v2 for large files; they'll only be readable
     
    8586        if FORCE_V2:
    8687            raise FileTooLargeError
    87         wbp = WriteBucketProxy(rref, data_size, block_size, num_segments,
    88                                num_share_hashes, uri_extension_size_max, nodeid)
     88        wbp = WriteBucketProxy(rref, server,
     89                               data_size, block_size, num_segments,
     90                               num_share_hashes, uri_extension_size_max)
    8991    except FileTooLargeError:
    90         wbp = WriteBucketProxy_v2(rref, data_size, block_size, num_segments,
    91                                   num_share_hashes, uri_extension_size_max, nodeid)
     92        wbp = WriteBucketProxy_v2(rref, server,
     93                                  data_size, block_size, num_segments,
     94                                  num_share_hashes, uri_extension_size_max)
    9295    return wbp
    9396
     
    97100    fieldstruct = ">L"
    98101
    99     def __init__(self, rref, data_size, block_size, num_segments,
    100                  num_share_hashes, uri_extension_size_max, nodeid,
    101                  pipeline_size=50000):
     102    def __init__(self, rref, server, data_size, block_size, num_segments,
     103                 num_share_hashes, uri_extension_size_max, pipeline_size=50000):
    102104        self._rref = rref
     105        self._server = server
    103106        self._data_size = data_size
    104107        self._block_size = block_size
    105108        self._num_segments = num_segments
    106         self._nodeid = nodeid
    107109
    108110        effective_segments = mathutil.next_power_of_k(num_segments,2)
     
    162164
    163165    def __repr__(self):
    164         if self._nodeid:
    165             nodeid_s = idlib.nodeid_b2a(self._nodeid)
    166         else:
    167             nodeid_s = "[None]"
    168         return "<WriteBucketProxy for node %s>" % nodeid_s
     166        return "<WriteBucketProxy for node %s>" % self._server.get_name()
    169167
    170168    def put_header(self):
     
    248246
    249247
     248    def get_servername(self):
     249        return self._server.get_name()
    250250    def get_peerid(self):
    251         if self._nodeid:
    252             return self._nodeid
    253         return None
     251        return self._server.get_serverid()
    254252
    255253class WriteBucketProxy_v2(WriteBucketProxy):
  • TabularUnified src/allmydata/immutable/upload.py

    r550d67f r3668cb3  
    7878        self.sharesize = sharesize
    7979
    80         wbp = layout.make_write_bucket_proxy(None, sharesize,
     80        wbp = layout.make_write_bucket_proxy(None, None, sharesize,
    8181                                             blocksize, num_segments,
    8282                                             num_share_hashes,
    83                                              EXTENSION_SIZE, server.get_serverid())
     83                                             EXTENSION_SIZE)
    8484        self.wbp_class = wbp.__class__ # to create more of them
    8585        self.allocated_size = wbp.get_allocated_size()
     
    121121        b = {}
    122122        for sharenum, rref in buckets.iteritems():
    123             bp = self.wbp_class(rref, self.sharesize,
     123            bp = self.wbp_class(rref, self._server, self.sharesize,
    124124                                self.blocksize,
    125125                                self.num_segments,
    126126                                self.num_share_hashes,
    127                                 EXTENSION_SIZE,
    128                                 self._server.get_serverid())
     127                                EXTENSION_SIZE)
    129128            b[sharenum] = bp
    130129        self.buckets.update(b)
     
    150149
    151150def str_shareloc(shnum, bucketwriter):
    152     return "%s: %s" % (shnum, idlib.shortnodeid_b2a(bucketwriter._nodeid),)
     151    return "%s: %s" % (shnum, bucketwriter.get_servername(),)
    153152
    154153class Tahoe2ServerSelector(log.PrefixingLogMixin):
     
    206205
    207206        # figure out how much space to ask for
    208         wbp = layout.make_write_bucket_proxy(None, share_size, 0, num_segments,
    209                                              num_share_hashes, EXTENSION_SIZE,
    210                                              None)
     207        wbp = layout.make_write_bucket_proxy(None, None,
     208                                             share_size, 0, num_segments,
     209                                             num_share_hashes, EXTENSION_SIZE)
    211210        allocated_size = wbp.get_allocated_size()
    212211        all_servers = storage_broker.get_servers_for_psi(storage_index)
  • TabularUnified src/allmydata/test/test_storage.py

    r550d67f r3668cb3  
    137137    def test_create(self):
    138138        bw, rb, sharefname = self.make_bucket("test_create", 500)
    139         bp = WriteBucketProxy(rb,
     139        bp = WriteBucketProxy(rb, None,
    140140                              data_size=300,
    141141                              block_size=10,
    142142                              num_segments=5,
    143143                              num_share_hashes=3,
    144                               uri_extension_size_max=500, nodeid=None)
     144                              uri_extension_size_max=500)
    145145        self.failUnless(interfaces.IStorageBucketWriter.providedBy(bp), bp)
    146146
     
    168168
    169169        bw, rb, sharefname = self.make_bucket(name, sharesize)
    170         bp = wbp_class(rb,
     170        bp = wbp_class(rb, None,
    171171                       data_size=95,
    172172                       block_size=25,
    173173                       num_segments=4,
    174174                       num_share_hashes=3,
    175                        uri_extension_size_max=len(uri_extension),
    176                        nodeid=None)
     175                       uri_extension_size_max=len(uri_extension))
    177176
    178177        d = bp.put_header()
Note: See TracChangeset for help on using the changeset viewer.