source: trunk/src/allmydata/test/mutable/test_multiple_encodings.py

Last change on this file was 1cfe843d, checked in by Alexandre Detiste <alexandre.detiste@…>, at 2024-02-22T23:40:25Z

more python2 removal

  • Property mode set to 100644
File size: 6.3 KB
Line 
1"""
2Ported to Python 3.
3"""
4
5from ..common import AsyncTestCase
6from testtools.matchers import Equals
7from allmydata.interfaces import SDMF_VERSION
8from allmydata.monitor import Monitor
9from foolscap.logging import log
10from allmydata.mutable.common import MODE_READ
11from allmydata.mutable.publish import Publish, MutableData
12from allmydata.mutable.servermap import ServerMap, ServermapUpdater
13from ..common_util import DevNullDictionary
14from .util import FakeStorage, make_nodemaker
15
class MultipleEncodings(AsyncTestCase):
    """
    Exercise mutable-file download when shares produced by *different*
    encodings (k-of-n parameter sets) of the same file are mixed together
    on the grid.  The downloader must survive seeing shares whose encoding
    parameters disagree with each other.
    """

    def setUp(self):
        # Build a 20-server fake grid, publish one mutable file on it, and
        # remember the resulting filenode in self._fn.  Returns the upload
        # Deferred so the test framework waits for publication to finish.
        super(MultipleEncodings, self).setUp()
        self.CONTENTS = b"New contents go here"
        self.uploadable = MutableData(self.CONTENTS)
        self._storage = FakeStorage()
        self._nodemaker = make_nodemaker(self._storage, num_peers=20)
        self._storage_broker = self._nodemaker.storage_broker
        d = self._nodemaker.create_mutable_file(self.uploadable)
        def _created(node):
            self._fn = node
        d.addCallback(_created)
        return d

    def _encode(self, k, n, data, version=SDMF_VERSION):
        # encode 'data' into a peerid->shares dict.
        #
        # Publishes 'data' under self._fn's cap but with required_shares=k
        # and total_shares=n, then captures and returns the resulting
        # {peerid: {shnum: share}} dict, leaving the fake storage empty.
        # NOTE(review): the 'version' parameter is accepted but never used
        # in this body — presumably a leftover from an SDMF/MDMF variant of
        # this helper; confirm before removing.

        fn = self._fn
        # disable the nodecache, since for these tests we explicitly need
        # multiple nodes pointing at the same file
        self._nodemaker._node_cache = DevNullDictionary()
        fn2 = self._nodemaker.create_from_cap(fn.get_uri())
        # then we copy over other fields that are normally fetched from the
        # existing shares
        fn2._pubkey = fn._pubkey
        fn2._privkey = fn._privkey
        fn2._encprivkey = fn._encprivkey
        # and set the encoding parameters to something completely different
        fn2._required_shares = k
        fn2._total_shares = n

        s = self._storage
        s._peers = {} # clear existing storage
        p2 = Publish(fn2, self._storage_broker, None)
        uploadable = MutableData(data)
        d = p2.publish(uploadable)
        def _published(res):
            # Steal the freshly-written shares out of the fake storage and
            # hand them to the caller; reset storage for the next encoding.
            shares = s._peers
            s._peers = {}
            return shares
        d.addCallback(_published)
        return d

    def make_servermap(self, mode=MODE_READ, oldmap=None):
        # Run a servermap update against self._fn and return the Deferred
        # that fires with the updated ServerMap.  'oldmap' defaults to a
        # fresh, empty map.
        if oldmap is None:
            oldmap = ServerMap()
        smu = ServermapUpdater(self._fn, self._storage_broker, Monitor(),
                               oldmap, mode)
        d = smu.update()
        return d

    def test_multiple_encodings(self):
        # we encode the same file in two different ways (3-of-10 and 4-of-9),
        # then mix up the shares, to make sure that download survives seeing
        # a variety of encodings. This is actually kind of tricky to set up.

        contents1 = b"Contents for encoding 1 (3-of-10) go here"*1000
        contents2 = b"Contents for encoding 2 (4-of-9) go here"*1000
        contents3 = b"Contents for encoding 3 (4-of-7) go here"*1000

        # we make a retrieval object that doesn't know what encoding
        # parameters to use
        fn3 = self._nodemaker.create_from_cap(self._fn.get_uri())

        # now we upload a file through fn1, and grab its shares
        d = self._encode(3, 10, contents1)
        def _encoded_1(shares):
            self._shares1 = shares
        d.addCallback(_encoded_1)
        d.addCallback(lambda res: self._encode(4, 9, contents2))
        def _encoded_2(shares):
            self._shares2 = shares
        d.addCallback(_encoded_2)
        d.addCallback(lambda res: self._encode(4, 7, contents3))
        def _encoded_3(shares):
            self._shares3 = shares
        d.addCallback(_encoded_3)

        def _merge(res):
            log.msg("merging sharelists")
            # we merge the shares from the two sets, leaving each shnum in
            # its original location, but using a share from set1 or set2
            # according to the following sequence:
            #
            #  4-of-9   a  s2
            #  4-of-9   b  s2
            #  4-of-7   c   s3
            #  4-of-9   d  s2
            #  3-of-10  e s1
            #  3-of-10  f s1
            #  3-of-10  g s1
            #  4-of-9   h  s2
            #
            # so that neither form can be recovered until fetch [f], at which
            # point version-s1 (the 3-of-10 form) should be recoverable. If
            # the implementation latches on to the first version it sees,
            # then s2 will be recoverable at fetch [g].

            # Later, when we implement code that handles multiple versions,
            # we can use this framework to assert that all recoverable
            # versions are retrieved, and test that 'epsilon' does its job

            # places[shnum] names which share-set (1, 2, or 3) supplies the
            # share for that shnum; shnums beyond the list are not placed.
            places = [2, 2, 3, 2, 1, 1, 1, 2]

            sharemap = {}
            sb = self._storage_broker

            for peerid in sorted(sb.get_all_serverids()):
                for shnum in self._shares1.get(peerid, {}):
                    if shnum < len(places):
                        which = places[shnum]
                    else:
                        which = "x"
                    # NOTE(review): this rebinds a fresh dict for peerid on
                    # every shnum iteration, so only the last share placed
                    # per server survives.  With 10 shares spread over 20
                    # servers each server typically holds one share, so
                    # this looks harmless here — confirm against FakeStorage
                    # placement before relying on multi-share servers.
                    self._storage._peers[peerid] = peers = {}
                    in_1 = shnum in self._shares1[peerid]
                    in_2 = shnum in self._shares2.get(peerid, {})
                    in_3 = shnum in self._shares3.get(peerid, {})
                    if which == 1:
                        if in_1:
                            peers[shnum] = self._shares1[peerid][shnum]
                            sharemap[shnum] = peerid
                    elif which == 2:
                        if in_2:
                            peers[shnum] = self._shares2[peerid][shnum]
                            sharemap[shnum] = peerid
                    elif which == 3:
                        if in_3:
                            peers[shnum] = self._shares3[peerid][shnum]
                            sharemap[shnum] = peerid

            # we don't bother placing any other shares
            # now sort the sequence so that share 0 is returned first
            new_sequence = [sharemap[shnum]
                            for shnum in sorted(sharemap.keys())]
            self._storage._sequence = new_sequence
            log.msg("merge done")
        d.addCallback(_merge)
        d.addCallback(lambda res: fn3.download_best_version())
        def _retrieved(new_contents):
            # the current specified behavior is "first version recoverable"
            self.assertThat(new_contents, Equals(contents1))
        d.addCallback(_retrieved)
        return d
Note: See TracBrowser for help on using the repository browser.