source: trunk/integration/test_web.py

Last change on this file was a30a7cb, checked in by Christopher R. Wood <chris@…>, at 2024-05-30T19:48:43Z

Factor out inline test keys into "data" directory

1"""
2These tests were originally written to achieve some level of
3coverage for the WebAPI functionality during Python3 porting (there
4aren't many tests of the Web API period).
5
6Most of the tests have cursory asserts and encode 'what the WebAPI did
7at the time of testing' -- not necessarily a cohesive idea of what the
8WebAPI *should* do in every situation. It's not clear the latter
9exists anywhere, however.
10"""

from __future__ import annotations

import time
from base64 import urlsafe_b64encode
from urllib.parse import unquote as url_unquote, quote as url_quote

from cryptography.hazmat.primitives.serialization import load_pem_private_key
from twisted.internet.threads import deferToThread
from twisted.python.filepath import FilePath

import allmydata.uri
from allmydata.crypto.rsa import (
    create_signing_keypair,
    der_string_from_signing_key,
    PrivateKey,
    PublicKey,
)
from allmydata.mutable.common import derive_mutable_keys
from allmydata.util import jsonbytes as json

from . import util
from .util import run_in_thread

import requests
import html5lib
from bs4 import BeautifulSoup

import pytest_twisted


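# DATA_PATH points at src/allmydata/test/data, which holds the pre-generated
# RSA keys (the openssl-rsa-2048-*.txt files) used by the known-private-key
# tests below.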
DATA_PATH = FilePath(__file__).parent().sibling("src").child("allmydata").child("test").child("data")


@run_in_thread
def test_index(alice):
    """
    we can download the index file
    """
    util.web_get(alice.process, u"")


@run_in_thread
def test_index_json(alice):
    """
    we can download the index file as json
    """
    data = util.web_get(alice.process, u"", params={u"t": u"json"})
    # it should be valid json
    json.loads(data)


@run_in_thread
def test_upload_download(alice):
    """
    upload a file, then download it via readcap
    """

    FILE_CONTENTS = u"some contents"

    readcap = util.web_post(
        alice.process, u"uri",
        data={
            u"t": u"upload",
            u"format": u"mdmf",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )
    readcap = readcap.strip()

    data = util.web_get(
        alice.process, u"uri",
        params={
            u"uri": readcap,
            u"filename": u"boom",
        }
    )
    assert str(data, "utf-8") == FILE_CONTENTS


@run_in_thread
def test_put(alice):
    """
    use PUT to create a file
    """

    FILE_CONTENTS = b"added via PUT" * 20

    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    cap = allmydata.uri.from_string(resp.text.strip().encode('ascii'))
    cfg = alice.process.get_config()
    assert isinstance(cap, allmydata.uri.CHKFileURI)
    assert cap.size == len(FILE_CONTENTS)
    assert cap.total_shares == int(cfg.get_config("client", "shares.total"))
    assert cap.needed_shares == int(cfg.get_config("client", "shares.needed"))


@run_in_thread
def test_helper_status(storage_nodes):
    """
    successfully GET the /helper_status page
    """

    url = util.node_url(storage_nodes[0].process.node_dir, "helper_status")
    resp = requests.get(url)
    assert resp.status_code >= 200 and resp.status_code < 300
    dom = BeautifulSoup(resp.content, "html5lib")
    assert str(dom.h1.string) == u"Helper Status"


@run_in_thread
def test_deep_stats(alice):
    """
    create a directory, do deep-stats on it and prove the /operations/
    URIs work
    """
    resp = requests.post(
        util.node_url(alice.process.node_dir, "uri"),
        params={
            "format": "sdmf",
            "t": "mkdir",
            "redirect_to_result": "true",
        },
    )
    assert resp.status_code >= 200 and resp.status_code < 300

    # when creating a directory, we'll be redirected to a URL
    # containing our writecap
    uri = url_unquote(resp.url)
    assert 'URI:DIR2:' in uri
    dircap = uri[uri.find("URI:DIR2:"):].rstrip('/')
    dircap_uri = util.node_url(alice.process.node_dir, "uri/{}".format(url_quote(dircap)))

    # POST a file into this directory
    FILE_CONTENTS = u"a file in a directory"

    resp = requests.post(
        dircap_uri,
        data={
            u"t": u"upload",
        },
        files={
            u"file": FILE_CONTENTS,
        },
    )
    resp.raise_for_status()

    # confirm the file is in the directory
    resp = requests.get(
        dircap_uri,
        params={
            u"t": u"json",
        },
    )
    d = json.loads(resp.content)
    k, data = d
    assert k == u"dirnode"
    assert len(data['children']) == 1
    k, child = list(data['children'].values())[0]
    assert k == u"filenode"
    assert child['size'] == len(FILE_CONTENTS)

    # perform deep-stats on it...
    resp = requests.post(
        dircap_uri,
        data={
            u"t": u"start-deep-stats",
            u"ophandle": u"something_random",
        },
    )
    assert resp.status_code >= 200 and resp.status_code < 300

    # confirm we get information from the operation once it's done
    tries = 10
    while tries > 0:
        tries -= 1
        resp = requests.get(
            util.node_url(alice.process.node_dir, u"operations/something_random"),
        )
        d = json.loads(resp.content)
        if d['size-literal-files'] == len(FILE_CONTENTS):
            print("stats completed successfully")
            break
        else:
            print("{} != {}; waiting".format(d['size-literal-files'], len(FILE_CONTENTS)))
        time.sleep(.5)


@run_in_thread
def test_status(alice):
    """
    confirm we get something sensible from /status and the various sub-types
    """

    # upload a file
    # (because of the nature of the integration tests, we can only
    # assert things about "our" file; we don't know what other
    # operations may have happened in the grid before our test runs).

    FILE_CONTENTS = u"all the Important Data of alice\n" * 1200

    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    cap = resp.text.strip()

    print("Uploaded data, cap={}".format(cap))
    resp = requests.get(
        util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap))),
    )

    print("Downloaded {} bytes of data".format(len(resp.content)))
    assert str(resp.content, "ascii") == FILE_CONTENTS

    resp = requests.get(
        util.node_url(alice.process.node_dir, "status"),
    )
    dom = html5lib.parse(resp.content)

    hrefs = [
        a.get('href')
        for a in dom.iter(u'{http://www.w3.org/1999/xhtml}a')
    ]

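    # Visit every link on the status page; our upload and our download should
    # each show up with a "Total Size" matching FILE_CONTENTS.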
    found_upload = False
    found_download = False
    for href in hrefs:
        if href == u"/" or not href:
            continue
        resp = requests.get(util.node_url(alice.process.node_dir, href))
        if href.startswith(u"/status/up"):
            assert b"File Upload Status" in resp.content
            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                found_upload = True
        elif href.startswith(u"/status/down"):
            assert b"File Download Status" in resp.content
            if b"Total Size: %d" % (len(FILE_CONTENTS),) in resp.content:
                found_download = True

                # download the specialized event information
                resp = requests.get(
                    util.node_url(alice.process.node_dir, u"{}/event_json".format(href)),
                )
                js = json.loads(resp.content)
                # there's usually just one "read" operation, but this can handle many
                total_bytes = sum([st['bytes_returned'] for st in js['read']], 0)
                assert total_bytes == len(FILE_CONTENTS)


    assert found_upload, "Failed to find the file we uploaded in the status page"
    assert found_download, "Failed to find the file we downloaded in the status page"


@pytest_twisted.ensureDeferred
async def test_directory_deep_check(reactor, request, alice):
    """
    use deep-check and confirm the result pages work
    """
    # Make sure the node is configured compatibly with expectations of this
    # test.
    happy = 3
    required = 2
    total = 4

    await alice.reconfigure_zfec(reactor, (happy, required, total), convergence=None)
    await deferToThread(_test_directory_deep_check_blocking, alice)


def _test_directory_deep_check_blocking(alice):
    # create a directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
            u"redirect_to_result": u"true",
        }
    )

    # get json information about our directory
    dircap_url = resp.url
    resp = requests.get(
        dircap_url,
        params={u"t": u"json"},
    )
    # Just verify it is valid JSON.
    json.loads(resp.content)

    # upload a file of pangrams into the directory
    FILE_CONTENTS = u"Sphinx of black quartz, judge my vow.\n" * (2048*10)

    resp = requests.post(
        dircap_url,
        params={
            u"t": u"upload",
            u"upload-chk": u"upload-chk",
        },
        files={
            u"file": FILE_CONTENTS,
        }
    )
    cap0 = resp.content
    print("Uploaded data0, cap={}".format(cap0))

    # a different pangram
    FILE_CONTENTS = u"The five boxing wizards jump quickly.\n" * (2048*10)

    resp = requests.post(
        dircap_url,
        params={
            u"t": u"upload",
            u"upload-chk": u"upload-chk",
        },
        files={
            u"file": FILE_CONTENTS,
        }
    )
    cap1 = resp.content
    print("Uploaded data1, cap={}".format(cap1))

    resp = requests.get(
        util.node_url(alice.process.node_dir, u"uri/{}".format(url_quote(cap0))),
        params={u"t": u"info"},
    )

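    # With the (happy, required, total) == (3, 2, 4) encoding configured by
    # test_directory_deep_check above, a healthy object should report all four
    # shares present and none corrupt.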
    def check_repair_data(checkdata):
        assert checkdata["healthy"]
        assert checkdata["count-happiness"] == 4
        assert checkdata["count-good-share-hosts"] == 4
        assert checkdata["count-shares-good"] == 4
        assert checkdata["count-corrupt-shares"] == 0
        assert checkdata["list-corrupt-shares"] == []

    # do a "check" (once for HTML, then with JSON for easier asserts)
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
        }
    )
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
            u"output": u"JSON",
        }
    )
    check_repair_data(json.loads(resp.content)["results"])

    # "check and repair"
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
            u"repair": u"true",
        }
    )
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"check",
            u"return_to": u".",
            u"verify": u"true",
            u"repair": u"true",
            u"output": u"JSON",
        }
    )
    check_repair_data(json.loads(resp.content)["post-repair-results"]["results"])

    # start a "deep check and repair"
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"start-deep-check",
            u"return_to": u".",
            u"verify": u"on",
            u"repair": u"on",
            u"output": u"JSON",
            u"ophandle": u"deadbeef",
        }
    )
    deepcheck_uri = resp.url

    data = json.loads(resp.content)
    tries = 10
    while not data['finished'] and tries > 0:
        tries -= 1
        time.sleep(0.5)
        print("deep-check not finished, reloading")
        resp = requests.get(deepcheck_uri, params={u"output": "JSON"})
        data = json.loads(resp.content)
    print("deep-check finished")
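    # Each pangram line is 38 bytes, so each uploaded file weighs in at
    # 38 * 2048 * 10 == 778240 bytes, which is what the stats should report.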
    assert data[u"stats"][u"count-immutable-files"] == 1
    assert data[u"stats"][u"count-literal-files"] == 0
    assert data[u"stats"][u"largest-immutable-file"] == 778240
    assert data[u"count-objects-checked"] == 2

    # also get the HTML version
    resp = requests.post(
        dircap_url,
        params={
            u"t": u"start-deep-check",
            u"return_to": u".",
            u"verify": u"on",
            u"repair": u"on",
            u"ophandle": u"definitely_random",
        }
    )
    deepcheck_uri = resp.url

    # if the operation isn't done, there's an <H2> tag with the
    # reload link; otherwise there's only an <H1> tag. Wait up to 5
    # seconds for this to complete.
    for _ in range(5):
        resp = requests.get(deepcheck_uri)
        dom = BeautifulSoup(resp.content, "html5lib")
        if dom.h1 and u'Results' in str(dom.h1.string):
            break
        if dom.h2 and dom.h2.a and u"Reload" in str(dom.h2.a.string):
            dom = None
            time.sleep(1)
    assert dom is not None, "Operation never completed"


@run_in_thread
def test_storage_info(storage_nodes):
    """
    retrieve the /storage page for one storage node
    """
    storage0 = storage_nodes[0]

    requests.get(
        util.node_url(storage0.process.node_dir, u"storage"),
    )


@run_in_thread
def test_storage_info_json(storage_nodes):
    """
    retrieve and confirm /storage?t=json URI for one storage node
    """
    storage0 = storage_nodes[0]

    resp = requests.get(
        util.node_url(storage0.process.node_dir, u"storage"),
        params={u"t": u"json"},
    )
    data = json.loads(resp.content)
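    # The expected figure mirrors the reserved_space value the integration
    # harness configures for its storage nodes (1,000,000,000 bytes).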
    assert data[u"stats"][u"storage_server.reserved_space"] == 1000000000


@run_in_thread
def test_introducer_info(introducer):
    """
    retrieve and confirm the introducer's web page (HTML and JSON)
    """
    resp = requests.get(
        util.node_url(introducer.process.node_dir, u""),
    )
    assert b"Introducer" in resp.content

    resp = requests.get(
        util.node_url(introducer.process.node_dir, u""),
        params={u"t": u"json"},
    )
    data = json.loads(resp.content)
    assert "announcement_summary" in data
    assert "subscription_summary" in data


@run_in_thread
def test_mkdir_with_children(alice):
    """
    create a directory using ?t=mkdir-with-children
    """

    # create a file to put in our directory
    FILE_CONTENTS = u"some file contents\n" * 500
    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    filecap = resp.content.strip()

    # create a (sub) directory to put in our directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
        }
    )
    # (the children JSON below carries both the read-write and read-only URIs)
    dircap = resp.content
    dircap_obj = allmydata.uri.from_string(dircap)
    dircap_ro = dircap_obj.get_readonly().to_string()

    # create json information about our directory
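    # (each child is a [node-type, details] pair, the same shape the web API
    # returns when a directory is fetched with ?t=json)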
    meta = {
        "a_file": [
            "filenode", {
                "ro_uri": filecap,
                "metadata": {
                    "ctime": 1202777696.7564139,
                    "mtime": 1202777696.7564139,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ],
        "some_subdir": [
            "dirnode", {
                "rw_uri": dircap,
                "ro_uri": dircap_ro,
                "metadata": {
                    "ctime": 1202778102.7589991,
                    "mtime": 1202778111.2160511,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ]
    }

    # create a new directory with one file and one sub-dir (all-at-once)
    resp = util.web_post(
        alice.process, u"uri",
        params={u"t": "mkdir-with-children"},
        data=json.dumps(meta),
    )
    assert resp.startswith(b"URI:DIR2")
    cap = allmydata.uri.from_string(resp)
    assert isinstance(cap, allmydata.uri.DirectoryURI)


@run_in_thread
def test_mkdir_with_random_private_key(alice):
    """
    Create a new directory with ?t=mkdir&private-key=... using a
    randomly-generated RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    """

    privkey, pubkey = create_signing_keypair(2048)

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir",
            u"private-key": privkey_encoded,
        },
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes,
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)


@run_in_thread
def test_mkdir_with_known_private_key(alice):
    """
    Create a new directory with ?t=mkdir&private-key=... using a
    known-in-advance RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    In addition, because the writekey and fingerprint are derived
    deterministically, given the same RSA private key, the resultant
    directory capability should always be the same.
    """
    # Generated with `openssl genrsa -out openssl-rsa-2048-3.txt 2048`
    pempath = DATA_PATH.child("openssl-rsa-2048-3.txt")
    privkey = load_pem_private_key(pempath.getContent(), password=None)
    assert isinstance(privkey, PrivateKey)
    pubkey = privkey.public_key()
    assert isinstance(pubkey, PublicKey)

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir",
            u"private-key": privkey_encoded,
        },
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes,
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)

    assert resp == b"URI:DIR2:3oo7j7f7qqxnet2z2lf57ucup4:cpktmsxlqnd5yeekytxjxvff5e6d6fv7py6rftugcndvss7tzd2a"


@run_in_thread
def test_mkdir_with_children_and_random_private_key(alice):
    """
    Create a new directory with ?t=mkdir-with-children&private-key=...
    using a randomly-generated RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    """

    # create a file to put in our directory
    FILE_CONTENTS = u"some file contents\n" * 500
    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    filecap = resp.content.strip()

    # create a (sub) directory to put in our directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
        }
    )
    # (the children JSON below carries both the read-write and read-only URIs)
    dircap = resp.content
    dircap_obj = allmydata.uri.from_string(dircap)
    dircap_ro = dircap_obj.get_readonly().to_string()

    # create json information about our directory
    meta = {
        "a_file": [
            "filenode", {
                "ro_uri": filecap,
                "metadata": {
                    "ctime": 1202777696.7564139,
                    "mtime": 1202777696.7564139,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ],
        "some_subdir": [
            "dirnode", {
                "rw_uri": dircap,
                "ro_uri": dircap_ro,
                "metadata": {
                    "ctime": 1202778102.7589991,
                    "mtime": 1202778111.2160511,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ]
    }

    privkey, pubkey = create_signing_keypair(2048)

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    # create a new directory with one file and one sub-dir (all-at-once)
    # with the supplied RSA private key
    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir-with-children",
            u"private-key": privkey_encoded,
        },
        data=json.dumps(meta),
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes,
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)


@run_in_thread
def test_mkdir_with_children_and_known_private_key(alice):
    """
    Create a new directory with ?t=mkdir-with-children&private-key=...
    using a known-in-advance RSA private key.

    The writekey and fingerprint derived from the provided RSA key
    should match those of the newly-created directory capability.
    In addition, because the writekey and fingerprint are derived
    deterministically, given the same RSA private key, the resultant
    directory capability should always be the same.
    """

    # create a file to put in our directory
    FILE_CONTENTS = u"some file contents\n" * 500
    resp = requests.put(
        util.node_url(alice.process.node_dir, u"uri"),
        data=FILE_CONTENTS,
    )
    filecap = resp.content.strip()

    # create a (sub) directory to put in our directory
    resp = requests.post(
        util.node_url(alice.process.node_dir, u"uri"),
        params={
            u"t": u"mkdir",
        }
    )
    # (the children JSON below carries both the read-write and read-only URIs)
    dircap = resp.content
    dircap_obj = allmydata.uri.from_string(dircap)
    dircap_ro = dircap_obj.get_readonly().to_string()

    # create json information about our directory
    meta = {
        "a_file": [
            "filenode", {
                "ro_uri": filecap,
                "metadata": {
                    "ctime": 1202777696.7564139,
                    "mtime": 1202777696.7564139,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ],
        "some_subdir": [
            "dirnode", {
                "rw_uri": dircap,
                "ro_uri": dircap_ro,
                "metadata": {
                    "ctime": 1202778102.7589991,
                    "mtime": 1202778111.2160511,
                    "tahoe": {
                        "linkcrtime": 1202777696.7564139,
                        "linkmotime": 1202777696.7564139
                    }
                }
            }
        ]
    }

    # Generated with `openssl genrsa -out openssl-rsa-2048-4.txt 2048`
    pempath = DATA_PATH.child("openssl-rsa-2048-4.txt")
    privkey = load_pem_private_key(pempath.getContent(), password=None)
    assert isinstance(privkey, PrivateKey)
    pubkey = privkey.public_key()
    assert isinstance(pubkey, PublicKey)

    writekey, _, fingerprint = derive_mutable_keys((pubkey, privkey))

    # The "private-key" parameter takes a DER-encoded RSA private key
    # encoded in URL-safe base64; PEM blocks are not supported.
    privkey_der = der_string_from_signing_key(privkey)
    privkey_encoded = urlsafe_b64encode(privkey_der).decode("ascii")

    # create a new directory with one file and one sub-dir (all-at-once)
    # with the supplied RSA private key
    resp = util.web_post(
        alice.process, u"uri",
        params={
            u"t": "mkdir-with-children",
            u"private-key": privkey_encoded,
        },
        data=json.dumps(meta),
    )
    assert resp.startswith(b"URI:DIR2")

    dircap = allmydata.uri.from_string(resp)
    assert isinstance(dircap, allmydata.uri.DirectoryURI)

    # DirectoryURI objects lack 'writekey' and 'fingerprint' attributes,
    # so extract them from the enclosed WriteableSSKFileURI object.
    filecap = dircap.get_filenode_cap()
    assert isinstance(filecap, allmydata.uri.WriteableSSKFileURI)

    assert (writekey, fingerprint) == (filecap.writekey, filecap.fingerprint)

    assert resp == b"URI:DIR2:ppwzpwrd37xi7tpribxyaa25uy:imdws47wwpzfkc5vfllo4ugspb36iit4cqps6ttuhaouc66jb2da"