1 | """ |
---|
2 | Ported to Python 3. |
---|
3 | """ |
---|
4 | |
---|
5 | import itertools |
---|
6 | import hashlib |
---|
7 | import re |
---|
8 | from twisted.internet import defer |
---|
9 | from twisted.python.filepath import FilePath |
---|
10 | from twisted.web.resource import Resource |
---|
11 | from twisted.web.template import ( |
---|
12 | Element, |
---|
13 | XMLFile, |
---|
14 | renderer, |
---|
15 | renderElement, |
---|
16 | tags, |
---|
17 | ) |
---|
18 | from allmydata.util import base32, idlib, jsonbytes as json |
---|
19 | from allmydata.web.common import ( |
---|
20 | abbreviate_time, |
---|
21 | abbreviate_rate, |
---|
22 | abbreviate_size, |
---|
23 | exception_to_child, |
---|
24 | plural, |
---|
25 | compute_rate, |
---|
26 | render_exception, |
---|
27 | render_time, |
---|
28 | MultiFormatResource, |
---|
29 | SlotsSequenceElement, |
---|
30 | WebError, |
---|
31 | ) |
---|
32 | |
---|
33 | from allmydata.interfaces import ( |
---|
34 | IUploadStatus, |
---|
35 | IDownloadStatus, |
---|
36 | IPublishStatus, |
---|
37 | IRetrieveStatus, |
---|
38 | IServermapUpdaterStatus, |
---|
39 | ) |
---|
40 | |
---|
41 | |
---|
class UploadResultsRendererMixin(Element):
    # this requires a method named 'upload_results'

    @renderer
    def pushed_shares(self, req, tag):
        d = self.upload_results()
        d.addCallback(lambda res: str(res.get_pushed_shares()))
        return d

    @renderer
    def preexisting_shares(self, req, tag):
        d = self.upload_results()
        d.addCallback(lambda res: str(res.get_preexisting_shares()))
        return d

    @renderer
    def sharemap(self, req, tag):
        d = self.upload_results()
        d.addCallback(lambda res: res.get_sharemap())
        def _render(sharemap):
            if sharemap is None:
                return "None"
            ul = tags.ul()
            for shnum, servers in sorted(sharemap.items()):
                server_names = ', '.join([str(s.get_name(), "utf-8") for s in servers])
                ul(tags.li("%d -> placed on [%s]" % (shnum, server_names)))
            return ul
        d.addCallback(_render)
        return d

    @renderer
    def servermap(self, req, tag):
        d = self.upload_results()
        d.addCallback(lambda res: res.get_servermap())
        def _render(servermap):
            if servermap is None:
                return "None"
            ul = tags.ul()
            for server, shnums in sorted(servermap.items(), key=id):
                shares_s = ",".join(["#%d" % shnum for shnum in shnums])
                ul(tags.li("[%s] got share%s: %s" % (str(server.get_name(), "utf-8"),
                                                     plural(shnums), shares_s)))
            return ul
        d.addCallback(_render)
        return d

    @renderer
    def file_size(self, req, tag):
        d = self.upload_results()
        d.addCallback(lambda res: str(res.get_file_size()))
        return d

    def _get_time(self, name):
        d = self.upload_results()
        d.addCallback(lambda res: abbreviate_time(res.get_timings().get(name)))
        return d

    @renderer
    def time_total(self, req, tag):
        return tag(self._get_time("total"))

    @renderer
    def time_storage_index(self, req, tag):
        return tag(self._get_time("storage_index"))

    @renderer
    def time_contacting_helper(self, req, tag):
        return tag(self._get_time("contacting_helper"))

    @renderer
    def time_cumulative_fetch(self, req, tag):
        return tag(self._get_time("cumulative_fetch"))

    @renderer
    def time_helper_total(self, req, tag):
        return tag(self._get_time("helper_total"))

    @renderer
    def time_peer_selection(self, req, tag):
        return tag(self._get_time("peer_selection"))

    @renderer
    def time_total_encode_and_push(self, req, tag):
        return tag(self._get_time("total_encode_and_push"))

    @renderer
    def time_cumulative_encoding(self, req, tag):
        return tag(self._get_time("cumulative_encoding"))

    @renderer
    def time_cumulative_sending(self, req, tag):
        return tag(self._get_time("cumulative_sending"))

    @renderer
    def time_hashes_and_close(self, req, tag):
        return tag(self._get_time("hashes_and_close"))

    def _get_rate(self, name):
        d = self.upload_results()
        def _convert(r):
            file_size = r.get_file_size()
            duration = r.get_timings().get(name)
            return abbreviate_rate(compute_rate(file_size, duration))
        d.addCallback(_convert)
        return d

    @renderer
    def rate_total(self, req, tag):
        return tag(self._get_rate("total"))

    @renderer
    def rate_storage_index(self, req, tag):
        return tag(self._get_rate("storage_index"))

    @renderer
    def rate_encode(self, req, tag):
        return tag(self._get_rate("cumulative_encoding"))

    @renderer
    def rate_push(self, req, tag):
        return self._get_rate("cumulative_sending")

    @renderer
    def rate_encode_and_push(self, req, tag):
        d = self.upload_results()
        def _convert(r):
            file_size = r.get_file_size()
            time1 = r.get_timings().get("cumulative_encoding")
            time2 = r.get_timings().get("cumulative_sending")
            if (time1 is None or time2 is None):
                return abbreviate_rate(None)
            else:
                return abbreviate_rate(compute_rate(file_size, time1+time2))
        d.addCallback(_convert)
        return d

    @renderer
    def rate_ciphertext_fetch(self, req, tag):
        d = self.upload_results()
        def _convert(r):
            fetch_size = r.get_ciphertext_fetched()
            duration = r.get_timings().get("cumulative_fetch")
            return abbreviate_rate(compute_rate(fetch_size, duration))
        d.addCallback(_convert)
        return d


class UploadStatusPage(Resource, object):
    """Renders /status/up-%d."""

    def __init__(self, upload_status):
        """
        :param IUploadStatus upload_status: stats provider.
        """
        super(UploadStatusPage, self).__init__()
        self._upload_status = upload_status

    @render_exception
    def render_GET(self, req):
        elem = UploadStatusElement(self._upload_status)
        return renderElement(req, elem)


class UploadStatusElement(UploadResultsRendererMixin):

    loader = XMLFile(FilePath(__file__).sibling("upload-status.xhtml"))

    def __init__(self, upload_status):
        super(UploadStatusElement, self).__init__()
        self._upload_status = upload_status

    def upload_results(self):
        return defer.maybeDeferred(self._upload_status.get_results)

    @renderer
    def results(self, req, tag):
        d = self.upload_results()
        def _got_results(results):
            if results:
                return tag
            return ""
        d.addCallback(_got_results)
        return d

    @renderer
    def started(self, req, tag):
        started_s = render_time(self._upload_status.get_started())
        return tag(started_s)

    @renderer
    def si(self, req, tag):
        si_s = base32.b2a_or_none(self._upload_status.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        else:
            si_s = str(si_s, "utf-8")
        return tag(si_s)

    @renderer
    def helper(self, req, tag):
        return tag({True: "Yes",
                    False: "No"}[self._upload_status.using_helper()])

    @renderer
    def total_size(self, req, tag):
        size = self._upload_status.get_size()
        if size is None:
            return "(unknown)"
        return tag(str(size))

    @renderer
    def progress_hash(self, req, tag):
        progress = self._upload_status.get_progress()[0]
        # TODO: make an ascii-art bar
        return tag("%.1f%%" % (100.0 * progress))

    @renderer
    def progress_ciphertext(self, req, tag):
        progress = self._upload_status.get_progress()[1]
        # TODO: make an ascii-art bar
        return "%.1f%%" % (100.0 * progress)

    @renderer
    def progress_encode_push(self, req, tag):
        progress = self._upload_status.get_progress()[2]
        # TODO: make an ascii-art bar
        return tag("%.1f%%" % (100.0 * progress))

    @renderer
    def status(self, req, tag):
        return tag(self._upload_status.get_status())


def _find_overlap(events, start_key, end_key):
    """
    given a list of event dicts, return a new list in which each event
    has an extra "row" key (an int, starting at 0), and if appropriate
    a "serverid" key (ascii-encoded server id), replacing the "server"
    key. This is a hint to our JS frontend about how to overlap the
    parts of the graph it is drawing.

    we must always make a copy, since we're going to be adding keys
    and don't want to change the original objects. If we're
    stringifying serverids, we'll also be changing the serverid keys.
    """
    new_events = []
    rows = []
    for ev in events:
        ev = ev.copy()
        if 'server' in ev:
            ev["serverid"] = ev["server"].get_longname()
            del ev["server"]
        # find an empty slot in the rows
        free_slot = None
        for row,finished in enumerate(rows):
            if finished is not None:
                if ev[start_key] > finished:
                    free_slot = row
                    break
        if free_slot is None:
            free_slot = len(rows)
            rows.append(ev[end_key])
        else:
            rows[free_slot] = ev[end_key]
        ev["row"] = free_slot
        new_events.append(ev)
    return new_events

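# Illustrative sketch for _find_overlap() above (hypothetical values, not
# used anywhere in this module): given
#
#   events = [{"start_time": 1.0, "finish_time": 2.0},
#             {"start_time": 1.5, "finish_time": 3.0},
#             {"start_time": 2.5, "finish_time": None}]
#
# _find_overlap(events, "start_time", "finish_time") assigns "row" values
# 0, 1, 0: the second event overlaps the first and gets its own row, while
# the third starts after row 0's finish_time and can reuse it (and, because
# its own finish_time is None, keeps that row busy from then on).
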
def _find_overlap_requests(events):
    """
    We compute a three-element 'row tuple' for each event: (serverid,
    shnum, row). All elements are ints. The first is a mapping from
    serverid to group number, the second is a mapping from shnum to
    subgroup number. The third is a row within the subgroup.

    We also return a list of lists of rowcounts, so renderers can decide
    how much vertical space to give to each row.
    """

    serverid_to_group = {}
    groupnum_to_rows = {} # maps groupnum to a table of rows. Each table
                          # is a list with an element for each row number
                          # (int starting from 0) that contains a
                          # finish_time, indicating that the row is empty
                          # beyond that time. If finish_time is None, it
                          # indicates a response that has not yet
                          # completed, so the row cannot be reused.
    new_events = []
    for ev in events:
        # DownloadStatus promises to give us events in temporal order
        ev = ev.copy()
        ev["serverid"] = ev["server"].get_longname()
        del ev["server"]
        if ev["serverid"] not in serverid_to_group:
            groupnum = len(serverid_to_group)
            serverid_to_group[ev["serverid"]] = groupnum
        groupnum = serverid_to_group[ev["serverid"]]
        if groupnum not in groupnum_to_rows:
            groupnum_to_rows[groupnum] = []
        rows = groupnum_to_rows[groupnum]
        # find an empty slot in the rows
        free_slot = None
        for row,finished in enumerate(rows):
            if finished is not None:
                if ev["start_time"] > finished:
                    free_slot = row
                    break
        if free_slot is None:
            free_slot = len(rows)
            rows.append(ev["finish_time"])
        else:
            rows[free_slot] = ev["finish_time"]
        ev["row"] = (groupnum, free_slot)
        new_events.append(ev)
    del groupnum
    # maybe also return serverid_to_group, groupnum_to_rows, and some
    # indication of the highest finish_time
    #
    # actually, return the highest rownum for each groupnum
    highest_rownums = [len(groupnum_to_rows[groupnum])
                       for groupnum in range(len(serverid_to_group))]
    return new_events, highest_rownums

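# Illustrative sketch for _find_overlap_requests() above (hypothetical
# values): the loop stores a two-element (groupnum, rownum) pair in
# ev["row"].  With two servers A and B and block requests arriving as
# A, B, A -- the second A request starting only after the first finished --
# the assigned pairs are (0, 0), (1, 0) and (0, 0), and highest_rownums
# is [1, 1]: one row for each server's group.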

def _color(server):
    h = hashlib.sha256(server.get_serverid()).digest()
    def m(c):
        return min(ord(c) // 2 + 0x80, 0xff)
    return "#%02x%02x%02x" % (m(h[0:1]), m(h[1:2]), m(h[2:3]))

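# Note on _color() above: each of the first three SHA-256 digest bytes is
# halved and offset by 0x80, so every channel lands in the 0x80..0xff
# range and the result is a light, stable per-server color -- e.g. digest
# bytes (0x00, 0x7f, 0xff) would yield "#80bfff".
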
class _EventJson(Resource, object):

    def __init__(self, download_status):
        self._download_status = download_status

    @render_exception
    def render(self, request):
        request.setHeader("content-type", "text/plain")
        data = { } # this will be returned to the GET
        ds = self._download_status

        data["misc"] = _find_overlap(
            ds.misc_events,
            "start_time", "finish_time",
        )
        data["read"] = _find_overlap(
            ds.read_events,
            "start_time", "finish_time",
        )
        data["segment"] = _find_overlap(
            ds.segment_events,
            "start_time", "finish_time",
        )
        # TODO: overlap on DYHB isn't very useful, and usually gets in the
        # way. So don't do it.
        data["dyhb"] = _find_overlap(
            ds.dyhb_requests,
            "start_time", "finish_time",
        )
        data["block"], data["block_rownums"] = _find_overlap_requests(ds.block_requests)

        server_info = {} # maps longname to {num,color,short}
        server_shortnames = {} # maps servernum to shortname
        for d_ev in ds.dyhb_requests:
            s = d_ev["server"]
            longname = s.get_longname()
            if longname not in server_info:
                num = len(server_info)
                server_info[longname] = {"num": num,
                                         "color": _color(s),
                                         "short": s.get_name() }
                server_shortnames[str(num)] = s.get_name()

        data["server_info"] = server_info
        data["num_serverids"] = len(server_info)
        # we'd prefer the keys of serverids[] to be ints, but this is JSON,
        # so they get converted to strings. Stupid javascript.
        data["serverids"] = server_shortnames
        data["bounds"] = {"min": ds.first_timestamp, "max": ds.last_timestamp}
        return json.dumps(data, indent=1) + "\n"


class DownloadStatusPage(Resource, object):
    """Renders /status/down-%d."""

    def __init__(self, download_status):
        """
        :param IDownloadStatus download_status: stats provider
        """
        super(DownloadStatusPage, self).__init__()
        self._download_status = download_status
        self.putChild(b"event_json", _EventJson(self._download_status))

    @render_exception
    def render_GET(self, req):
        elem = DownloadStatusElement(self._download_status)
        return renderElement(req, elem)


class DownloadStatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("download-status.xhtml"))

    def __init__(self, download_status):
        super(DownloadStatusElement, self).__init__()
        self._download_status = download_status

    # XXX: fun fact: the `get_results()` method which we wind up
    # invoking here (see immutable.downloader.status.DownloadStatus)
    # is unimplemented, and simply returns a `None`. As a result,
    # `results()` renderer returns an empty tag, and does not invoke
    # any of the subsequent renderers. Thus we end up not displaying
    # download results on the download status page.
    #
    # See #3310: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3310
    def download_results(self):
        return self._download_status.get_results()

    def _relative_time(self, t):
        if t is None:
            return t
        if self._download_status.first_timestamp is not None:
            return t - self._download_status.first_timestamp
        return t

    def _short_relative_time(self, t):
        t = self._relative_time(t)
        if t is None:
            return ""
        return "+%.6fs" % t

    def _rate_and_time(self, bytes_count, seconds):
        time_s = abbreviate_time(seconds)
        if seconds != 0:
            rate = abbreviate_rate(bytes_count / seconds)
            return tags.span(time_s, title=rate)
        return tags.span(time_s)

    # XXX: This method is a candidate for refactoring. It renders
    # four tables from this function. Layout part of those tables
    # could be moved to download-status.xhtml.
    #
    # See #3311: https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3311
    @renderer
    def events(self, req, tag):
        if not self._download_status.get_storage_index():
            return tag

        srt = self._short_relative_time

        evtag = tags.div()

        # "DYHB Requests" table.
        dyhbtag = tags.table(align="left", class_="status-download-events")

        dyhbtag(tags.tr(tags.th("serverid"),
                        tags.th("sent"),
                        tags.th("received"),
                        tags.th("shnums"),
                        tags.th("RTT")))

        for d_ev in self._download_status.dyhb_requests:
            server = d_ev["server"]
            sent = d_ev["start_time"]
            shnums = d_ev["response_shnums"]
            received = d_ev["finish_time"]
            rtt = None
            if received is not None:
                rtt = received - sent
            if not shnums:
                shnums = ["-"]

            dyhbtag(tags.tr(style="background: %s" % _color(server))(
                (tags.td(server.get_name()),
                 tags.td(srt(sent)),
                 tags.td(srt(received)),
                 tags.td(",".join([str(shnum) for shnum in shnums])),
                 tags.td(abbreviate_time(rtt)),
                )))

        evtag(tags.h2("DYHB Requests:"), dyhbtag)
        evtag(tags.br(clear="all"))

        # "Read Events" table.
        readtag = tags.table(align="left",class_="status-download-events")

        readtag(tags.tr((
            tags.th("range"),
            tags.th("start"),
            tags.th("finish"),
            tags.th("got"),
            tags.th("time"),
            tags.th("decrypttime"),
            tags.th("pausedtime"),
            tags.th("speed"))))

        for r_ev in self._download_status.read_events:
            start = r_ev["start"]
            length = r_ev["length"]
            bytes_returned = r_ev["bytes_returned"]
            decrypt_time = ""
            if bytes_returned:
                decrypt_time = self._rate_and_time(bytes_returned, r_ev["decrypt_time"])
            speed, rtt = "",""
            if r_ev["finish_time"] is not None:
                rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"]
                speed = abbreviate_rate(compute_rate(bytes_returned, rtt))
                rtt = abbreviate_time(rtt)
            paused = abbreviate_time(r_ev["paused_time"])

            readtag(tags.tr(
                tags.td("[%d:+%d]" % (start, length)),
                tags.td(srt(r_ev["start_time"])),
                tags.td(srt(r_ev["finish_time"])),
                tags.td(str(bytes_returned)),
                tags.td(rtt),
                tags.td(decrypt_time),
                tags.td(paused),
                tags.td(speed),
            ))

        evtag(tags.h2("Read Events:"), readtag)
        evtag(tags.br(clear="all"))

        # "Segment Events" table.
        segtag = tags.table(align="left",class_="status-download-events")

        segtag(tags.tr(
            tags.th("segnum"),
            tags.th("start"),
            tags.th("active"),
            tags.th("finish"),
            tags.th("range"),
            tags.th("decodetime"),
            tags.th("segtime"),
            tags.th("speed")))

        for s_ev in self._download_status.segment_events:
            range_s = "-"
            segtime_s = "-"
            speed = "-"
            decode_time = "-"
            if s_ev["finish_time"] is not None:
                if s_ev["success"]:
                    segtime = s_ev["finish_time"] - s_ev["active_time"]
                    segtime_s = abbreviate_time(segtime)
                    seglen = s_ev["segment_length"]
                    range_s = "[%d:+%d]" % (s_ev["segment_start"], seglen)
                    speed = abbreviate_rate(compute_rate(seglen, segtime))
                    decode_time = self._rate_and_time(seglen, s_ev["decode_time"])
                else:
                    # error
                    range_s = "error"
            else:
                # not finished yet
                pass

            segtag(tags.tr(
                tags.td("seg%d" % s_ev["segment_number"]),
                tags.td(srt(s_ev["start_time"])),
                tags.td(srt(s_ev["active_time"])),
                tags.td(srt(s_ev["finish_time"])),
                tags.td(range_s),
                tags.td(decode_time),
                tags.td(segtime_s),
                tags.td(speed)))

        evtag(tags.h2("Segment Events:"), segtag)
        evtag(tags.br(clear="all"))

        # "Requests" table.
        reqtab = tags.table(align="left",class_="status-download-events")

        reqtab(tags.tr(
            tags.th("serverid"),
            tags.th("shnum"),
            tags.th("range"),
            tags.th("txtime"),
            tags.th("rxtime"),
            tags.th("received"),
            tags.th("RTT")))

        for r_ev in self._download_status.block_requests:
            server = r_ev["server"]
            rtt = None
            if r_ev["finish_time"] is not None:
                rtt = r_ev["finish_time"] - r_ev["start_time"]
            color = _color(server)
            reqtab(tags.tr(style="background: %s" % color)
                   (
                       tags.td(server.get_name()),
                       tags.td(str(r_ev["shnum"])),
                       tags.td("[%d:+%d]" % (r_ev["start"], r_ev["length"])),
                       tags.td(srt(r_ev["start_time"])),
                       tags.td(srt(r_ev["finish_time"])),
                       tags.td(str(r_ev["response_length"]) or ""),
                       tags.td(abbreviate_time(rtt)),
                   ))

        evtag(tags.h2("Requests:"), reqtab)
        evtag(tags.br(clear="all"))

        return evtag

    @renderer
    def results(self, req, tag):
        if self.download_results():
            return tag
        return ""

    @renderer
    def started(self, req, tag):
        started_s = render_time(self._download_status.get_started())
        return tag(started_s + " (%s)" % self._download_status.get_started())

    @renderer
    def si(self, req, tag):
        si_s = base32.b2a_or_none(self._download_status.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return tag(si_s)

    @renderer
    def helper(self, req, tag):
        return tag({True: "Yes",
                    False: "No"}[self._download_status.using_helper()])

    @renderer
    def total_size(self, req, tag):
        size = self._download_status.get_size()
        if size is None:
            return "(unknown)"
        return tag(str(size))

    @renderer
    def progress(self, req, tag):
        progress = self._download_status.get_progress()
        # TODO: make an ascii-art bar
        return tag("%.1f%%" % (100.0 * progress))

    @renderer
    def status(self, req, tag):
        return tag(self._download_status.get_status())

    @renderer
    def servers_used(self, req, tag):
        servers_used = self.download_results().servers_used
        if not servers_used:
            return ""
        peerids_s = ", ".join(["[%s]" % idlib.shortnodeid_b2a(peerid)
                               for peerid in servers_used])
        return tags.li("Servers Used: ", peerids_s)

    @renderer
    def servermap(self, req, tag):
        servermap = self.download_results().servermap
        if not servermap:
            return tag("None")
        ul = tags.ul()
        for peerid in sorted(servermap.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            shares_s = ",".join(["#%d" % shnum
                                 for shnum in servermap[peerid]])
            ul(tags.li("[%s] has share%s: %s" % (peerid_s,
                                                 plural(servermap[peerid]),
                                                 shares_s)))
        return ul

    @renderer
    def problems(self, req, tag):
        server_problems = self.download_results().server_problems
        if not server_problems:
            return ""
        ul = tags.ul()
        for peerid in sorted(server_problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            ul(tags.li("[%s]: %s" % (peerid_s, server_problems[peerid])))
        return tags.li("Server Problems:", ul)

    @renderer
    def file_size(self, req, tag):
        return tag(str(self.download_results().file_size))

    def _get_time(self, name):
        if self.download_results().timings:
            return self.download_results().timings.get(name)
        return None

    @renderer
    def time_total(self, req, tag):
        return tag(str(self._get_time("total")))

    @renderer
    def time_peer_selection(self, req, tag):
        return tag(str(self._get_time("peer_selection")))

    @renderer
    def time_uri_extension(self, req, tag):
        return tag(str(self._get_time("uri_extension")))

    @renderer
    def time_hashtrees(self, req, tag):
        return tag(str(self._get_time("hashtrees")))

    @renderer
    def time_segments(self, req, tag):
        return tag(str(self._get_time("segments")))

    @renderer
    def time_cumulative_fetch(self, req, tag):
        return tag(str(self._get_time("cumulative_fetch")))

    @renderer
    def time_cumulative_decode(self, req, tag):
        return tag(str(self._get_time("cumulative_decode")))

    @renderer
    def time_cumulative_decrypt(self, req, tag):
        return tag(str(self._get_time("cumulative_decrypt")))

    @renderer
    def time_paused(self, req, tag):
        return tag(str(self._get_time("paused")))

    def _get_rate(self, name):
        r = self.download_results()
        file_size = r.file_size
        duration = None
        if r.timings:
            duration = r.timings.get(name)
        return compute_rate(file_size, duration)

    @renderer
    def rate_total(self, req, tag):
        return tag(str(self._get_rate("total")))

    @renderer
    def rate_segments(self, req, tag):
        return tag(str(self._get_rate("segments")))

    @renderer
    def rate_fetch(self, req, tag):
        return tag(str(self._get_rate("cumulative_fetch")))

    @renderer
    def rate_decode(self, req, tag):
        return tag(str(self._get_rate("cumulative_decode")))

    @renderer
    def rate_decrypt(self, req, tag):
        return tag(str(self._get_rate("cumulative_decrypt")))

    @renderer
    def server_timings(self, req, tag):
        per_server = self._get_time("fetch_per_server")
        if per_server is None:
            return ""
        ul = tags.ul()
        for peerid in sorted(per_server.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            times_s = ", ".join([abbreviate_time(t)
                                 for t in per_server[peerid]])
            ul(tags.li("[%s]: %s" % (peerid_s, times_s)))
        return tags.li("Per-Server Segment Fetch Response Times: ", ul)


class RetrieveStatusPage(MultiFormatResource):
    """Renders /status/retrieve-%d."""

    def __init__(self, retrieve_status):
        """
        :param retrieve.RetrieveStatus retrieve_status: stats provider.
        """
        super(RetrieveStatusPage, self).__init__()
        self._retrieve_status = retrieve_status

    @render_exception
    def render_HTML(self, req):
        elem = RetrieveStatusElement(self._retrieve_status)
        return renderElement(req, elem)


class RetrieveStatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("retrieve-status.xhtml"))

    def __init__(self, retrieve_status):
        super(RetrieveStatusElement, self).__init__()
        self._retrieve_status = retrieve_status

    @renderer
    def started(self, req, tag):
        started_s = render_time(self._retrieve_status.get_started())
        return tag(started_s)

    @renderer
    def si(self, req, tag):
        si_s = base32.b2a_or_none(self._retrieve_status.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return tag(si_s)

    @renderer
    def helper(self, req, tag):
        return tag({True: "Yes",
                    False: "No"}[self._retrieve_status.using_helper()])

    @renderer
    def current_size(self, req, tag):
        size = self._retrieve_status.get_size()
        if size is None:
            size = "(unknown)"
        return tag(str(size))

    @renderer
    def progress(self, req, tag):
        progress = self._retrieve_status.get_progress()
        # TODO: make an ascii-art bar
        return tag("%.1f%%" % (100.0 * progress))

    @renderer
    def status(self, req, tag):
        return tag(self._retrieve_status.get_status())

    @renderer
    def encoding(self, req, tag):
        k, n = self._retrieve_status.get_encoding()
        return tag("Encoding: %s of %s" % (k, n))

    @renderer
    def problems(self, req, tag):
        problems = self._retrieve_status.get_problems()
        if not problems:
            return ""
        ul = tags.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            ul(tags.li("[%s]: %s" % (peerid_s, problems[peerid])))
        return tag("Server Problems:", ul)

    def _get_rate(self, name):
        file_size = self._retrieve_status.get_size()
        duration = self._retrieve_status.timings.get(name)
        return compute_rate(file_size, duration)

    @renderer
    def time_total(self, req, tag):
        return tag(str(self._retrieve_status.timings.get("total")))

    @renderer
    def rate_total(self, req, tag):
        return tag(str(self._get_rate("total")))

    @renderer
    def time_fetch(self, req, tag):
        return tag(str(self._retrieve_status.timings.get("fetch")))

    @renderer
    def rate_fetch(self, req, tag):
        return tag(str(self._get_rate("fetch")))

    @renderer
    def time_decode(self, req, tag):
        return tag(str(self._retrieve_status.timings.get("decode")))

    @renderer
    def rate_decode(self, req, tag):
        return tag(str(self._get_rate("decode")))

    @renderer
    def time_decrypt(self, req, tag):
        return tag(str(self._retrieve_status.timings.get("decrypt")))

    @renderer
    def rate_decrypt(self, req, tag):
        return tag(str(self._get_rate("decrypt")))

    @renderer
    def server_timings(self, req, tag):
        per_server = self._retrieve_status.timings.get("fetch_per_server")
        if not per_server:
            return tag("")
        l = tags.ul()
        for server in sorted(list(per_server.keys()), key=lambda s: s.get_name()):
            times_s = ", ".join([abbreviate_time(t)
                                 for t in per_server[server]])
            l(tags.li("[%s]: %s" % (str(server.get_name(), "utf-8"), times_s)))
        return tags.li("Per-Server Fetch Response Times: ", l)


class PublishStatusPage(MultiFormatResource):
    """Renders /status/publish-%d."""

    def __init__(self, publish_status):
        """
        :param mutable.publish.PublishStatus publish_status: stats provider.
        """
        super(PublishStatusPage, self).__init__()
        self._publish_status = publish_status

    @render_exception
    def render_HTML(self, req):
        elem = PublishStatusElement(self._publish_status)
        return renderElement(req, elem)


class PublishStatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("publish-status.xhtml"))

    def __init__(self, publish_status):
        super(PublishStatusElement, self).__init__()
        self._publish_status = publish_status

    @renderer
    def started(self, req, tag):
        started_s = render_time(self._publish_status.get_started())
        return tag(started_s)

    @renderer
    def si(self, req, tag):
        si_s = base32.b2a_or_none(self._publish_status.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        else:
            si_s = str(si_s, "utf-8")
        return tag(si_s)

    @renderer
    def helper(self, req, tag):
        return tag({True: "Yes",
                    False: "No"}[self._publish_status.using_helper()])

    @renderer
    def current_size(self, req, tag):
        size = self._publish_status.get_size()
        if size is None:
            size = "(unknown)"
        return tag(str(size))

    @renderer
    def progress(self, req, tag):
        progress = self._publish_status.get_progress()
        # TODO: make an ascii-art bar
        return tag("%.1f%%" % (100.0 * progress))

    @renderer
    def status(self, req, tag):
        return tag(self._publish_status.get_status())

    @renderer
    def encoding(self, req, tag):
        k, n = self._publish_status.get_encoding()
        return tag("Encoding: %s of %s" % (k, n))

    @renderer
    def sharemap(self, req, tag):
        servermap = self._publish_status.get_servermap()
        if servermap is None:
            return tag("None")
        l = tags.ul()
        sharemap = servermap.make_sharemap()
        for shnum in sorted(sharemap.keys()):
            l(tags.li("%d -> Placed on " % shnum,
                      ", ".join(["[%s]" % str(server.get_name(), "utf-8")
                                 for server in sharemap[shnum]])))
        return tag("Sharemap:", l)

    @renderer
    def problems(self, req, tag):
        problems = self._publish_status.get_problems()
        if not problems:
            return tag()
        l = tags.ul()
        # XXX: is this exercised? I don't think PublishStatus.problems is
        # ever populated
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l(tags.li("[%s]: %s" % (peerid_s, problems[peerid])))
        return tag(tags.li("Server Problems:", l))

    def _get_rate(self, name):
        file_size = self._publish_status.get_size()
        duration = self._publish_status.timings.get(name)
        return str(compute_rate(file_size, duration))

    def _get_time(self, name):
        return str(self._publish_status.timings.get(name))

    @renderer
    def time_total(self, req, tag):
        return tag(self._get_time("total"))

    @renderer
    def rate_total(self, req, tag):
        return tag(self._get_rate("total"))

    @renderer
    def time_setup(self, req, tag):
        return tag(self._get_time("setup"))

    @renderer
    def time_encrypt(self, req, tag):
        return tag(self._get_time("encrypt"))

    @renderer
    def rate_encrypt(self, req, tag):
        return tag(self._get_rate("encrypt"))

    @renderer
    def time_encode(self, req, tag):
        return tag(self._get_time("encode"))

    @renderer
    def rate_encode(self, req, tag):
        return tag(self._get_rate("encode"))

    @renderer
    def time_pack(self, req, tag):
        return tag(self._get_time("pack"))

    @renderer
    def rate_pack(self, req, tag):
        return tag(self._get_rate("pack"))

    @renderer
    def time_sign(self, req, tag):
        return tag(self._get_time("sign"))

    @renderer
    def time_push(self, req, tag):
        return tag(self._get_time("push"))

    @renderer
    def rate_push(self, req, tag):
        return self._get_rate("push")

    @renderer
    def server_timings(self, req, tag):
        per_server = self._publish_status.timings.get("send_per_server")
        if not per_server:
            return tag()
        l = tags.ul()
        for server in sorted(list(per_server.keys()), key=lambda s: s.get_name()):
            times_s = ", ".join([abbreviate_time(t)
                                 for t in per_server[server]])
            l(tags.li("[%s]: %s" % (str(server.get_name(), "utf-8"), times_s)))
        return tags.li("Per-Server Response Times: ", l)



class MapupdateStatusPage(MultiFormatResource):
    """Renders /status/mapupdate-%d."""

    def __init__(self, update_status):
        """
        :param servermap.UpdateStatus update_status: server map stats provider.
        """
        super(MapupdateStatusPage, self).__init__()
        self._update_status = update_status

    @render_exception
    def render_HTML(self, req):
        elem = MapupdateStatusElement(self._update_status)
        return renderElement(req, elem)


class MapupdateStatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("map-update-status.xhtml"))

    def __init__(self, update_status):
        super(MapupdateStatusElement, self).__init__()
        self._update_status = update_status

    @renderer
    def started(self, req, tag):
        started_s = render_time(self._update_status.get_started())
        return tag(started_s)

    @renderer
    def finished(self, req, tag):
        when = self._update_status.get_finished()
        if not when:
            return tag("not yet")
        started_s = render_time(self._update_status.get_finished())
        return tag(started_s)

    @renderer
    def si(self, req, tag):
        si_s = base32.b2a_or_none(self._update_status.get_storage_index())
        if si_s is None:
            si_s = "(None)"
        return tag(si_s)

    @renderer
    def helper(self, req, tag):
        return tag({True: "Yes",
                    False: "No"}[self._update_status.using_helper()])

    @renderer
    def progress(self, req, tag):
        progress = self._update_status.get_progress()
        # TODO: make an ascii-art bar
        return tag("%.1f%%" % (100.0 * progress))

    @renderer
    def status(self, req, tag):
        return tag(self._update_status.get_status())

    @renderer
    def problems(self, req, tag):
        problems = self._update_status.problems
        if not problems:
            return tag
        l = tags.ul()
        for peerid in sorted(problems.keys()):
            peerid_s = idlib.shortnodeid_b2a(peerid)
            l(tags.li("[%s]: %s" % (peerid_s, problems[peerid])))
        return tag("Server Problems:", l)

    @renderer
    def privkey_from(self, req, tag):
        server = self._update_status.get_privkey_from()
        if server:
            return tag(tags.li("Got privkey from: [%s]" % str(
                server.get_name(), "utf-8")))
        else:
            return tag

    # Helper to query update status timings.
    #
    # Querying `update_status.timings` can yield `None` or a numeric
    # value, but twisted.web has trouble flattening the element tree
    # when a node contains numeric values. Stringifying them helps.
    def _get_update_status_timing(self, name, tag):
        res = self._update_status.timings.get(name)
        if not res:
            return tag("0")
        return tag(abbreviate_time(res))

    @renderer
    def time_total(self, req, tag):
        return self._get_update_status_timing("total", tag)

    @renderer
    def time_initial_queries(self, req, tag):
        return self._get_update_status_timing("initial_queries", tag)

    @renderer
    def time_cumulative_verify(self, req, tag):
        return self._get_update_status_timing("cumulative_verify", tag)

    @renderer
    def server_timings(self, req, tag):
        per_server = self._update_status.timings.get("per_server")
        if not per_server:
            return tag("")
        l = tags.ul()
        for server in sorted(per_server.keys(), key=lambda s: s.get_name()):
            times = []
            for op,started,t in per_server[server]:
                #times.append("%s/%.4fs/%s/%s" % (op,
                #                                 started,
                #                                 self.render_time(None, started - self.update_status.get_started()),
                #                                 self.render_time(None,t)))
                if op == "query":
                    times.append(abbreviate_time(t))
                elif op == "late":
                    times.append("late(" + abbreviate_time(t) + ")")
                else:
                    times.append("privkey(" + abbreviate_time(t) + ")")
            times_s = ", ".join(times)
            l(tags.li("[%s]: %s" % (str(server.get_name(), "utf-8"), times_s)))
        return tags.li("Per-Server Response Times: ", l)


def marshal_json(s):
    # common item data
    item = {
        "storage-index-string": base32.b2a_or_none(s.get_storage_index()),
        "total-size": s.get_size(),
        "status": s.get_status(),
    }

    # type-specific item data
    if IUploadStatus.providedBy(s):
        h, c, e = s.get_progress()
        item["type"] = "upload"
        item["progress-hash"] = h
        item["progress-ciphertext"] = c
        item["progress-encode-push"] = e

    elif IDownloadStatus.providedBy(s):
        item["type"] = "download"
        item["progress"] = s.get_progress()

    elif IPublishStatus.providedBy(s):
        item["type"] = "publish"

    elif IRetrieveStatus.providedBy(s):
        item["type"] = "retrieve"

    elif IServermapUpdaterStatus.providedBy(s):
        item["type"] = "mapupdate"
        item["mode"] = s.get_mode()

    else:
        item["type"] = "unknown"
        item["class"] = s.__class__.__name__

    return item

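# Illustrative example only (values are made up): for an immutable upload,
# marshal_json() returns something like
#
#   {"storage-index-string": b"...", "total-size": 12345, "status": "...",
#    "type": "upload", "progress-hash": 1.0, "progress-ciphertext": 0.5,
#    "progress-encode-push": 0.0}
#
# Downloads carry a single "progress" value instead, and mapupdate items
# additionally carry a "mode".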

class Status(MultiFormatResource):
    """Renders /status page."""

    def __init__(self, history):
        """
        :param allmydata.history.History history: provides operation statuses.
        """
        super(Status, self).__init__()
        self.history = history

    @render_exception
    def render_HTML(self, req):
        elem = StatusElement(self._get_active_operations(),
                             self._get_recent_operations())
        return renderElement(req, elem)

    @render_exception
    def render_JSON(self, req):
        # modern browsers now render this instead of forcing downloads
        req.setHeader("content-type", "application/json")
        data = {}
        data["active"] = active = []
        data["recent"] = recent = []

        for s in self._get_active_operations():
            active.append(marshal_json(s))

        for s in self._get_recent_operations():
            recent.append(marshal_json(s))

        return json.dumps(data, indent=1) + "\n"

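    # For illustration: the JSON document rendered above has the shape
    # {"active": [...], "recent": [...]}, where each list entry is an
    # item dict built by marshal_json() for a single operation.
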
    @exception_to_child
    def getChild(self, path, request):
        # The "if (path is empty) return self" line should handle
        # trailing slash in request path.
        #
        # Twisted Web's documentation says this: "If the URL ends in a
        # slash, for example ``http://example.com/foo/bar/`` , the
        # final URL segment will be an empty string. Resources can
        # thus know if they were requested with or without a final
        # slash."
        if not path and request.postpath != [b'']:
            return self

        h = self.history
        try:
            stype, count_s = path.split(b"-")
        except ValueError:
            raise WebError("no '-' in '{}'".format(str(path, "utf-8")))
        count = int(count_s)
        stype = str(stype, "ascii")
        if stype == "up":
            for s in itertools.chain(h.list_all_upload_statuses(),
                                     h.list_all_helper_statuses()):
                # immutable-upload helpers use the same status object as a
                # regular immutable-upload
                if s.get_counter() == count:
                    return UploadStatusPage(s)
        if stype == "down":
            for s in h.list_all_download_statuses():
                if s.get_counter() == count:
                    return DownloadStatusPage(s)
        if stype == "mapupdate":
            for s in h.list_all_mapupdate_statuses():
                if s.get_counter() == count:
                    return MapupdateStatusPage(s)
        if stype == "publish":
            for s in h.list_all_publish_statuses():
                if s.get_counter() == count:
                    return PublishStatusPage(s)
        if stype == "retrieve":
            for s in h.list_all_retrieve_statuses():
                if s.get_counter() == count:
                    return RetrieveStatusPage(s)

    def _get_all_statuses(self):
        h = self.history
        return itertools.chain(h.list_all_upload_statuses(),
                               h.list_all_download_statuses(),
                               h.list_all_mapupdate_statuses(),
                               h.list_all_publish_statuses(),
                               h.list_all_retrieve_statuses(),
                               h.list_all_helper_statuses(),
                               )

    def _get_active_operations(self):
        active = [s
                  for s in self._get_all_statuses()
                  if s.get_active()]
        active.sort(key=lambda a: a.get_started())
        active.reverse()
        return active

    def _get_recent_operations(self):
        recent = [s
                  for s in self._get_all_statuses()
                  if not s.get_active()]
        recent.sort(key=lambda a: a.get_started())
        recent.reverse()
        return recent


class StatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("status.xhtml"))

    def __init__(self, active, recent):
        super(StatusElement, self).__init__()
        self._active = active
        self._recent = recent

    @renderer
    def active_operations(self, req, tag):
        active = [self.get_op_state(op) for op in self._active]
        return SlotsSequenceElement(tag, active)

    @renderer
    def recent_operations(self, req, tag):
        recent = [self.get_op_state(op) for op in self._recent]
        return SlotsSequenceElement(tag, recent)

    @staticmethod
    def get_op_state(op):
        result = dict()

        started_s = render_time(op.get_started())
        result["started"] = started_s
        si_s = base32.b2a_or_none(op.get_storage_index())
        if si_s is None:
            si_s = "(None)"

        result["si"] = si_s
        result["helper"] = {True: "Yes", False: "No"}[op.using_helper()]

        size = op.get_size()
        if size is None:
            size = "(unknown)"
        elif isinstance(size, (int, float)):
            size = abbreviate_size(size)

        result["total_size"] = size

        progress = op.get_progress()
        if IUploadStatus.providedBy(op):
            link = "up-%d" % op.get_counter()
            result["type"] = "upload"
            # TODO: make an ascii-art bar
            (chk, ciphertext, encandpush) = progress
            progress_s = ("hash: %.1f%%, ciphertext: %.1f%%, encode: %.1f%%" %
                          ((100.0 * chk),
                           (100.0 * ciphertext),
                           (100.0 * encandpush)))
            result["progress"] = progress_s
        elif IDownloadStatus.providedBy(op):
            link = "down-%d" % op.get_counter()
            result["type"] = "download"
            result["progress"] = "%.1f%%" % (100.0 * progress)
        elif IPublishStatus.providedBy(op):
            link = "publish-%d" % op.get_counter()
            result["type"] = "publish"
            result["progress"] = "%.1f%%" % (100.0 * progress)
        elif IRetrieveStatus.providedBy(op):
            result["type"] = "retrieve"
            link = "retrieve-%d" % op.get_counter()
            result["progress"] = "%.1f%%" % (100.0 * progress)
        else:
            assert IServermapUpdaterStatus.providedBy(op)
            result["type"] = "mapupdate %s" % op.get_mode()
            link = "mapupdate-%d" % op.get_counter()
            result["progress"] = "%.1f%%" % (100.0 * progress)

        result["status"] = tags.a(op.get_status(),
                                  href="/status/{}".format(link))

        return result


1434 | # Render "/helper_status" page. |
---|
1435 | class HelperStatus(MultiFormatResource): |
---|
1436 | |
---|
1437 | def __init__(self, helper): |
---|
1438 | super(HelperStatus, self).__init__() |
---|
1439 | self._helper = helper |
---|
1440 | |
---|
1441 | @render_exception |
---|
1442 | def render_HTML(self, req): |
---|
1443 | return renderElement(req, HelperStatusElement(self._helper)) |
---|
1444 | |
---|
1445 | @render_exception |
---|
1446 | def render_JSON(self, req): |
---|
1447 | req.setHeader("content-type", "text/plain") |
---|
1448 | if self._helper: |
---|
1449 | stats = self._helper.get_stats() |
---|
1450 | return json.dumps(stats, indent=1) + "\n" |
---|
1451 | return json.dumps({}) + "\n" |


class HelperStatusElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("helper.xhtml"))

    def __init__(self, helper):
        """
        :param _allmydata.immutable.offloaded.Helper helper: upload helper.
        """
        super(HelperStatusElement, self).__init__()
        self._helper = helper

    @renderer
    def helper_running(self, req, tag):
        # helper.get_stats() returns a dict of this form:
        #
        #   {'chk_upload_helper.active_uploads': 0,
        #    'chk_upload_helper.encoded_bytes': 0,
        #    'chk_upload_helper.encoding_count': 0,
        #    'chk_upload_helper.encoding_size': 0,
        #    'chk_upload_helper.encoding_size_old': 0,
        #    'chk_upload_helper.fetched_bytes': 0,
        #    'chk_upload_helper.incoming_count': 0,
        #    'chk_upload_helper.incoming_size': 0,
        #    'chk_upload_helper.incoming_size_old': 0,
        #    'chk_upload_helper.resumes': 0,
        #    'chk_upload_helper.upload_already_present': 0,
        #    'chk_upload_helper.upload_need_upload': 0,
        #    'chk_upload_helper.upload_requests': 0}
        #
        # If the helper is running, we render the above data on the page.
        if self._helper:
            self._data = self._helper.get_stats()
            return tag
        return tags.h1("No helper is running")
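
    # The renderers below all read self._data, which only helper_running()
    # above sets; the helper.xhtml template is assumed to nest them inside
    # the element that helper_running() handles, so they are only evaluated
    # when a helper is actually configured.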

    @renderer
    def active_uploads(self, req, tag):
        return tag(str(self._data["chk_upload_helper.active_uploads"]))

    @renderer
    def incoming(self, req, tag):
        return tag("%d bytes in %d files" % (self._data["chk_upload_helper.incoming_size"],
                                             self._data["chk_upload_helper.incoming_count"]))

    @renderer
    def encoding(self, req, tag):
        return tag("%d bytes in %d files" % (self._data["chk_upload_helper.encoding_size"],
                                             self._data["chk_upload_helper.encoding_count"]))

    @renderer
    def upload_requests(self, req, tag):
        return tag(str(self._data["chk_upload_helper.upload_requests"]))

    @renderer
    def upload_already_present(self, req, tag):
        return tag(str(self._data["chk_upload_helper.upload_already_present"]))

    @renderer
    def upload_need_upload(self, req, tag):
        return tag(str(self._data["chk_upload_helper.upload_need_upload"]))

    @renderer
    def upload_bytes_fetched(self, req, tag):
        return tag(str(self._data["chk_upload_helper.fetched_bytes"]))

    @renderer
    def upload_bytes_encoded(self, req, tag):
        return tag(str(self._data["chk_upload_helper.encoded_bytes"]))


# Render "/statistics" page.
class Statistics(MultiFormatResource):
    """Class that renders the "/statistics" page.

    :param _allmydata.stats.StatsProvider provider: node statistics
        provider.
    """

    def __init__(self, provider):
        super(Statistics, self).__init__()
        self._provider = provider

    @render_exception
    def render_HTML(self, req):
        return renderElement(req, StatisticsElement(self._provider))

    @render_exception
    def render_JSON(self, req):
        stats = self._provider.get_stats()
        req.setHeader("content-type", "text/plain")
        return json.dumps(stats, indent=1) + "\n"

    @render_exception
    def render_OPENMETRICS(self, req):
        """
        Render our stats in `OpenMetrics <https://openmetrics.io/>`_ format.
        Prometheus and VictoriaMetrics, for example, can parse this format.
        Point the scraper at ``/statistics?t=openmetrics`` (instead of the
        default ``/metrics``).
        """
        req.setHeader("content-type", "application/openmetrics-text; version=1.0.0; charset=utf-8")
        stats = self._provider.get_stats()
        ret = []

        def mangle_name(name):
            return re.sub(
                r"_(\d\d)_(\d)_percentile",
                r'{quantile="0.\g<1>\g<2>"}',
                name.replace(u".", u"_")
            )
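
        # For example, a percentile stat named like
        # "storage_server.latencies.get.99_9_percentile" (an illustrative
        # name, not one every node necessarily reports) is mangled to
        # 'storage_server_latencies_get{quantile="0.999"}' and emitted
        # below with a "tahoe_stats_" prefix.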

        def mangle_value(val):
            return str(val) if val is not None else u"NaN"

        for (k, v) in sorted(stats['counters'].items()):
            ret.append(u"tahoe_counters_%s %s" % (mangle_name(k), mangle_value(v)))
        for (k, v) in sorted(stats['stats'].items()):
            ret.append(u"tahoe_stats_%s %s" % (mangle_name(k), mangle_value(v)))

        ret.append(u"# EOF\n")

        return u"\n".join(ret)
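
    # A minimal Prometheus scrape job for the endpoint above might look
    # like this; the job name and target address are illustrative
    # assumptions, not something this module defines:
    #
    #   scrape_configs:
    #     - job_name: "tahoe"
    #       metrics_path: "/statistics"
    #       params:
    #         t: ["openmetrics"]
    #       static_configs:
    #         - targets: ["127.0.0.1:3456"]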


class StatisticsElement(Element):

    loader = XMLFile(FilePath(__file__).sibling("statistics.xhtml"))

    def __init__(self, provider):
        super(StatisticsElement, self).__init__()
        # provider.get_stats() returns a dict of the below form, for
        # example (there's often more data than this):
        #
        #   {
        #     'stats': {
        #       'storage_server.disk_used': 809601609728,
        #       'storage_server.accepting_immutable_shares': 1,
        #       'storage_server.disk_free_for_root': 131486851072,
        #       'storage_server.reserved_space': 1000000000,
        #       'node.uptime': 0.16520118713378906,
        #       'storage_server.disk_total': 941088460800,
        #       'cpu_monitor.total': 0.004513999999999907,
        #       'storage_server.disk_avail': 82610759168,
        #       'storage_server.allocated': 0,
        #       'storage_server.disk_free_for_nonroot': 83610759168 },
        #     'counters': {
        #       'uploader.files_uploaded': 0,
        #       'uploader.bytes_uploaded': 0,
        #       ... }
        #   }
        #
        # Note that `counters` can be empty.
        self._stats = provider.get_stats()

    @renderer
    def uploads(self, req, tag):
        files = self._stats["counters"].get("uploader.files_uploaded", 0)
        bytes_uploaded = self._stats["counters"].get("uploader.bytes_uploaded", 0)
        return tag("%s files / %s bytes (%s)" %
                   (files, bytes_uploaded, abbreviate_size(bytes_uploaded)))

    @renderer
    def downloads(self, req, tag):
        files = self._stats["counters"].get("downloader.files_downloaded", 0)
        bytes_downloaded = self._stats["counters"].get("downloader.bytes_downloaded", 0)
        return tag("%s files / %s bytes (%s)" %
                   (files, bytes_downloaded, abbreviate_size(bytes_downloaded)))

    @renderer
    def publishes(self, req, tag):
        files = self._stats["counters"].get("mutable.files_published", 0)
        bytes_published = self._stats["counters"].get("mutable.bytes_published", 0)
        return tag("%s files / %s bytes (%s)" % (files, bytes_published,
                                                 abbreviate_size(bytes_published)))

    @renderer
    def retrieves(self, req, tag):
        files = self._stats["counters"].get("mutable.files_retrieved", 0)
        bytes_retrieved = self._stats["counters"].get("mutable.bytes_retrieved", 0)
        return tag("%s files / %s bytes (%s)" % (files, bytes_retrieved,
                                                 abbreviate_size(bytes_retrieved)))

    @renderer
    def raw(self, req, tag):
        raw = json.dumps(self._stats, sort_keys=True, indent=4)
        return tag(raw)