commit c1ed556d40
Merge PR405: use stdlib "json" instead of simplejson
refs ticket:2766
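The change is almost entirely mechanical: each "import simplejson" becomes "import json", and the loads/dumps/load call sites keep the same signatures, so the stdlib module works as a drop-in replacement for the usage in this patch. A minimal sketch of the pattern, with an illustrative dictionary that is not taken from the patch:

    import json  # previously: import simplejson

    # loads/dumps round-trip the same way simplejson did for these call sites;
    # indent=True (seen in the CPUWatcher and DiskWatcher hunks below) still
    # works because bool is an int, giving one space of indentation per level.
    blob = json.dumps({"storage": 5}, indent=True)
    assert json.loads(blob) == {"storage": 5}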
@ -147,7 +147,6 @@ print_py_pkg_ver('pycrypto', 'Crypto')
|
|||||||
print_py_pkg_ver('pycryptopp')
|
print_py_pkg_ver('pycryptopp')
|
||||||
print_py_pkg_ver('pyflakes')
|
print_py_pkg_ver('pyflakes')
|
||||||
print_py_pkg_ver('pyOpenSSL', 'OpenSSL')
|
print_py_pkg_ver('pyOpenSSL', 'OpenSSL')
|
||||||
print_py_pkg_ver('simplejson')
|
|
||||||
print_py_pkg_ver('six')
|
print_py_pkg_ver('six')
|
||||||
print_py_pkg_ver('trialcoverage')
|
print_py_pkg_ver('trialcoverage')
|
||||||
print_py_pkg_ver('Twisted', 'twisted')
|
print_py_pkg_ver('Twisted', 'twisted')
|
||||||
|
@ -23,7 +23,6 @@
|
|||||||
# python: 2.7.5 (/usr/bin/python)
|
# python: 2.7.5 (/usr/bin/python)
|
||||||
# platform: Darwin-13.4.0-x86_64-i386-64bit (None)
|
# platform: Darwin-13.4.0-x86_64-i386-64bit (None)
|
||||||
# pyOpenSSL: 0.13 (/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python)
|
# pyOpenSSL: 0.13 (/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python)
|
||||||
# simplejson: 3.6.4 (/Applications/tahoe.app/support/lib/python2.7/site-packages/simplejson-3.6.4-py2.7-macosx-10.9-intel.egg)
|
|
||||||
# pycrypto: 2.6.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pycrypto-2.6.1-py2.7-macosx-10.9-intel.egg)
|
# pycrypto: 2.6.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pycrypto-2.6.1-py2.7-macosx-10.9-intel.egg)
|
||||||
# pyasn1: 0.1.7 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pyasn1-0.1.7-py2.7.egg)
|
# pyasn1: 0.1.7 (/Applications/tahoe.app/support/lib/python2.7/site-packages/pyasn1-0.1.7-py2.7.egg)
|
||||||
# mock: 1.0.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages)
|
# mock: 1.0.1 (/Applications/tahoe.app/support/lib/python2.7/site-packages)
|
||||||
|
@ -25,7 +25,7 @@ import pickle, os.path, time, pprint
|
|||||||
from twisted.application import internet, service, strports
|
from twisted.application import internet, service, strports
|
||||||
from twisted.web import server, resource, http
|
from twisted.web import server, resource, http
|
||||||
from twisted.python import log
|
from twisted.python import log
|
||||||
import simplejson
|
import json
|
||||||
from foolscap import Tub, Referenceable, RemoteInterface, eventual
|
from foolscap import Tub, Referenceable, RemoteInterface, eventual
|
||||||
from foolscap.schema import ListOf, TupleOf
|
from foolscap.schema import ListOf, TupleOf
|
||||||
from zope.interface import implements
|
from zope.interface import implements
|
||||||
@ -159,7 +159,7 @@ class CPUWatcher(service.MultiService, resource.Resource, Referenceable):
|
|||||||
data += pprint.pformat(self.current) + "\n"
|
data += pprint.pformat(self.current) + "\n"
|
||||||
elif t == "json":
|
elif t == "json":
|
||||||
#data = str(self.current) + "\n" # isn't that convenient? almost.
|
#data = str(self.current) + "\n" # isn't that convenient? almost.
|
||||||
data = simplejson.dumps(self.current, indent=True)
|
data = json.dumps(self.current, indent=True)
|
||||||
else:
|
else:
|
||||||
req.setResponseCode(http.BAD_REQUEST)
|
req.setResponseCode(http.BAD_REQUEST)
|
||||||
data = "Unknown t= %s\n" % t
|
data = "Unknown t= %s\n" % t
|
||||||
|
@ -2,10 +2,10 @@
|
|||||||
|
|
||||||
import os, sys, re
|
import os, sys, re
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
current = simplejson.loads(urllib.urlopen(url).read())
|
current = json.loads(urllib.urlopen(url).read())
|
||||||
|
|
||||||
configinfo = """\
|
configinfo = """\
|
||||||
graph_title Tahoe CPU Usage
|
graph_title Tahoe CPU Usage
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
# is left on all disks across the grid. The plugin should be configured with
|
# is left on all disks across the grid. The plugin should be configured with
|
||||||
# env_url= pointing at the diskwatcher.tac webport.
|
# env_url= pointing at the diskwatcher.tac webport.
|
||||||
|
|
||||||
import os, sys, urllib, simplejson
|
import os, sys, urllib, json
|
||||||
|
|
||||||
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
||||||
print """\
|
print """\
|
||||||
@ -18,5 +18,5 @@ disk_left.draw LINE1"""
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
data = simplejson.load(urllib.urlopen(url))["available"]
|
data = json.load(urllib.urlopen(url))["available"]
|
||||||
print "disk_left.value", data
|
print "disk_left.value", data
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
# used. The plugin should be configured with env_url= pointing at the
|
# used. The plugin should be configured with env_url= pointing at the
|
||||||
# diskwatcher.tac webport.
|
# diskwatcher.tac webport.
|
||||||
|
|
||||||
import os, sys, urllib, simplejson
|
import os, sys, urllib, json
|
||||||
|
|
||||||
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
||||||
print """\
|
print """\
|
||||||
@ -21,6 +21,6 @@ disk_used.draw LINE1"""
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
data = simplejson.load(urllib.urlopen(url))
|
data = json.load(urllib.urlopen(url))
|
||||||
print "disk_total.value", data["total"]
|
print "disk_total.value", data["total"]
|
||||||
print "disk_used.value", data["used"]
|
print "disk_used.value", data["used"]
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
# is being used per unit time. The plugin should be configured with env_url=
|
# is being used per unit time. The plugin should be configured with env_url=
|
||||||
# pointing at the diskwatcher.tac webport.
|
# pointing at the diskwatcher.tac webport.
|
||||||
|
|
||||||
import os, sys, urllib, simplejson
|
import os, sys, urllib, json
|
||||||
|
|
||||||
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
||||||
print """\
|
print """\
|
||||||
@ -25,7 +25,7 @@ rate_4wk.draw LINE2"""
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
timespans = simplejson.load(urllib.urlopen(url))["rates"]
|
timespans = json.load(urllib.urlopen(url))["rates"]
|
||||||
|
|
||||||
data = dict([(name, growth)
|
data = dict([(name, growth)
|
||||||
for (name, timespan, growth, timeleft) in timespans])
|
for (name, timespan, growth, timeleft) in timespans])
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
# used on all disks across the grid. The plugin should be configured with
|
# used on all disks across the grid. The plugin should be configured with
|
||||||
# env_url= pointing at the diskwatcher.tac webport.
|
# env_url= pointing at the diskwatcher.tac webport.
|
||||||
|
|
||||||
import os, sys, urllib, simplejson
|
import os, sys, urllib, json
|
||||||
|
|
||||||
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
||||||
print """\
|
print """\
|
||||||
@ -18,5 +18,5 @@ disk_used.draw LINE1"""
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
data = simplejson.load(urllib.urlopen(url))["used"]
|
data = json.load(urllib.urlopen(url))["used"]
|
||||||
print "disk_used.value", data
|
print "disk_used.value", data
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
# left before the grid fills up. The plugin should be configured with
|
# left before the grid fills up. The plugin should be configured with
|
||||||
# env_url= pointing at the diskwatcher.tac webport.
|
# env_url= pointing at the diskwatcher.tac webport.
|
||||||
|
|
||||||
import os, sys, urllib, simplejson
|
import os, sys, urllib, json
|
||||||
|
|
||||||
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
||||||
print """\
|
print """\
|
||||||
@ -24,7 +24,7 @@ days_4wk.draw LINE2"""
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
timespans = simplejson.load(urllib.urlopen(url))["rates"]
|
timespans = json.load(urllib.urlopen(url))["rates"]
|
||||||
|
|
||||||
data = dict([(name, timeleft)
|
data = dict([(name, timeleft)
|
||||||
for (name, timespan, growth, timeleft) in timespans
|
for (name, timespan, growth, timeleft) in timespans
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
import os, sys
|
import os, sys
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
configinfo = """\
|
configinfo = """\
|
||||||
graph_title Tahoe Helper Stats - Active Files
|
graph_title Tahoe Helper Stats - Active Files
|
||||||
@ -20,6 +20,6 @@ if len(sys.argv) > 1:
|
|||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
|
|
||||||
data = simplejson.loads(urllib.urlopen(url).read())
|
data = json.loads(urllib.urlopen(url).read())
|
||||||
print "fetched.value %d" % data["chk_upload_helper.active_uploads"]
|
print "fetched.value %d" % data["chk_upload_helper.active_uploads"]
|
||||||
|
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
import os, sys
|
import os, sys
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
configinfo = """\
|
configinfo = """\
|
||||||
graph_title Tahoe Helper Stats - Bytes Fetched
|
graph_title Tahoe Helper Stats - Bytes Fetched
|
||||||
@ -22,5 +22,5 @@ if len(sys.argv) > 1:
|
|||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
|
|
||||||
data = simplejson.loads(urllib.urlopen(url).read())
|
data = json.loads(urllib.urlopen(url).read())
|
||||||
print "fetched.value %d" % data["chk_upload_helper.fetched_bytes"]
|
print "fetched.value %d" % data["chk_upload_helper.fetched_bytes"]
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
import os, sys
|
import os, sys
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
configinfo = """\
|
configinfo = """\
|
||||||
graph_title Tahoe Introducer Stats
|
graph_title Tahoe Introducer Stats
|
||||||
@ -24,7 +24,7 @@ if len(sys.argv) > 1:
|
|||||||
|
|
||||||
url = os.environ["url"]
|
url = os.environ["url"]
|
||||||
|
|
||||||
data = simplejson.loads(urllib.urlopen(url).read())
|
data = json.loads(urllib.urlopen(url).read())
|
||||||
print "storage_server.value %d" % data["announcement_summary"]["storage"]
|
print "storage_server.value %d" % data["announcement_summary"]["storage"]
|
||||||
print "storage_hosts.value %d" % data["announcement_distinct_hosts"]["storage"]
|
print "storage_hosts.value %d" % data["announcement_distinct_hosts"]["storage"]
|
||||||
print "storage_client.value %d" % data["subscription_summary"]["storage"]
|
print "storage_client.value %d" % data["subscription_summary"]["storage"]
|
||||||
|
@ -27,7 +27,7 @@
|
|||||||
# This plugin should be configured with env_diskwatcher_url= pointing at the
|
# This plugin should be configured with env_diskwatcher_url= pointing at the
|
||||||
# diskwatcher.tac webport, and env_deepsize_url= pointing at the PHP script.
|
# diskwatcher.tac webport, and env_deepsize_url= pointing at the PHP script.
|
||||||
|
|
||||||
import os, sys, urllib, simplejson
|
import os, sys, urllib, json
|
||||||
|
|
||||||
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
if len(sys.argv) > 1 and sys.argv[1] == "config":
|
||||||
print """\
|
print """\
|
||||||
@ -44,9 +44,9 @@ effective_expansion.graph no"""
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
diskwatcher_url = os.environ["diskwatcher_url"]
|
diskwatcher_url = os.environ["diskwatcher_url"]
|
||||||
total = simplejson.load(urllib.urlopen(diskwatcher_url))["used"]
|
total = json.load(urllib.urlopen(diskwatcher_url))["used"]
|
||||||
deepsize_url = os.environ["deepsize_url"]
|
deepsize_url = os.environ["deepsize_url"]
|
||||||
deepsize = simplejson.load(urllib.urlopen(deepsize_url))
|
deepsize = json.load(urllib.urlopen(deepsize_url))
|
||||||
k = 3; N = 10
|
k = 3; N = 10
|
||||||
expansion = float(N) / k
|
expansion = float(N) / k
|
||||||
|
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
import os, sys
|
import os, sys
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
configinfo = """\
|
configinfo = """\
|
||||||
graph_title Tahoe Root Directory Size
|
graph_title Tahoe Root Directory Size
|
||||||
|
@ -44,7 +44,7 @@
|
|||||||
|
|
||||||
import os, sys
|
import os, sys
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
node_urls = []
|
node_urls = []
|
||||||
for k,v in os.environ.items():
|
for k,v in os.environ.items():
|
||||||
@ -82,7 +82,7 @@ if len(sys.argv) > 1:
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
for nodename, url in node_urls:
|
for nodename, url in node_urls:
|
||||||
data = simplejson.loads(urllib.urlopen(url).read())
|
data = json.loads(urllib.urlopen(url).read())
|
||||||
if percentile == "mean":
|
if percentile == "mean":
|
||||||
p_key = "mean"
|
p_key = "mean"
|
||||||
else:
|
else:
|
||||||
|
@ -34,7 +34,7 @@
|
|||||||
|
|
||||||
import os, sys
|
import os, sys
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
node_urls = []
|
node_urls = []
|
||||||
for k,v in os.environ.items():
|
for k,v in os.environ.items():
|
||||||
@ -68,7 +68,7 @@ if len(sys.argv) > 1:
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
for nodename, url in node_urls:
|
for nodename, url in node_urls:
|
||||||
data = simplejson.loads(urllib.urlopen(url).read())
|
data = json.loads(urllib.urlopen(url).read())
|
||||||
key = "storage_server.%s" % operation
|
key = "storage_server.%s" % operation
|
||||||
value = data["counters"][key]
|
value = data["counters"][key]
|
||||||
print "%s.value %s" % (nodename, value)
|
print "%s.value %s" % (nodename, value)
|
||||||
|
@ -31,7 +31,7 @@ from twisted.application import internet, service, strports
|
|||||||
from twisted.web import server, resource, http, client
|
from twisted.web import server, resource, http, client
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from twisted.python import log
|
from twisted.python import log
|
||||||
import simplejson
|
import json
|
||||||
from axiom.attributes import AND
|
from axiom.attributes import AND
|
||||||
from axiom.store import Store
|
from axiom.store import Store
|
||||||
from epsilon import extime
|
from epsilon import extime
|
||||||
@ -151,7 +151,7 @@ class DiskWatcher(service.MultiService, resource.Resource):
|
|||||||
log.msg("fetched %d of %d" % (fetched, attempts))
|
log.msg("fetched %d of %d" % (fetched, attempts))
|
||||||
|
|
||||||
def got_response(self, data_json, when, url):
|
def got_response(self, data_json, when, url):
|
||||||
data = simplejson.loads(data_json)
|
data = json.loads(data_json)
|
||||||
total = data[u"stats"][u"storage_server.disk_total"]
|
total = data[u"stats"][u"storage_server.disk_total"]
|
||||||
used = data[u"stats"][u"storage_server.disk_used"]
|
used = data[u"stats"][u"storage_server.disk_used"]
|
||||||
avail = data[u"stats"][u"storage_server.disk_avail"]
|
avail = data[u"stats"][u"storage_server.disk_avail"]
|
||||||
@ -374,7 +374,7 @@ class DiskWatcher(service.MultiService, resource.Resource):
|
|||||||
"used": self.find_total_used_space(),
|
"used": self.find_total_used_space(),
|
||||||
"available": self.find_total_available_space(),
|
"available": self.find_total_available_space(),
|
||||||
}
|
}
|
||||||
data = simplejson.dumps(current, indent=True)
|
data = json.dumps(current, indent=True)
|
||||||
else:
|
else:
|
||||||
req.setResponseCode(http.BAD_REQUEST)
|
req.setResponseCode(http.BAD_REQUEST)
|
||||||
data = "Unknown t= %s\n" % t
|
data = "Unknown t= %s\n" % t
|
||||||
|
@ -24,9 +24,6 @@ install_requires = [
|
|||||||
|
|
||||||
"zfec >= 1.1.0",
|
"zfec >= 1.1.0",
|
||||||
|
|
||||||
# Feisty has simplejson 1.4
|
|
||||||
"simplejson >= 1.4",
|
|
||||||
|
|
||||||
# zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
|
# zope.interface >= 3.6.0 is required for Twisted >= 12.1.0.
|
||||||
# zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435).
|
# zope.interface 3.6.3 and 3.6.4 are incompatible with Nevow (#1435).
|
||||||
"zope.interface >= 3.6.0, != 3.6.3, != 3.6.4",
|
"zope.interface >= 3.6.0, != 3.6.3, != 3.6.4",
|
||||||
@ -108,7 +105,6 @@ package_imports = [
|
|||||||
('platform', None),
|
('platform', None),
|
||||||
('pyOpenSSL', 'OpenSSL'),
|
('pyOpenSSL', 'OpenSSL'),
|
||||||
('OpenSSL', None),
|
('OpenSSL', None),
|
||||||
('simplejson', 'simplejson'),
|
|
||||||
('pycrypto', 'Crypto'),
|
('pycrypto', 'Crypto'),
|
||||||
('pyasn1', 'pyasn1'),
|
('pyasn1', 'pyasn1'),
|
||||||
('service-identity', 'service_identity'),
|
('service-identity', 'service_identity'),
|
||||||
|
@ -4,7 +4,7 @@ import time, unicodedata
|
|||||||
from zope.interface import implements
|
from zope.interface import implements
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from foolscap.api import fireEventually
|
from foolscap.api import fireEventually
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
from allmydata.deep_stats import DeepStats
|
from allmydata.deep_stats import DeepStats
|
||||||
from allmydata.mutable.common import NotWriteableError
|
from allmydata.mutable.common import NotWriteableError
|
||||||
@ -252,7 +252,7 @@ def _pack_normalized_children(children, writekey, deep_immutable=False):
|
|||||||
entry = "".join([netstring(name.encode("utf-8")),
|
entry = "".join([netstring(name.encode("utf-8")),
|
||||||
netstring(strip_prefix_for_ro(ro_uri, deep_immutable)),
|
netstring(strip_prefix_for_ro(ro_uri, deep_immutable)),
|
||||||
writecap,
|
writecap,
|
||||||
netstring(simplejson.dumps(metadata))])
|
netstring(json.dumps(metadata))])
|
||||||
entries.append(netstring(entry))
|
entries.append(netstring(entry))
|
||||||
return "".join(entries)
|
return "".join(entries)
|
||||||
|
|
||||||
@ -361,7 +361,7 @@ class DirectoryNode:
|
|||||||
try:
|
try:
|
||||||
child = self._create_and_validate_node(rw_uri, ro_uri, name)
|
child = self._create_and_validate_node(rw_uri, ro_uri, name)
|
||||||
if mutable or child.is_allowed_in_immutable_directory():
|
if mutable or child.is_allowed_in_immutable_directory():
|
||||||
metadata = simplejson.loads(metadata_s)
|
metadata = json.loads(metadata_s)
|
||||||
assert isinstance(metadata, dict)
|
assert isinstance(metadata, dict)
|
||||||
children[name] = (child, metadata)
|
children[name] = (child, metadata)
|
||||||
children.set_with_aux(name, (child, metadata), auxilliary=entry)
|
children.set_with_aux(name, (child, metadata), auxilliary=entry)
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
|
|
||||||
import re, simplejson
|
import re
|
||||||
|
import json
|
||||||
from allmydata.util import keyutil, base32, rrefutil
|
from allmydata.util import keyutil, base32, rrefutil
|
||||||
|
|
||||||
def get_tubid_string_from_ann(ann):
|
def get_tubid_string_from_ann(ann):
|
||||||
@ -16,7 +17,7 @@ def sign_to_foolscap(ann, sk):
|
|||||||
# return (bytes, sig-str, pubkey-str). A future HTTP-based serialization
|
# return (bytes, sig-str, pubkey-str). A future HTTP-based serialization
|
||||||
# will use JSON({msg:b64(JSON(msg).utf8), sig:v0-b64(sig),
|
# will use JSON({msg:b64(JSON(msg).utf8), sig:v0-b64(sig),
|
||||||
# pubkey:v0-b64(pubkey)}) .
|
# pubkey:v0-b64(pubkey)}) .
|
||||||
msg = simplejson.dumps(ann).encode("utf-8")
|
msg = json.dumps(ann).encode("utf-8")
|
||||||
sig = "v0-"+base32.b2a(sk.sign(msg))
|
sig = "v0-"+base32.b2a(sk.sign(msg))
|
||||||
vk_bytes = sk.get_verifying_key_bytes()
|
vk_bytes = sk.get_verifying_key_bytes()
|
||||||
ann_t = (msg, sig, "v0-"+base32.b2a(vk_bytes))
|
ann_t = (msg, sig, "v0-"+base32.b2a(vk_bytes))
|
||||||
@ -37,7 +38,7 @@ def unsign_from_foolscap(ann_t):
|
|||||||
sig_bytes = base32.a2b(keyutil.remove_prefix(sig_vs, "v0-"))
|
sig_bytes = base32.a2b(keyutil.remove_prefix(sig_vs, "v0-"))
|
||||||
claimed_key.verify(sig_bytes, msg)
|
claimed_key.verify(sig_bytes, msg)
|
||||||
key_vs = claimed_key_vs
|
key_vs = claimed_key_vs
|
||||||
ann = simplejson.loads(msg.decode("utf-8"))
|
ann = json.loads(msg.decode("utf-8"))
|
||||||
return (ann, key_vs)
|
return (ann, key_vs)
|
||||||
|
|
||||||
class SubscriberDescriptor:
|
class SubscriberDescriptor:
|
||||||
|
@ -5,8 +5,8 @@ from sys import stderr
|
|||||||
from types import NoneType
|
from types import NoneType
|
||||||
from cStringIO import StringIO
|
from cStringIO import StringIO
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
import json
|
||||||
|
|
||||||
import simplejson
|
|
||||||
|
|
||||||
from twisted.python import usage
|
from twisted.python import usage
|
||||||
|
|
||||||
@ -265,7 +265,7 @@ def _get_json_for_fragment(options, fragment, method='GET', post_args=None):
|
|||||||
)
|
)
|
||||||
|
|
||||||
data = resp.read()
|
data = resp.read()
|
||||||
parsed = simplejson.loads(data)
|
parsed = json.loads(data)
|
||||||
if parsed is None:
|
if parsed is None:
|
||||||
raise RuntimeError("No data from '%s'" % (nodeurl,))
|
raise RuntimeError("No data from '%s'" % (nodeurl,))
|
||||||
return parsed
|
return parsed
|
||||||
|
@ -6,7 +6,7 @@ from allmydata.scripts.common_http import do_http, format_http_error
|
|||||||
from allmydata.util import base32
|
from allmydata.util import base32
|
||||||
from allmydata.util.encodingutil import quote_output, is_printable_ascii
|
from allmydata.util.encodingutil import quote_output, is_printable_ascii
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
class SlowOperationRunner:
|
class SlowOperationRunner:
|
||||||
|
|
||||||
@ -69,7 +69,7 @@ class SlowOperationRunner:
|
|||||||
print >>stderr, format_http_error("ERROR", resp)
|
print >>stderr, format_http_error("ERROR", resp)
|
||||||
return True
|
return True
|
||||||
jdata = resp.read()
|
jdata = resp.read()
|
||||||
data = simplejson.loads(jdata)
|
data = json.loads(jdata)
|
||||||
if not data["finished"]:
|
if not data["finished"]:
|
||||||
return False
|
return False
|
||||||
if self.options.get("raw"):
|
if self.options.get("raw"):
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
import os.path
|
import os.path
|
||||||
import time
|
import time
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
import datetime
|
import datetime
|
||||||
from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS, \
|
from allmydata.scripts.common import get_alias, escape_path, DEFAULT_ALIAS, \
|
||||||
UnknownAliasError
|
UnknownAliasError
|
||||||
@ -40,7 +40,7 @@ def mkdir(contents, options):
|
|||||||
}))
|
}))
|
||||||
for childname in contents
|
for childname in contents
|
||||||
])
|
])
|
||||||
body = simplejson.dumps(kids).encode("utf-8")
|
body = json.dumps(kids).encode("utf-8")
|
||||||
url = options['node-url'] + "uri?t=mkdir-immutable"
|
url = options['node-url'] + "uri?t=mkdir-immutable"
|
||||||
resp = do_http("POST", url, body)
|
resp = do_http("POST", url, body)
|
||||||
if resp.status < 200 or resp.status >= 300:
|
if resp.status < 200 or resp.status >= 300:
|
||||||
@ -245,7 +245,7 @@ class BackerUpper:
|
|||||||
# can't check, so we must assume it's bad
|
# can't check, so we must assume it's bad
|
||||||
return True, r
|
return True, r
|
||||||
|
|
||||||
cr = simplejson.loads(resp.read())
|
cr = json.loads(resp.read())
|
||||||
healthy = cr["results"]["healthy"]
|
healthy = cr["results"]["healthy"]
|
||||||
if not healthy:
|
if not healthy:
|
||||||
# must upload
|
# must upload
|
||||||
@ -278,7 +278,7 @@ class BackerUpper:
|
|||||||
# can't check, so we must assume it's bad
|
# can't check, so we must assume it's bad
|
||||||
return True, r
|
return True, r
|
||||||
|
|
||||||
cr = simplejson.loads(resp.read())
|
cr = json.loads(resp.read())
|
||||||
healthy = cr["results"]["healthy"]
|
healthy = cr["results"]["healthy"]
|
||||||
if not healthy:
|
if not healthy:
|
||||||
# must create
|
# must create
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
from twisted.protocols.basic import LineOnlyReceiver
|
from twisted.protocols.basic import LineOnlyReceiver
|
||||||
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
|
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
|
||||||
UnknownAliasError
|
UnknownAliasError
|
||||||
@ -49,7 +49,7 @@ def check_location(options, where):
|
|||||||
stdout.write(jdata)
|
stdout.write(jdata)
|
||||||
stdout.write("\n")
|
stdout.write("\n")
|
||||||
return 0
|
return 0
|
||||||
data = simplejson.loads(jdata)
|
data = json.loads(jdata)
|
||||||
|
|
||||||
if options["repair"]:
|
if options["repair"]:
|
||||||
# show repair status
|
# show repair status
|
||||||
@ -137,7 +137,7 @@ class DeepCheckOutput(LineOnlyReceiver):
|
|||||||
print >>self.stderr, quote_output(line, quotemarks=False)
|
print >>self.stderr, quote_output(line, quotemarks=False)
|
||||||
return
|
return
|
||||||
|
|
||||||
d = simplejson.loads(line)
|
d = json.loads(line)
|
||||||
stdout = self.stdout
|
stdout = self.stdout
|
||||||
if d["type"] not in ("file", "directory"):
|
if d["type"] not in ("file", "directory"):
|
||||||
return
|
return
|
||||||
@ -200,7 +200,7 @@ class DeepCheckAndRepairOutput(LineOnlyReceiver):
|
|||||||
print >>self.stderr, quote_output(line, quotemarks=False)
|
print >>self.stderr, quote_output(line, quotemarks=False)
|
||||||
return
|
return
|
||||||
|
|
||||||
d = simplejson.loads(line)
|
d = json.loads(line)
|
||||||
stdout = self.stdout
|
stdout = self.stdout
|
||||||
if d["type"] not in ("file", "directory"):
|
if d["type"] not in ("file", "directory"):
|
||||||
return
|
return
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from cStringIO import StringIO
|
from cStringIO import StringIO
|
||||||
from twisted.python.failure import Failure
|
from twisted.python.failure import Failure
|
||||||
@ -242,7 +242,7 @@ class TahoeDirectorySource:
|
|||||||
resp = do_http("GET", url + "?t=json")
|
resp = do_http("GET", url + "?t=json")
|
||||||
if resp.status != 200:
|
if resp.status != 200:
|
||||||
raise HTTPError("Error examining source directory", resp)
|
raise HTTPError("Error examining source directory", resp)
|
||||||
parsed = simplejson.loads(resp.read())
|
parsed = json.loads(resp.read())
|
||||||
nodetype, d = parsed
|
nodetype, d = parsed
|
||||||
assert nodetype == "dirnode"
|
assert nodetype == "dirnode"
|
||||||
self.mutable = d.get("mutable", False) # older nodes don't provide it
|
self.mutable = d.get("mutable", False) # older nodes don't provide it
|
||||||
@ -339,7 +339,7 @@ class TahoeDirectoryTarget:
|
|||||||
resp = do_http("GET", url + "?t=json")
|
resp = do_http("GET", url + "?t=json")
|
||||||
if resp.status != 200:
|
if resp.status != 200:
|
||||||
raise HTTPError("Error examining target directory", resp)
|
raise HTTPError("Error examining target directory", resp)
|
||||||
parsed = simplejson.loads(resp.read())
|
parsed = json.loads(resp.read())
|
||||||
nodetype, d = parsed
|
nodetype, d = parsed
|
||||||
assert nodetype == "dirnode"
|
assert nodetype == "dirnode"
|
||||||
self.mutable = d.get("mutable", False) # older nodes don't provide it
|
self.mutable = d.get("mutable", False) # older nodes don't provide it
|
||||||
@ -449,7 +449,7 @@ class TahoeDirectoryTarget:
|
|||||||
# TODO: think about how this affects forward-compatibility for
|
# TODO: think about how this affects forward-compatibility for
|
||||||
# unknown caps
|
# unknown caps
|
||||||
set_data[name] = ["filenode", {"rw_uri": filecap}]
|
set_data[name] = ["filenode", {"rw_uri": filecap}]
|
||||||
body = simplejson.dumps(set_data)
|
body = json.dumps(set_data)
|
||||||
POST(url, body)
|
POST(url, body)
|
||||||
|
|
||||||
FileSources = (LocalFileSource, TahoeFileSource)
|
FileSources = (LocalFileSource, TahoeFileSource)
|
||||||
@ -611,7 +611,7 @@ class Copier:
|
|||||||
# doesn't exist yet
|
# doesn't exist yet
|
||||||
t = TahoeMissingTarget(url)
|
t = TahoeMissingTarget(url)
|
||||||
elif resp.status == 200:
|
elif resp.status == 200:
|
||||||
parsed = simplejson.loads(resp.read())
|
parsed = json.loads(resp.read())
|
||||||
nodetype, d = parsed
|
nodetype, d = parsed
|
||||||
if nodetype == "dirnode":
|
if nodetype == "dirnode":
|
||||||
t = TahoeDirectoryTarget(self.nodeurl, self.cache,
|
t = TahoeDirectoryTarget(self.nodeurl, self.cache,
|
||||||
@ -672,7 +672,7 @@ class Copier:
|
|||||||
elif resp.status != 200:
|
elif resp.status != 200:
|
||||||
raise HTTPError("Error examining source %s" % quote_output(source_spec),
|
raise HTTPError("Error examining source %s" % quote_output(source_spec),
|
||||||
resp)
|
resp)
|
||||||
parsed = simplejson.loads(resp.read())
|
parsed = json.loads(resp.read())
|
||||||
nodetype, d = parsed
|
nodetype, d = parsed
|
||||||
if nodetype == "dirnode":
|
if nodetype == "dirnode":
|
||||||
t = TahoeDirectorySource(self.nodeurl, self.cache,
|
t = TahoeDirectorySource(self.nodeurl, self.cache,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
import urllib, time
|
import urllib, time
|
||||||
import simplejson
|
import json
|
||||||
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
|
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
|
||||||
UnknownAliasError
|
UnknownAliasError
|
||||||
from allmydata.scripts.common_http import do_http, format_http_error
|
from allmydata.scripts.common_http import do_http, format_http_error
|
||||||
@ -52,7 +52,7 @@ def list(options):
|
|||||||
return 1
|
return 1
|
||||||
|
|
||||||
try:
|
try:
|
||||||
parsed = simplejson.loads(data)
|
parsed = json.loads(data)
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
print >>stderr, "error: %s" % quote_output(e.args[0], quotemarks=False)
|
print >>stderr, "error: %s" % quote_output(e.args[0], quotemarks=False)
|
||||||
print >>stderr, "Could not parse JSON response:"
|
print >>stderr, "Could not parse JSON response:"
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
|
|
||||||
import urllib, simplejson
|
import urllib, json
|
||||||
from twisted.protocols.basic import LineOnlyReceiver
|
from twisted.protocols.basic import LineOnlyReceiver
|
||||||
from allmydata.util.abbreviate import abbreviate_space_both
|
from allmydata.util.abbreviate import abbreviate_space_both
|
||||||
from allmydata.scripts.slow_operation import SlowOperationRunner
|
from allmydata.scripts.slow_operation import SlowOperationRunner
|
||||||
@ -69,7 +69,7 @@ class ManifestStreamer(LineOnlyReceiver):
|
|||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
d = simplejson.loads(line.decode('utf-8'))
|
d = json.loads(line.decode('utf-8'))
|
||||||
except Exception, e:
|
except Exception, e:
|
||||||
print >>stderr, "ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e)
|
print >>stderr, "ERROR could not decode/parse %s\nERROR %r" % (quote_output(line), e)
|
||||||
else:
|
else:
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
import urllib
|
import urllib
|
||||||
import simplejson
|
import json
|
||||||
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
|
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
|
||||||
UnknownAliasError
|
UnknownAliasError
|
||||||
from allmydata.scripts.common_http import do_http, format_http_error
|
from allmydata.scripts.common_http import do_http, format_http_error
|
||||||
@ -33,7 +33,7 @@ def mv(options, mode="move"):
|
|||||||
print >>stderr, format_http_error("Error", resp)
|
print >>stderr, format_http_error("Error", resp)
|
||||||
return 1
|
return 1
|
||||||
data = resp.read()
|
data = resp.read()
|
||||||
nodetype, attrs = simplejson.loads(data)
|
nodetype, attrs = json.loads(data)
|
||||||
cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])
|
cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])
|
||||||
|
|
||||||
# now get the target
|
# now get the target
|
||||||
|
@ -33,7 +33,7 @@ a mean of 10kB and a max of 100MB, so filesize=min(int(1.0/random(.0002)),1e8)
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import os, sys, httplib, binascii
|
import os, sys, httplib, binascii
|
||||||
import urllib, simplejson, random, time, urlparse
|
import urllib, json, random, time, urlparse
|
||||||
|
|
||||||
if sys.argv[1] == "--stats":
|
if sys.argv[1] == "--stats":
|
||||||
statsfiles = sys.argv[2:]
|
statsfiles = sys.argv[2:]
|
||||||
@ -105,7 +105,7 @@ def listdir(nodeurl, root, remote_pathname):
|
|||||||
url += "?t=json"
|
url += "?t=json"
|
||||||
data = urllib.urlopen(url).read()
|
data = urllib.urlopen(url).read()
|
||||||
try:
|
try:
|
||||||
parsed = simplejson.loads(data)
|
parsed = json.loads(data)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
print "URL was", url
|
print "URL was", url
|
||||||
print "DATA was", data
|
print "DATA was", data
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
import os.path
|
import os.path
|
||||||
import simplejson
|
import json
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from cStringIO import StringIO
|
from cStringIO import StringIO
|
||||||
|
|
||||||
@ -40,7 +40,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
def _check2((rc, out, err)):
|
def _check2((rc, out, err)):
|
||||||
self.failUnlessReallyEqual(err, "")
|
self.failUnlessReallyEqual(err, "")
|
||||||
self.failUnlessReallyEqual(rc, 0)
|
self.failUnlessReallyEqual(rc, 0)
|
||||||
data = simplejson.loads(out)
|
data = json.loads(out)
|
||||||
self.failUnlessReallyEqual(to_str(data["summary"]), "Healthy")
|
self.failUnlessReallyEqual(to_str(data["summary"]), "Healthy")
|
||||||
self.failUnlessReallyEqual(data["results"]["healthy"], True)
|
self.failUnlessReallyEqual(data["results"]["healthy"], True)
|
||||||
d.addCallback(_check2)
|
d.addCallback(_check2)
|
||||||
@ -62,7 +62,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
def _check_lit_raw((rc, out, err)):
|
def _check_lit_raw((rc, out, err)):
|
||||||
self.failUnlessReallyEqual(err, "")
|
self.failUnlessReallyEqual(err, "")
|
||||||
self.failUnlessReallyEqual(rc, 0)
|
self.failUnlessReallyEqual(rc, 0)
|
||||||
data = simplejson.loads(out)
|
data = json.loads(out)
|
||||||
self.failUnlessReallyEqual(data["results"]["healthy"], True)
|
self.failUnlessReallyEqual(data["results"]["healthy"], True)
|
||||||
d.addCallback(_check_lit_raw)
|
d.addCallback(_check_lit_raw)
|
||||||
|
|
||||||
@ -110,7 +110,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
def _check3_raw((rc, out, err)):
|
def _check3_raw((rc, out, err)):
|
||||||
self.failUnlessReallyEqual(err, "")
|
self.failUnlessReallyEqual(err, "")
|
||||||
self.failUnlessReallyEqual(rc, 0)
|
self.failUnlessReallyEqual(rc, 0)
|
||||||
data = simplejson.loads(out)
|
data = json.loads(out)
|
||||||
self.failUnlessReallyEqual(data["results"]["healthy"], False)
|
self.failUnlessReallyEqual(data["results"]["healthy"], False)
|
||||||
self.failUnlessIn("Unhealthy: 8 shares (enc 3-of-10)", data["summary"])
|
self.failUnlessIn("Unhealthy: 8 shares (enc 3-of-10)", data["summary"])
|
||||||
self.failUnlessReallyEqual(data["results"]["count-shares-good"], 8)
|
self.failUnlessReallyEqual(data["results"]["count-shares-good"], 8)
|
||||||
@ -278,7 +278,7 @@ class Check(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
self.failUnlessReallyEqual(err, "")
|
self.failUnlessReallyEqual(err, "")
|
||||||
self.failUnlessReallyEqual(rc, 0)
|
self.failUnlessReallyEqual(rc, 0)
|
||||||
lines = out.splitlines()
|
lines = out.splitlines()
|
||||||
units = [simplejson.loads(line) for line in lines]
|
units = [json.loads(line) for line in lines]
|
||||||
# root, small, g\u00F6\u00F6d, mutable, stats
|
# root, small, g\u00F6\u00F6d, mutable, stats
|
||||||
self.failUnlessReallyEqual(len(units), 4+1)
|
self.failUnlessReallyEqual(len(units), 4+1)
|
||||||
d.addCallback(_check5)
|
d.addCallback(_check5)
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
import os.path, simplejson
|
import os.path, json
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from twisted.python import usage
|
from twisted.python import usage
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
@ -254,7 +254,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
|
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
|
||||||
def _get_test_txt_uris((rc, out, err)):
|
def _get_test_txt_uris((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
|
|
||||||
self.failUnlessEqual(filetype, "filenode")
|
self.failUnlessEqual(filetype, "filenode")
|
||||||
self.failUnless(data['mutable'])
|
self.failUnless(data['mutable'])
|
||||||
@ -287,7 +287,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
|
self.do_cli("ls", "--json", "tahoe:test/test.txt"))
|
||||||
def _check_json((rc, out, err)):
|
def _check_json((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
|
|
||||||
self.failUnlessEqual(filetype, "filenode")
|
self.failUnlessEqual(filetype, "filenode")
|
||||||
self.failUnless(data['mutable'])
|
self.failUnless(data['mutable'])
|
||||||
@ -347,7 +347,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
def _process_directory_json((rc, out, err)):
|
def _process_directory_json((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
|
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
self.failUnlessEqual(filetype, "dirnode")
|
self.failUnlessEqual(filetype, "dirnode")
|
||||||
self.failUnless(data['mutable'])
|
self.failUnless(data['mutable'])
|
||||||
self.failUnlessIn("children", data)
|
self.failUnlessIn("children", data)
|
||||||
@ -389,7 +389,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
# so they'll retain their URIs but have different content.
|
# so they'll retain their URIs but have different content.
|
||||||
def _process_file_json((rc, out, err), fn):
|
def _process_file_json((rc, out, err), fn):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
self.failUnlessEqual(filetype, "filenode")
|
self.failUnlessEqual(filetype, "filenode")
|
||||||
|
|
||||||
if "mutable" in fn:
|
if "mutable" in fn:
|
||||||
@ -436,7 +436,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
self.do_cli("ls", "--json", "tahoe:test2/imm2"))
|
self.do_cli("ls", "--json", "tahoe:test2/imm2"))
|
||||||
def _process_imm2_json((rc, out, err)):
|
def _process_imm2_json((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
self.failUnlessEqual(filetype, "filenode")
|
self.failUnlessEqual(filetype, "filenode")
|
||||||
self.failIf(data['mutable'])
|
self.failIf(data['mutable'])
|
||||||
self.failUnlessIn("ro_uri", data)
|
self.failUnlessIn("ro_uri", data)
|
||||||
@ -474,7 +474,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
self.do_cli("ls", "--json", self._test_write_uri))
|
self.do_cli("ls", "--json", self._test_write_uri))
|
||||||
def _process_test_json((rc, out, err)):
|
def _process_test_json((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
|
|
||||||
self.failUnlessEqual(filetype, "filenode")
|
self.failUnlessEqual(filetype, "filenode")
|
||||||
self.failUnless(data['mutable'])
|
self.failUnless(data['mutable'])
|
||||||
@ -492,7 +492,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
def _process_tahoe_json((rc, out, err)):
|
def _process_tahoe_json((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
|
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
self.failUnlessEqual(filetype, "dirnode")
|
self.failUnlessEqual(filetype, "dirnode")
|
||||||
self.failUnlessIn("children", data)
|
self.failUnlessIn("children", data)
|
||||||
kiddata = data['children']
|
kiddata = data['children']
|
||||||
@ -557,7 +557,7 @@ class Cp(GridTestMixin, CLITestMixin, unittest.TestCase):
|
|||||||
def _got_testdir_json((rc, out, err)):
|
def _got_testdir_json((rc, out, err)):
|
||||||
self.failUnlessEqual(rc, 0)
|
self.failUnlessEqual(rc, 0)
|
||||||
|
|
||||||
filetype, data = simplejson.loads(out)
|
filetype, data = json.loads(out)
|
||||||
self.failUnlessEqual(filetype, "dirnode")
|
self.failUnlessEqual(filetype, "dirnode")
|
||||||
|
|
||||||
self.failUnlessIn("children", data)
|
self.failUnlessIn("children", data)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
|
|
||||||
import simplejson
|
import json
|
||||||
import os.path, shutil
|
import os.path, shutil
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
@ -69,8 +69,8 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
|
|||||||
html)
|
html)
|
||||||
d.addCallback(_check_return_to)
|
d.addCallback(_check_return_to)
|
||||||
d.addCallback(lambda ignored: self.render_json(lcr))
|
d.addCallback(lambda ignored: self.render_json(lcr))
|
||||||
def _check_json(json):
|
def _check_json(js):
|
||||||
j = simplejson.loads(json)
|
j = json.loads(js)
|
||||||
self.failUnlessEqual(j["storage-index"], "")
|
self.failUnlessEqual(j["storage-index"], "")
|
||||||
self.failUnlessEqual(j["results"]["healthy"], True)
|
self.failUnlessEqual(j["results"]["healthy"], True)
|
||||||
d.addCallback(_check_json)
|
d.addCallback(_check_json)
|
||||||
@ -152,7 +152,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
|
|||||||
|
|
||||||
d = self.render_json(w)
|
d = self.render_json(w)
|
||||||
def _check_json(jdata):
|
def _check_json(jdata):
|
||||||
j = simplejson.loads(jdata)
|
j = json.loads(jdata)
|
||||||
self.failUnlessEqual(j["summary"], "rather dead")
|
self.failUnlessEqual(j["summary"], "rather dead")
|
||||||
self.failUnlessEqual(j["storage-index"],
|
self.failUnlessEqual(j["storage-index"],
|
||||||
"2k6avpjga3dho3zsjo6nnkt7n4")
|
"2k6avpjga3dho3zsjo6nnkt7n4")
|
||||||
@ -292,7 +292,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
|
|||||||
|
|
||||||
d = self.render_json(w)
|
d = self.render_json(w)
|
||||||
def _got_json(data):
|
def _got_json(data):
|
||||||
j = simplejson.loads(data)
|
j = json.loads(data)
|
||||||
self.failUnlessEqual(j["repair-attempted"], True)
|
self.failUnlessEqual(j["repair-attempted"], True)
|
||||||
self.failUnlessEqual(j["storage-index"],
|
self.failUnlessEqual(j["storage-index"],
|
||||||
"2k6avpjga3dho3zsjo6nnkt7n4")
|
"2k6avpjga3dho3zsjo6nnkt7n4")
|
||||||
@ -303,7 +303,7 @@ class WebResultsRendering(unittest.TestCase, WebRenderingMixin):
|
|||||||
w2 = web_check_results.CheckAndRepairResultsRenderer(c, None)
|
w2 = web_check_results.CheckAndRepairResultsRenderer(c, None)
|
||||||
d.addCallback(lambda ignored: self.render_json(w2))
|
d.addCallback(lambda ignored: self.render_json(w2))
|
||||||
def _got_lit_results(data):
|
def _got_lit_results(data):
|
||||||
j = simplejson.loads(data)
|
j = json.loads(data)
|
||||||
self.failUnlessEqual(j["repair-attempted"], False)
|
self.failUnlessEqual(j["repair-attempted"], False)
|
||||||
self.failUnlessEqual(j["storage-index"], "")
|
self.failUnlessEqual(j["storage-index"], "")
|
||||||
d.addCallback(_got_lit_results)
|
d.addCallback(_got_lit_results)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
|
|
||||||
import os, simplejson, urllib
|
import os, json, urllib
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from allmydata.immutable import upload
|
from allmydata.immutable import upload
|
||||||
@ -132,7 +132,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin,
|
|||||||
|
|
||||||
def decode_json(self, (s,url)):
|
def decode_json(self, (s,url)):
|
||||||
try:
|
try:
|
||||||
data = simplejson.loads(s)
|
data = json.loads(s)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
self.fail("%s: not JSON: '%s'" % (url, s))
|
self.fail("%s: not JSON: '%s'" % (url, s))
|
||||||
return data
|
return data
|
||||||
@ -143,7 +143,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin,
|
|||||||
# stream should end with a newline, so split returns ""
|
# stream should end with a newline, so split returns ""
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
yield simplejson.loads(unit)
|
yield json.loads(unit)
|
||||||
except ValueError, le:
|
except ValueError, le:
|
||||||
le.args = tuple(le.args + (unit,))
|
le.args = tuple(le.args + (unit,))
|
||||||
raise
|
raise
|
||||||
@ -162,7 +162,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin,
|
|||||||
d = getPage(url)
|
d = getPage(url)
|
||||||
def _got(res):
|
def _got(res):
|
||||||
try:
|
try:
|
||||||
data = simplejson.loads(res)
|
data = json.loads(res)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
self.fail("%s: not JSON: '%s'" % (url, res))
|
self.fail("%s: not JSON: '%s'" % (url, res))
|
||||||
if not data["finished"]:
|
if not data["finished"]:
|
||||||
@ -182,7 +182,7 @@ class DeepCheckBase(GridTestMixin, ErrorMixin, StallMixin, ShouldFailMixin,
|
|||||||
def _got(res):
|
def _got(res):
|
||||||
if output and output.lower() == "json":
|
if output and output.lower() == "json":
|
||||||
try:
|
try:
|
||||||
return simplejson.loads(res)
|
return json.loads(res)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
self.fail("%s: not JSON: '%s'" % (url, res))
|
self.fail("%s: not JSON: '%s'" % (url, res))
|
||||||
return res
|
return res
|
||||||
@ -823,7 +823,7 @@ class DeepCheckWebGood(DeepCheckBase, unittest.TestCase):
|
|||||||
def do_cli_stats2(self):
|
def do_cli_stats2(self):
|
||||||
d = self.do_cli("stats", "--raw", self.root_uri)
|
d = self.do_cli("stats", "--raw", self.root_uri)
|
||||||
def _check4((rc,out,err)):
|
def _check4((rc,out,err)):
|
||||||
data = simplejson.loads(out)
|
data = json.loads(out)
|
||||||
self.failUnlessEqual(data["count-immutable-files"], 1)
|
self.failUnlessEqual(data["count-immutable-files"], 1)
|
||||||
self.failUnlessEqual(data["count-immutable-files"], 1)
|
self.failUnlessEqual(data["count-immutable-files"], 1)
|
||||||
self.failUnlessEqual(data["count-mutable-files"], 1)
|
self.failUnlessEqual(data["count-mutable-files"], 1)
|
||||||
|
@ -407,11 +407,13 @@ class Dirnode(GridTestMixin, unittest.TestCase,
|
|||||||
def _start(res):
|
def _start(res):
|
||||||
self._start_timestamp = time.time()
|
self._start_timestamp = time.time()
|
||||||
d.addCallback(_start)
|
d.addCallback(_start)
|
||||||
# simplejson-1.7.1 (as shipped on Ubuntu 'gutsy') rounds all
|
# a long time ago, we used simplejson-1.7.1 (as shipped on Ubuntu
|
||||||
# floats to hundredeths (it uses str(num) instead of repr(num)).
|
# 'gutsy'), which had a bug/misbehavior in which it would round
|
||||||
# simplejson-1.7.3 does not have this bug. To prevent this bug
|
# all floats to hundredeths (it used str(num) instead of
|
||||||
# from causing the test to fail, stall for more than a few
|
# repr(num)). To prevent this bug from causing the test to fail,
|
||||||
# hundrededths of a second.
|
# we stall for more than a few hundrededths of a second here.
|
||||||
|
# simplejson-1.7.3 does not have this bug, and anyways we've
|
||||||
|
# moved on to stdlib "json" which doesn't have it either.
|
||||||
d.addCallback(self.stall, 0.1)
|
d.addCallback(self.stall, 0.1)
|
||||||
d.addCallback(lambda res: n.add_file(u"timestamps",
|
d.addCallback(lambda res: n.add_file(u"timestamps",
|
||||||
upload.Data("stamp me", convergence="some convergence string")))
|
upload.Data("stamp me", convergence="some convergence string")))
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
import os, re, itertools
|
import os, re, itertools
|
||||||
from base64 import b32decode
|
from base64 import b32decode
|
||||||
import simplejson
|
import json
|
||||||
|
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from twisted.internet import defer, address
|
from twisted.internet import defer, address
|
||||||
@ -845,7 +845,7 @@ class ClientSeqnums(unittest.TestCase):
|
|||||||
self.failUnlessEqual(outbound["sA"]["seqnum"], 1)
|
self.failUnlessEqual(outbound["sA"]["seqnum"], 1)
|
||||||
nonce1 = outbound["sA"]["nonce"]
|
nonce1 = outbound["sA"]["nonce"]
|
||||||
self.failUnless(isinstance(nonce1, str))
|
self.failUnless(isinstance(nonce1, str))
|
||||||
self.failUnlessEqual(simplejson.loads(published["sA"][0]),
|
self.failUnlessEqual(json.loads(published["sA"][0]),
|
||||||
outbound["sA"])
|
outbound["sA"])
|
||||||
# [1] is the signature, [2] is the pubkey
|
# [1] is the signature, [2] is the pubkey
|
||||||
|
|
||||||
@ -860,9 +860,9 @@ class ClientSeqnums(unittest.TestCase):
|
|||||||
nonce2 = outbound["sA"]["nonce"]
|
nonce2 = outbound["sA"]["nonce"]
|
||||||
self.failUnless(isinstance(nonce2, str))
|
self.failUnless(isinstance(nonce2, str))
|
||||||
self.failIfEqual(nonce1, nonce2)
|
self.failIfEqual(nonce1, nonce2)
|
||||||
self.failUnlessEqual(simplejson.loads(published["sA"][0]),
|
self.failUnlessEqual(json.loads(published["sA"][0]),
|
||||||
outbound["sA"])
|
outbound["sA"])
|
||||||
self.failUnlessEqual(simplejson.loads(published["sB"][0]),
|
self.failUnlessEqual(json.loads(published["sB"][0]),
|
||||||
outbound["sB"])
|
outbound["sB"])
|
||||||
|
|
||||||
|
|
||||||
@ -933,7 +933,7 @@ class Signatures(unittest.TestCase):
|
|||||||
ann_t = sign_to_foolscap(ann, sk)
|
ann_t = sign_to_foolscap(ann, sk)
|
||||||
(msg, sig, key) = ann_t
|
(msg, sig, key) = ann_t
|
||||||
self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes
|
self.failUnlessEqual(type(msg), type("".encode("utf-8"))) # bytes
|
||||||
self.failUnlessEqual(simplejson.loads(msg.decode("utf-8")), ann)
|
self.failUnlessEqual(json.loads(msg.decode("utf-8")), ann)
|
||||||
self.failUnless(sig.startswith("v0-"))
|
self.failUnless(sig.startswith("v0-"))
|
||||||
self.failUnless(key.startswith("v0-"))
|
self.failUnless(key.startswith("v0-"))
|
||||||
(ann2,key2) = unsign_from_foolscap(ann_t)
|
(ann2,key2) = unsign_from_foolscap(ann_t)
|
||||||
@ -947,7 +947,7 @@ class Signatures(unittest.TestCase):
|
|||||||
unsign_from_foolscap, (msg, sig, None))
|
unsign_from_foolscap, (msg, sig, None))
|
||||||
# bad signature
|
# bad signature
|
||||||
bad_ann = {"key1": "value2"}
|
bad_ann = {"key1": "value2"}
|
||||||
bad_msg = simplejson.dumps(bad_ann).encode("utf-8")
|
bad_msg = json.dumps(bad_ann).encode("utf-8")
|
||||||
self.failUnlessRaises(keyutil.BadSignatureError,
|
self.failUnlessRaises(keyutil.BadSignatureError,
|
||||||
unsign_from_foolscap, (bad_msg,sig,key))
|
unsign_from_foolscap, (bad_msg,sig,key))
|
||||||
|
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
import os, sys, time
|
import os, sys, time
|
||||||
import shutil, simplejson
|
import shutil, json
|
||||||
|
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from twisted.internet import defer, task, reactor
|
from twisted.internet import defer, task, reactor
|
||||||
@ -1463,7 +1463,7 @@ class MockTest(SingleMagicFolderTestMixin, unittest.TestCase):
|
|||||||
d.addCallback(_got_stats)
|
d.addCallback(_got_stats)
|
||||||
d.addCallback(lambda res: self.GET("statistics?t=json"))
|
d.addCallback(lambda res: self.GET("statistics?t=json"))
|
||||||
def _got_stats_json(res):
|
def _got_stats_json(res):
|
||||||
data = simplejson.loads(res)
|
data = json.loads(res)
|
||||||
self.failUnlessEqual(data["counters"]["magic_folder.uploader.dirs_monitored"], 1)
|
self.failUnlessEqual(data["counters"]["magic_folder.uploader.dirs_monitored"], 1)
|
||||||
self.failUnlessEqual(data["counters"]["magic_folder.uploader.objects_succeeded"], 1)
|
self.failUnlessEqual(data["counters"]["magic_folder.uploader.objects_succeeded"], 1)
|
||||||
self.failUnlessEqual(data["counters"]["magic_folder.uploader.files_uploaded"], 1)
|
self.failUnlessEqual(data["counters"]["magic_folder.uploader.files_uploaded"], 1)
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
|
|
||||||
import time, os.path, platform, stat, re, simplejson, struct, shutil
|
import time, os.path, platform, stat, re, json, struct, shutil
|
||||||
|
|
||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
|
|
||||||
@ -3279,8 +3279,8 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
|
|||||||
self.failUnlessIn("but expiration was not enabled", s)
|
self.failUnlessIn("but expiration was not enabled", s)
|
||||||
d.addCallback(_check_html)
|
d.addCallback(_check_html)
|
||||||
d.addCallback(lambda ign: self.render_json(webstatus))
|
d.addCallback(lambda ign: self.render_json(webstatus))
|
||||||
def _check_json(json):
|
def _check_json(raw):
|
||||||
data = simplejson.loads(json)
|
data = json.loads(raw)
|
||||||
self.failUnlessIn("lease-checker", data)
|
self.failUnlessIn("lease-checker", data)
|
||||||
self.failUnlessIn("lease-checker-progress", data)
|
self.failUnlessIn("lease-checker-progress", data)
|
||||||
d.addCallback(_check_json)
|
d.addCallback(_check_json)
|
||||||
@@ -3916,8 +3916,8 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
 d.addCallback(_after_first_bucket)

 d.addCallback(lambda ign: self.render_json(w))
-def _check_json(json):
-data = simplejson.loads(json)
+def _check_json(raw):
+data = json.loads(raw)
 # grr. json turns all dict keys into strings.
 so_far = data["lease-checker"]["cycle-to-date"]
 corrupt_shares = so_far["corrupt-shares"]
@@ -3943,8 +3943,8 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
 self.failUnlessEqual(last["corrupt-shares"], [(first_b32, 0)])
 d.addCallback(_after_first_cycle)
 d.addCallback(lambda ign: self.render_json(w))
-def _check_json_history(json):
-data = simplejson.loads(json)
+def _check_json_history(raw):
+data = json.loads(raw)
 last = data["lease-checker"]["history"]["0"]
 corrupt_shares = last["corrupt-shares"]
 self.failUnlessEqual(corrupt_shares, [[first_b32, 0]])
@@ -3996,8 +3996,8 @@ class WebStatus(unittest.TestCase, pollmixin.PollMixin, WebRenderingMixin):
 self.failUnlessIn("Reserved space: - 0 B (0)", s)
 d.addCallback(_check_html)
 d.addCallback(lambda ign: self.render_json(w))
-def _check_json(json):
-data = simplejson.loads(json)
+def _check_json(raw):
+data = json.loads(raw)
 s = data["stats"]
 self.failUnlessEqual(s["storage_server.accepting_immutable_shares"], 1)
 self.failUnlessEqual(s["storage_server.reserved_space"], 0)
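As the retained comment notes ("grr. json turns all dict keys into strings"), JSON object keys are always strings, so integer-keyed dicts such as the lease-checker history come back with string keys after a dumps/loads round trip; that is why the test indexes history["0"] rather than history[0]. A small illustrative sketch using the stdlib module (not code from the patch):

    import json

    history = {0: {"corrupt-shares": []}}   # integer key on the server side
    round_tripped = json.loads(json.dumps(history))
    assert "0" in round_tripped and 0 not in round_tripped  # key is now the string "0"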
@@ -1,5 +1,5 @@
-import os, re, sys, time, simplejson
+import os, re, sys, time, json

 from twisted.trial import unittest
 from twisted.internet import defer
@@ -1366,7 +1366,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 getPage(self.introweb_url + "?t=json",
 method="GET", followRedirect=True))
 def _check_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 try:
 self.failUnlessEqual(data["subscription_summary"],
 {"storage": 5})
@@ -1814,7 +1814,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 d.addCallback(lambda res:
 self.GET("helper_status?t=json", followRedirect=True))
 def _got_helper_status_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data["chk_upload_helper.upload_need_upload"],
 1)
 self.failUnlessEqual(data["chk_upload_helper.incoming_count"], 1)
@@ -1838,7 +1838,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 d.addCallback(lambda res:
 getPage(self.helper_webish_url + "helper_status?t=json"))
 def _got_non_helper_status_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data, {})
 d.addCallback(_got_non_helper_status_json)

@@ -1850,7 +1850,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
 d.addCallback(_got_stats)
 d.addCallback(lambda res: self.GET("statistics?t=json"))
 def _got_stats_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data["counters"]["uploader.files_uploaded"], 5)
 self.failUnlessEqual(data["stats"]["chk_upload_helper.upload_need_upload"], 1)
 d.addCallback(_got_stats_json)
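The pattern in these system tests is unchanged by the swap: fetch a ?t=json view over HTTP and decode the body with the JSON module. A minimal sketch of the same idea outside the test harness, for Python 2 as in the codebase; the node URL and port are an assumption (the conventional local webapi port), not taken from the patch:

    import json, urllib2

    body = urllib2.urlopen("http://127.0.0.1:3456/statistics?t=json").read()
    stats = json.loads(body)
    print stats["counters"].get("uploader.files_uploaded")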
@@ -1,5 +1,5 @@
 import os.path, re, urllib
-import simplejson
+import json
 from StringIO import StringIO
 from nevow import rend
 from twisted.trial import unittest
@@ -96,7 +96,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_got_html_good_return_to)
 d.addCallback(self.CHECK, "good", "t=check&output=json")
 def _got_json_good(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessEqual(r["summary"], "Healthy")
 self.failUnless(r["results"]["healthy"])
 self.failIfIn("needs-rebalancing", r["results"])
@@ -116,7 +116,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_got_html_small_return_to)
 d.addCallback(self.CHECK, "small", "t=check&output=json")
 def _got_json_small(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessEqual(r["storage-index"], "")
 self.failUnless(r["results"]["healthy"])
 d.addCallback(_got_json_small)
@@ -128,7 +128,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_got_html_smalldir)
 d.addCallback(self.CHECK, "smalldir", "t=check&output=json")
 def _got_json_smalldir(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessEqual(r["storage-index"], "")
 self.failUnless(r["results"]["healthy"])
 d.addCallback(_got_json_smalldir)
@@ -139,7 +139,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_got_html_sick)
 d.addCallback(self.CHECK, "sick", "t=check&output=json")
 def _got_json_sick(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessEqual(r["summary"],
 "Not Healthy: 9 shares (enc 3-of-10)")
 self.failIf(r["results"]["healthy"])
@@ -153,7 +153,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_got_html_dead)
 d.addCallback(self.CHECK, "dead", "t=check&output=json")
 def _got_json_dead(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessEqual(r["summary"],
 "Not Healthy: 1 shares (enc 3-of-10)")
 self.failIf(r["results"]["healthy"])
@@ -167,7 +167,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_got_html_corrupt)
 d.addCallback(self.CHECK, "corrupt", "t=check&verify=true&output=json")
 def _got_json_corrupt(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessIn("Unhealthy: 9 shares (enc 3-of-10)", r["summary"])
 self.failIf(r["results"]["healthy"])
 self.failUnless(r["results"]["recoverable"])
@@ -285,7 +285,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

 d.addCallback(self.CHECK, "sick", "t=check&repair=true&output=json")
 def _got_json_sick(res):
-r = simplejson.loads(res)
+r = json.loads(res)
 self.failUnlessReallyEqual(r["repair-attempted"], True)
 self.failUnlessReallyEqual(r["repair-successful"], True)
 self.failUnlessEqual(r["pre-repair-results"]["summary"],
@@ -347,7 +347,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
 def _check_directory_json(res, expect_rw_uri):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data[0], "dirnode")
 f = data[1]["children"][name]
 self.failUnlessEqual(f[0], "unknown")
@@ -388,7 +388,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(_check_info, expect_rw_uri=False, expect_ro_uri=True)

 def _check_json(res, expect_rw_uri):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data[0], "unknown")
 if expect_rw_uri:
 self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), unknown_rwcap, data)
@@ -550,7 +550,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 # ... and in JSON.
 d.addCallback(lambda ign: self.GET(self.rooturl+"?t=json"))
 def _check_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data[0], "dirnode")
 listed_children = data[1]["children"]
 self.failUnlessReallyEqual(sorted(listed_children.keys()), [u"lonely"])
@@ -607,7 +607,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(self.CHECK, "root", "t=stream-deep-check")
 def _done(res):
 try:
-units = [simplejson.loads(line)
+units = [json.loads(line)
 for line in res.splitlines()
 if line]
 except ValueError:
@@ -643,7 +643,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(self.CHECK, "root", "t=stream-manifest")
 def _check_manifest(res):
 self.failUnless(res.endswith("\n"))
-units = [simplejson.loads(t) for t in res[:-1].split("\n")]
+units = [json.loads(t) for t in res[:-1].split("\n")]
 self.failUnlessReallyEqual(len(units), 5+1)
 self.failUnlessEqual(units[-1]["type"], "stats")
 first = units[0]
@@ -700,7 +700,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
 error_line)
 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
-units = [simplejson.loads(line) for line in lines[:first_error]]
+units = [json.loads(line) for line in lines[:first_error]]
 self.failUnlessReallyEqual(len(units), 6) # includes subdir
 last_unit = units[-1]
 self.failUnlessEqual(last_unit["path"], ["subdir"])
@@ -721,7 +721,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 self.failUnlessIn("ERROR: UnrecoverableFileError(no recoverable versions)",
 error_line)
 self.failUnless(len(error_msg) > 2, error_msg_s) # some traceback
-units = [simplejson.loads(line) for line in lines[:first_error]]
+units = [json.loads(line) for line in lines[:first_error]]
 self.failUnlessReallyEqual(len(units), 6) # includes subdir
 last_unit = units[-1]
 self.failUnlessEqual(last_unit["path"], ["subdir"])
@@ -794,7 +794,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

 d.addCallback(self.CHECK, "root", "t=stream-deep-check&repair=true")
 def _done(res):
-units = [simplejson.loads(line)
+units = [json.loads(line)
 for line in res.splitlines()
 if line]
 self.failUnlessReallyEqual(len(units), 4+1)
@@ -977,7 +977,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi

 d.addCallback(self.CHECK, "root", "t=stream-deep-check") # no add-lease
 def _done(res):
-units = [simplejson.loads(line)
+units = [json.loads(line)
 for line in res.splitlines()
 if line]
 # root, one, small, mutable, stats
@@ -1290,7 +1290,7 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 # ... and in JSON (used by CLI).
 d.addCallback(lambda ign: self.GET(self.dir_url+"?t=json", followRedirect=True))
 def _check_dir_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, list), data)
 self.failUnlessEqual(data[0], "dirnode")
 self.failUnless(isinstance(data[1], dict), data)
@@ -1336,11 +1336,11 @@ class Grid(GridTestMixin, WebErrorMixin, ShouldFailMixin, testutil.ReallyEqualMi
 d.addCallback(lambda ign: self.GET(self.dir_url_base, followRedirect=True))
 d.addCallback(lambda body: self.failUnlessIn(DIR_HTML_TAG, body))
 d.addCallback(lambda ign: self.GET(self.dir_url_json1))
-d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
+d.addCallback(lambda res: json.loads(res)) # just check it decodes
 d.addCallback(lambda ign: self.GET(self.dir_url_json2))
-d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
+d.addCallback(lambda res: json.loads(res)) # just check it decodes
 d.addCallback(lambda ign: self.GET(self.dir_url_json_ro))
-d.addCallback(lambda res: simplejson.loads(res)) # just check it decodes
+d.addCallback(lambda res: json.loads(res)) # just check it decodes
 d.addCallback(lambda ign: self.GET(self.child_url))
 d.addCallback(lambda body: self.failUnlessEqual(DATA, body))
@@ -1,5 +1,5 @@
 import os.path, re, urllib, time, cgi
-import simplejson
+import json

 from twisted.application import service
 from twisted.trial import unittest
@@ -403,7 +403,7 @@ class WebMixin(testutil.TimezoneMixin):
 self.failUnlessReallyEqual(res, self.SUBBAZ_CONTENTS, res)

 def failUnlessIsBarJSON(self, res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, list))
 self.failUnlessEqual(data[0], "filenode")
 self.failUnless(isinstance(data[1], dict))
@@ -414,7 +414,7 @@ class WebMixin(testutil.TimezoneMixin):
 self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))

 def failUnlessIsQuuxJSON(self, res, readonly=False):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, list))
 self.failUnlessEqual(data[0], "filenode")
 self.failUnless(isinstance(data[1], dict))
@@ -433,7 +433,7 @@ class WebMixin(testutil.TimezoneMixin):
 self.failUnlessReallyEqual(metadata['size'], len(self.QUUX_CONTENTS))

 def failUnlessIsFooJSON(self, res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, list))
 self.failUnlessEqual(data[0], "dirnode", res)
 self.failUnless(isinstance(data[1], dict))
@@ -778,7 +778,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(_check)
 d.addCallback(lambda res: self.GET("/status/?t=json"))
 def _check_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, dict))
 #active = data["active"]
 # TODO: test more. We need a way to fake an active operation
@@ -791,7 +791,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(_check_dl)
 d.addCallback(lambda res: self.GET("/status/down-%d/event_json" % dl_num))
 def _check_dl_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, dict))
 self.failUnlessIn("read", data)
 self.failUnlessEqual(data["read"][0]["length"], 120)
@@ -1165,7 +1165,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 # Check that etags work with immutable directories
 (newkids, caps) = self._create_immutable_children()
 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 def _stash_immdir_uri(uri):
 self._immdir_uri = uri
 return uri
@@ -1449,7 +1449,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 return d

 def failUnlessHasBarDotTxtMetadata(self, res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless(isinstance(data, list))
 self.failUnlessIn("metadata", data[1])
 self.failUnlessIn("tahoe", data[1]["metadata"])
@@ -1477,8 +1477,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d = self.PUT("/uri?format=mdmf",
 self.NEWFILE_CONTENTS * 300000)
 d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
-def _got_json(json, version):
-data = simplejson.loads(json)
+def _got_json(raw, version):
+data = json.loads(raw)
 assert "filenode" == data[0]
 data = data[1]
 assert isinstance(data, dict)
@@ -1692,8 +1692,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 # its JSON, we should see their encodings.
 d.addCallback(lambda ignored:
 self.GET(self.public_url + "/foo?t=json"))
-def _got_json(json):
-data = simplejson.loads(json)
+def _got_json(raw):
+data = json.loads(raw)
 assert data[0] == "dirnode"

 data = data[1]
@@ -1830,7 +1830,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d = self.POST(self.public_url + "/foo/?t=stream-manifest")
 def _check(res):
 self.failUnless(res.endswith("\n"))
-units = [simplejson.loads(t) for t in res[:-1].split("\n")]
+units = [json.loads(t) for t in res[:-1].split("\n")]
 self.failUnlessReallyEqual(len(units), 10)
 self.failUnlessEqual(units[-1]["type"], "stats")
 first = units[0]
@@ -1944,7 +1944,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 else:
 version = SDMF_VERSION # for later
 d = self.POST2(self.public_url + query,
-simplejson.dumps(newkids))
+json.dumps(newkids))
 def _check(uri):
 n = self.s.create_node_from_uri(uri.strip())
 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
@@ -2001,12 +2001,12 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 400, "Bad Request", "Unknown format: foo",
 self.POST2, self.public_url + \
 "/foo/newdir?t=mkdir-with-children&format=foo",
-simplejson.dumps(newkids))
+json.dumps(newkids))

 def test_POST_NEWDIRURL_immutable(self):
 (newkids, caps) = self._create_immutable_children()
 d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 def _check(uri):
 n = self.s.create_node_from_uri(uri.strip())
 d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
@@ -2051,7 +2051,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 "needed to be immutable but was not",
 self.POST2,
 self.public_url + "/foo/newdir?t=mkdir-immutable",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 return d

 def test_PUT_NEWDIRURL_exists(self):
@@ -2392,8 +2392,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 (uri_prefix, filecap))
 return self.GET("/uri/%s?t=json" % filecap)
 d.addCallback(_got_results)
-def _got_json(json):
-data = simplejson.loads(json)
+def _got_json(raw):
+data = json.loads(raw)
 data = data[1]
 self.failUnlessIn("format", data)
 self.failUnlessEqual(data["format"], format.upper())
@@ -2426,8 +2426,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 self.failUnless(filecap.startswith(uri_prefix))
 return self.GET(self.public_url + "/foo/%s?t=json" % filename)
 d.addCallback(_got_filecap)
-def _got_json(json):
-data = simplejson.loads(json)
+def _got_json(raw):
+data = json.loads(raw)
 data = data[1]
 self.failUnlessIn("format", data)
 self.failUnlessEqual(data["format"], format.upper())
@@ -2518,7 +2518,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 self.GET(self.public_url + "/foo/?t=json",
 followRedirect=True))
 def _check_page_json(res):
-parsed = simplejson.loads(res)
+parsed = json.loads(res)
 self.failUnlessEqual(parsed[0], "dirnode")
 children = dict( [(unicode(name),value)
 for (name,value)
@@ -2536,7 +2536,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda res:
 self.GET(self.public_url + "/foo/new.txt?t=json"))
 def _check_file_json(res):
-parsed = simplejson.loads(res)
+parsed = json.loads(res)
 self.failUnlessEqual(parsed[0], "filenode")
 self.failUnless(parsed[1]["mutable"])
 self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
@@ -2710,7 +2710,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda res:
 self.POST(bar_url, t="check", output="JSON"))
 def _check_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessIn("storage-index", data)
 self.failUnless(data["results"]["healthy"])
 d.addCallback(_check_json)
@@ -2769,7 +2769,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda res:
 self.POST(foo_url, t="check", output="JSON"))
 def _check_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessIn("storage-index", data)
 self.failUnless(data["results"]["healthy"])
 d.addCallback(_check_json)
@@ -2830,7 +2830,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 url += "?t=status&output=JSON"
 d = self.GET(url)
 def _got(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 if not data["finished"]:
 d = self.stall(delay=1.0)
 d.addCallback(self.wait_for_operation, ophandle)
@@ -2847,7 +2847,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d = self.GET(url)
 def _got(res):
 if output and output.lower() == "json":
-return simplejson.loads(res)
+return json.loads(res)
 return res
 d.addCallback(_got)
 return d
@@ -2894,7 +2894,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda res:
 self.GET("/operations/123/%s?output=JSON" % foo_si_s))
 def _check_foo_json(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data["storage-index"], foo_si_s)
 self.failUnless(data["results"]["healthy"])
 d.addCallback(_check_foo_json)
@@ -2974,7 +2974,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 (newkids, caps) = self._create_initial_children()
 d = self.POST2(self.public_url +
 "/foo?t=mkdir-with-children&name=newdir",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 d.addCallback(lambda res:
 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
@@ -2987,7 +2987,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 (newkids, caps) = self._create_initial_children()
 d = self.POST2(self.public_url +
 "/foo?t=mkdir-with-children&name=newdir&format=mdmf",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 d.addCallback(lambda res:
 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
@@ -3003,7 +3003,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 (newkids, caps) = self._create_initial_children()
 d = self.POST2(self.public_url +
 "/foo?t=mkdir-with-children&name=newdir&format=sdmf",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 d.addCallback(lambda res:
 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
@@ -3020,13 +3020,13 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 400, "Bad Request", "Unknown format: foo",
 self.POST, self.public_url + \
 "/foo?t=mkdir-with-children&name=newdir&format=foo",
-simplejson.dumps(newkids))
+json.dumps(newkids))

 def test_POST_mkdir_immutable(self):
 (newkids, caps) = self._create_immutable_children()
 d = self.POST2(self.public_url +
 "/foo?t=mkdir-immutable&name=newdir",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 d.addCallback(lambda res:
 self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
 d.addCallback(lambda res: self._foo_node.get(u"newdir"))
@@ -3051,7 +3051,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 self.POST2,
 self.public_url +
 "/foo?t=mkdir-immutable&name=newdir",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 return d

 def test_POST_mkdir_2(self):
@@ -3199,7 +3199,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi

 def test_POST_mkdir_no_parentdir_initial_children(self):
 (newkids, caps) = self._create_initial_children()
-d = self.POST2("/uri?t=mkdir-with-children", simplejson.dumps(newkids))
+d = self.POST2("/uri?t=mkdir-with-children", json.dumps(newkids))
 def _after_mkdir(res):
 self.failUnless(res.startswith("URI:DIR"), res)
 n = self.s.create_node_from_uri(res)
@@ -3238,7 +3238,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 "t=mkdir does not accept children=, "
 "try t=mkdir-with-children instead",
 self.POST2, "/uri?t=mkdir", # without children
-simplejson.dumps(newkids))
+json.dumps(newkids))
 return d

 def test_POST_noparent_bad(self):
@@ -3251,7 +3251,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi

 def test_POST_mkdir_no_parentdir_immutable(self):
 (newkids, caps) = self._create_immutable_children()
-d = self.POST2("/uri?t=mkdir-immutable", simplejson.dumps(newkids))
+d = self.POST2("/uri?t=mkdir-immutable", json.dumps(newkids))
 def _after_mkdir(res):
 self.failUnless(res.startswith("URI:DIR"), res)
 n = self.s.create_node_from_uri(res)
@@ -3283,7 +3283,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 "needed to be immutable but was not",
 self.POST2,
 "/uri?t=mkdir-immutable",
-simplejson.dumps(newkids))
+json.dumps(newkids))
 return d

 def test_welcome_page_mkdir_button(self):
@@ -3582,7 +3582,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 return d

 def failUnlessIsEmptyJSON(self, res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnlessEqual(data[0], "dirnode", data)
 self.failUnlessReallyEqual(len(data[1]["children"]), 0)

@@ -4006,8 +4006,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 new_contents)
 d.addCallback(lambda ignored:
 self.GET(self.public_url + "/foo/mdmf.txt?t=json"))
-def _got_json(json):
-data = simplejson.loads(json)
+def _got_json(raw):
+data = json.loads(raw)
 data = data[1]
 self.failUnlessIn("format", data)
 self.failUnlessEqual(data["format"], "MDMF")
@@ -4023,8 +4023,8 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 new_contents)
 d.addCallback(lambda ignored:
 self.GET(self.public_url + "/foo/sdmf.txt?t=json"))
-def _got_json(json):
-data = simplejson.loads(json)
+def _got_json(raw):
+data = json.loads(raw)
 data = data[1]
 self.failUnlessIn("format", data)
 self.failUnlessEqual(data["format"], "SDMF")
@@ -4281,12 +4281,12 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda ignored:
 self.GET("/operations/128?t=status&output=JSON"))
 def _check1(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless("finished" in data, res)
 monitor = self.ws.root.child_operations.handles["128"][0]
 d = self.POST("/operations/128?t=cancel&output=JSON")
 def _check2(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless("finished" in data, res)
 # t=cancel causes the handle to be forgotten
 self.failUnless(monitor.is_cancelled())
@@ -4307,7 +4307,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda ignored:
 self.GET("/operations/129?t=status&output=JSON&retain-for=0"))
 def _check1(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless("finished" in data, res)
 d.addCallback(_check1)
 # the retain-for=0 will cause the handle to be expired very soon
@@ -4361,7 +4361,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda ign:
 self.GET("/operations/131?t=status&output=JSON"))
 def _check1(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless("finished" in data, res)
 d.addCallback(_check1)
 # Create an ophandle, don't collect it, then try to collect it
@@ -4395,7 +4395,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
 d.addCallback(lambda ign:
 self.GET("/operations/133?t=status&output=JSON"))
 def _check1(res):
-data = simplejson.loads(res)
+data = json.loads(res)
 self.failUnless("finished" in data, res)
 d.addCallback(_check1)
 # Create another uncollected ophandle, then try to collect it
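Several of the tests above build the request body for t=mkdir-with-children and t=mkdir-immutable by dumping the newkids mapping to JSON. As convert_children_json in the next file shows, that body is a JSON object mapping each child name to a (type, metadata) pair. A hedged sketch of constructing such a body with the stdlib module; the capability strings are placeholders, not real Tahoe caps:

    import json

    newkids = {
        u"child.txt": ["filenode", {"ro_uri": "URI:CHK:...", "metadata": {}}],
        u"subdir":    ["dirnode",  {"rw_uri": "URI:DIR2:..."}],
    }
    body = json.dumps(newkids)   # this string becomes the POST body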
@@ -1,6 +1,6 @@

 import time
-import simplejson
+import json
 from nevow import rend, inevow, tags as T
 from twisted.web import http, html
 from allmydata.web.common import getxmlfile, get_arg, get_root, WebError
@@ -196,7 +196,7 @@ class LiteralCheckResultsRenderer(rend.Page, ResultsBase):
 def json(self, ctx):
 inevow.IRequest(ctx).setHeader("content-type", "text/plain")
 data = json_check_results(None)
-return simplejson.dumps(data, indent=1) + "\n"
+return json.dumps(data, indent=1) + "\n"

 def render_return(self, ctx, data):
 req = inevow.IRequest(ctx)
@@ -233,7 +233,7 @@ class CheckResultsRenderer(CheckerBase, rend.Page, ResultsBase):
 def json(self, ctx):
 inevow.IRequest(ctx).setHeader("content-type", "text/plain")
 data = json_check_results(self.r)
-return simplejson.dumps(data, indent=1) + "\n"
+return json.dumps(data, indent=1) + "\n"

 def render_summary(self, ctx, data):
 results = []
@@ -278,7 +278,7 @@ class CheckAndRepairResultsRenderer(CheckerBase, rend.Page, ResultsBase):
 def json(self, ctx):
 inevow.IRequest(ctx).setHeader("content-type", "text/plain")
 data = json_check_and_repair_results(self.r)
-return simplejson.dumps(data, indent=1) + "\n"
+return json.dumps(data, indent=1) + "\n"

 def render_summary(self, ctx, data):
 cr = data.get_post_repair_results()
@@ -359,7 +359,7 @@ class DeepCheckResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
 in res.get_all_results().items()
 if not r.is_healthy() ]
 data["stats"] = res.get_stats()
-return simplejson.dumps(data, indent=1) + "\n"
+return json.dumps(data, indent=1) + "\n"

 def render_root_storage_index(self, ctx, data):
 return self.monitor.get_status().get_root_storage_index_string()
@@ -529,7 +529,7 @@ class DeepCheckAndRepairResultsRenderer(rend.Page, ResultsBase, ReloadMixin):
 if not crr.get_pre_repair_results().is_healthy() ]
 data["list-unhealthy-files"] = unhealthy
 data["stats"] = res.get_stats()
-return simplejson.dumps(data, indent=1) + "\n"
+return json.dumps(data, indent=1) + "\n"

 def render_root_storage_index(self, ctx, data):
 return self.monitor.get_status().get_root_storage_index_string()
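Note that the renderer methods above are themselves named json (def json(self, ctx)) and still call json.dumps safely: a method name lives in the class namespace, which the local/global name lookup inside the method body never consults, so the module-level import is not shadowed. A tiny sketch of why this works, with an illustrative class that is not part of the patch:

    import json

    class Renderer(object):
        def json(self, data):
            # "json" here still resolves to the module, not to this method
            return json.dumps(data, indent=1) + "\n"

    assert Renderer().json({"healthy": True}).startswith("{")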
@@ -1,6 +1,6 @@

 import time
-import simplejson
+import json

 from twisted.web import http, server, resource
 from twisted.python import log
@@ -134,7 +134,7 @@ def convert_children_json(nodemaker, children_json):
 t=mkdir-with-children and t=mkdir-immutable"""
 children = {}
 if children_json:
-data = simplejson.loads(children_json)
+data = json.loads(children_json)
 for (namex, (ctype, propdict)) in data.iteritems():
 namex = unicode(namex)
 writecap = to_str(propdict.get("rw_uri"))
@@ -432,6 +432,6 @@ class TokenOnlyWebApi(resource.Resource):
 except Exception:
 message, code = humanize_failure(Failure())
 req.setResponseCode(code)
-return simplejson.dumps({"error": message})
+return json.dumps({"error": message})
 else:
 raise WebError("'%s' invalid type for 't' arg" % (t,), http.BAD_REQUEST)
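The updated comment ("json returns str *or* unicode") keeps the defensive namex = unicode(namex) normalization. With the Python 2 stdlib json module, decoded object keys are consistently unicode, so the conversion is effectively a no-op there, while it remains correct for any decoder that hands back byte strings. A quick check, assuming Python 2 as used throughout this codebase:

    import json

    data = json.loads('{"kid.txt": ["filenode", {}]}')
    key = list(data.keys())[0]
    # on Python 2's stdlib json this key is already unicode; unicode(key) is harmless
    assert unicode(key) == u"kid.txt"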
@@ -1,5 +1,5 @@

-import simplejson
+import json
 import urllib

 from zope.interface import implements
@@ -549,14 +549,14 @@ class DirectoryNodeHandler(RenderMixin, rend.Page, ReplaceMeMixin):
 req.content.seek(0)
 body = req.content.read()
 try:
-children = simplejson.loads(body)
+children = json.loads(body)
 except ValueError, le:
 le.args = tuple(le.args + (body,))
 # TODO test handling of bad JSON
 raise
 cs = {}
 for name, (file_or_dir, mddict) in children.iteritems():
-name = unicode(name) # simplejson-2.0.1 returns str *or* unicode
+name = unicode(name) # json returns str *or* unicode
 writecap = mddict.get('rw_uri')
 if writecap is not None:
 writecap = str(writecap)
@@ -907,8 +907,7 @@ def DirectoryJSONMetadata(ctx, dirnode):
 contents['verify_uri'] = verifycap.to_string()
 contents['mutable'] = dirnode.is_mutable()
 data = ("dirnode", contents)
-json = simplejson.dumps(data, indent=1) + "\n"
-return json
+return json.dumps(data, indent=1) + "\n"
 d.addCallback(_got)
 d.addCallback(text_plain, ctx)

@@ -916,7 +915,7 @@ def DirectoryJSONMetadata(ctx, dirnode):
 message, code = humanize_failure(f)
 req = IRequest(ctx)
 req.setResponseCode(code)
-return simplejson.dumps({
+return json.dumps({
 "error": message,
 })
 d.addErrback(error)
@@ -1014,7 +1013,7 @@ class ManifestResults(rend.Page, ReloadMixin):
 # generator that walks the set rather than list(setofthing) to
 # save a small amount of memory (4B*len) and a moderate amount of
 # CPU.
-return simplejson.dumps(status, indent=1)
+return json.dumps(status, indent=1)

 def _si_abbrev(self):
 si = self.monitor.origin_si
@@ -1073,7 +1072,7 @@ class DeepSizeResults(rend.Page):
 status = {"finished": self.monitor.is_finished(),
 "size": self.monitor.get_status(),
 }
-return simplejson.dumps(status)
+return json.dumps(status)

 class DeepStatsResults(rend.Page):
 def __init__(self, client, monitor):
@@ -1085,7 +1084,7 @@ class DeepStatsResults(rend.Page):
 inevow.IRequest(ctx).setHeader("content-type", "text/plain")
 s = self.monitor.get_status().copy()
 s["finished"] = self.monitor.is_finished()
-return simplejson.dumps(s, indent=1)
+return json.dumps(s, indent=1)

 class ManifestStreamer(dirnode.DeepStats):
 implements(IPushProducer)
@@ -1130,7 +1129,7 @@ class ManifestStreamer(dirnode.DeepStats):
 si = base32.b2a(si)
 d["storage-index"] = si or ""

-j = simplejson.dumps(d, ensure_ascii=True)
+j = json.dumps(d, ensure_ascii=True)
 assert "\n" not in j
 self.req.write(j+"\n")

@@ -1139,7 +1138,7 @@ class ManifestStreamer(dirnode.DeepStats):
 d = {"type": "stats",
 "stats": stats,
 }
-j = simplejson.dumps(d, ensure_ascii=True)
+j = json.dumps(d, ensure_ascii=True)
 assert "\n" not in j
 self.req.write(j+"\n")
 return ""
@@ -1208,7 +1207,7 @@ class DeepCheckStreamer(dirnode.DeepStats):
 return data

 def write_line(self, data):
-j = simplejson.dumps(data, ensure_ascii=True)
+j = json.dumps(data, ensure_ascii=True)
 assert "\n" not in j
 self.req.write(j+"\n")

@@ -1217,7 +1216,7 @@ class DeepCheckStreamer(dirnode.DeepStats):
 d = {"type": "stats",
 "stats": stats,
 }
-j = simplejson.dumps(d, ensure_ascii=True)
+j = json.dumps(d, ensure_ascii=True)
 assert "\n" not in j
 self.req.write(j+"\n")
 return ""
@@ -1263,4 +1262,4 @@ def UnknownJSONMetadata(ctx, node, edge_metadata, is_parent_known_immutable):

 if edge_metadata is not None:
 data[1]['metadata'] = edge_metadata
-return text_plain(simplejson.dumps(data, indent=1) + "\n", ctx)
+return text_plain(json.dumps(data, indent=1) + "\n", ctx)
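The streamer classes above rely on json.dumps(..., ensure_ascii=True) producing a single physical line: with no indent and the default separators the encoder emits no newlines (any newline inside a string value is escaped), which the assert "\n" not in j check guards, so each manifest or check unit can be written as one line of newline-delimited JSON. A minimal sketch of the same framing; the field names simply follow the hunks above:

    import json

    def write_line(write, d):
        # one JSON document per line; ensure_ascii keeps the output 7-bit clean
        j = json.dumps(d, ensure_ascii=True)
        assert "\n" not in j
        write(j + "\n")

    lines = []
    write_line(lines.append, {"type": "stats", "stats": {"count-files": 5}})
    write_line(lines.append, {"type": "file", "storage-index": ""})
    assert all(line.endswith("\n") for line in lines)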
@@ -1,5 +1,5 @@

-import simplejson
+import json

 from twisted.web import http, static
 from twisted.internet import defer
@@ -509,7 +509,7 @@ def FileJSONMetadata(ctx, filenode, edge_metadata):
 if edge_metadata is not None:
 data[1]['metadata'] = edge_metadata

-return text_plain(simplejson.dumps(data, indent=1) + "\n", ctx)
+return text_plain(json.dumps(data, indent=1) + "\n", ctx)

 def FileURI(ctx, filenode):
 return text_plain(filenode.get_uri(), ctx)
@@ -4,7 +4,7 @@ from nevow import rend, inevow
 from nevow.static import File as nevow_File
 from nevow.util import resource_filename
 import allmydata
-import simplejson
+import json
 from allmydata import get_package_versions_string
 from allmydata.util import idlib
 from allmydata.web.common import getxmlfile, get_arg, render_time
@@ -49,7 +49,7 @@ class IntroducerRoot(rend.Page):
 announcement_summary[service_name] += 1
 res["announcement_summary"] = announcement_summary

-return simplejson.dumps(res, indent=1) + "\n"
+return json.dumps(res, indent=1) + "\n"

 # FIXME: This code is duplicated in root.py and introweb.py.
 def data_rendered_at(self, ctx, data):
@ -1,4 +1,4 @@
|
|||||||
import simplejson
|
import json
|
||||||
|
|
||||||
from allmydata.web.common import TokenOnlyWebApi
|
from allmydata.web.common import TokenOnlyWebApi
|
||||||
|
|
||||||
@ -38,4 +38,4 @@ class MagicFolderWebApi(TokenOnlyWebApi):
|
|||||||
d['percent_done'] = item.progress.progress
|
d['percent_done'] = item.progress.progress
|
||||||
data.append(d)
|
data.append(d)
|
||||||
|
|
||||||
return simplejson.dumps(data)
|
return json.dumps(data)
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
import pprint, itertools, hashlib
|
import pprint, itertools, hashlib
|
||||||
import simplejson
|
import json
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from nevow import rend, inevow, tags as T
|
from nevow import rend, inevow, tags as T
|
||||||
from allmydata.util import base32, idlib
|
from allmydata.util import base32, idlib
|
||||||
@ -470,7 +470,7 @@ class DownloadStatusPage(DownloadResultsRendererMixin, rend.Page):
|
|||||||
# so they get converted to strings. Stupid javascript.
|
# so they get converted to strings. Stupid javascript.
|
||||||
data["serverids"] = server_shortnames
|
data["serverids"] = server_shortnames
|
||||||
data["bounds"] = {"min": ds.first_timestamp, "max": ds.last_timestamp}
|
data["bounds"] = {"min": ds.first_timestamp, "max": ds.last_timestamp}
|
||||||
return simplejson.dumps(data, indent=1) + "\n"
|
return json.dumps(data, indent=1) + "\n"
|
||||||
|
|
||||||
def render_timeline_link(self, ctx, data):
|
def render_timeline_link(self, ctx, data):
|
||||||
from nevow import url
|
from nevow import url
|
||||||
@ -991,7 +991,7 @@ class Status(rend.Page):
|
|||||||
"progress": s.get_progress(),
|
"progress": s.get_progress(),
|
||||||
})
|
})
|
||||||
|
|
||||||
return simplejson.dumps(data, indent=1) + "\n"
|
return json.dumps(data, indent=1) + "\n"
|
||||||
|
|
||||||
def _get_all_statuses(self):
|
def _get_all_statuses(self):
|
||||||
h = self.history
|
h = self.history
|
||||||
@ -1124,8 +1124,8 @@ class HelperStatus(rend.Page):
|
|||||||
req.setHeader("content-type", "text/plain")
|
req.setHeader("content-type", "text/plain")
|
||||||
if self.helper:
|
if self.helper:
|
||||||
stats = self.helper.get_stats()
|
stats = self.helper.get_stats()
|
||||||
return simplejson.dumps(stats, indent=1) + "\n"
|
return json.dumps(stats, indent=1) + "\n"
|
||||||
return simplejson.dumps({}) + "\n"
|
return json.dumps({}) + "\n"
|
||||||
|
|
||||||
def render_active_uploads(self, ctx, data):
|
def render_active_uploads(self, ctx, data):
|
||||||
return data["chk_upload_helper.active_uploads"]
|
return data["chk_upload_helper.active_uploads"]
|
||||||
@ -1167,7 +1167,7 @@ class Statistics(rend.Page):
|
|||||||
if t == "json":
|
if t == "json":
|
||||||
stats = self.provider.get_stats()
|
stats = self.provider.get_stats()
|
||||||
req.setHeader("content-type", "text/plain")
|
req.setHeader("content-type", "text/plain")
|
||||||
return simplejson.dumps(stats, indent=1) + "\n"
|
return json.dumps(stats, indent=1) + "\n"
|
||||||
return rend.Page.renderHTTP(self, ctx)
|
return rend.Page.renderHTTP(self, ctx)
|
||||||
|
|
||||||
def data_get_stats(self, ctx, data):
|
def data_get_stats(self, ctx, data):
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
|
|
||||||
import time, simplejson
|
import time, json
|
||||||
from nevow import rend, tags as T, inevow
|
from nevow import rend, tags as T, inevow
|
||||||
from allmydata.web.common import getxmlfile, abbreviate_time, get_arg
|
from allmydata.web.common import getxmlfile, abbreviate_time, get_arg
|
||||||
from allmydata.util.abbreviate import abbreviate_space
|
from allmydata.util.abbreviate import abbreviate_space
|
||||||
@ -33,7 +33,7 @@ class StorageStatus(rend.Page):
|
|||||||
"lease-checker": self.storage.lease_checker.get_state(),
|
"lease-checker": self.storage.lease_checker.get_state(),
|
||||||
"lease-checker-progress": self.storage.lease_checker.get_progress(),
|
"lease-checker-progress": self.storage.lease_checker.get_progress(),
|
||||||
}
|
}
|
||||||
return simplejson.dumps(d, indent=1) + "\n"
|
return json.dumps(d, indent=1) + "\n"
|
||||||
|
|
||||||
def data_nickname(self, ctx, storage):
|
def data_nickname(self, ctx, storage):
|
||||||
return self.nickname
|
return self.nickname
|
||||||
|
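Note (illustrative, not part of the diff): the call sites above treat the stdlib json module as a drop-in replacement for simplejson. The streaming endpoints depend on dumps() emitting a single line when no indent argument is given, and on ensure_ascii=True escaping non-ASCII text, which is what keeps the one-JSON-object-per-line format and the assert "\n" not in j guard valid. A minimal sketch of those properties, using a made-up record shaped like the streamed entries:

import json

# Hypothetical record, shaped like the per-node entries streamed by
# ManifestStreamer/DeepCheckStreamer above (all values are invented).
record = {"type": "file",
          "path": [u"subdir", u"r\u00e9sum\u00e9.txt"],
          "storage-index": "2tlmcnmgtliiyk42p3dltnbuc4"}

j = json.dumps(record, ensure_ascii=True)
assert "\n" not in j   # no indent argument => a single line, safe to stream
print(j)               # non-ASCII is escaped, e.g. "r\u00e9sum\u00e9.txt"

# The t=json status/stats pages use indent=1 instead; that yields
# multi-line output, which is fine there because the whole document
# is returned at once rather than streamed line by line.
print(json.dumps(record, indent=1))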