explicit 'migrate pickle files' command

This commit is contained in:
meejah 2021-11-30 15:59:27 -07:00
parent 5855a30e34
commit 49f2489321

View File

@@ -18,7 +18,17 @@ except ImportError:
pass
from twisted.python import usage
from allmydata.scripts.common import BaseOptions
from twisted.python.filepath import (
FilePath,
)
from allmydata.scripts.common import (
BaseOptions,
BasedirOptions,
)
from allmydata.storage import (
crawler,
expirer,
)
class GenerateKeypairOptions(BaseOptions):
@@ -65,12 +75,54 @@ def derive_pubkey(options):
print("public:", str(ed25519.string_from_verifying_key(public_key), "ascii"), file=out)
return 0
class MigrateCrawlerOptions(BasedirOptions):
    """
    Options for the 'tahoe admin migrate-crawler' subcommand, which
    upgrades the storage crawler's pickle state files to JSON.
    """

    def getSynopsis(self):
        """Return the one-line usage summary for this subcommand."""
        return "Usage: tahoe [global-options] admin migrate-crawler"

    def getUsage(self, width=None):
        """Return the full usage text: the base-class usage plus an
        explanation of what the migration does and which files it touches."""
        base = BasedirOptions.getUsage(self, width)
        explanation = (
            "The crawler data is now stored as JSON to avoid"
            " potential security issues with pickle files.\n\nIf"
            " you are confident the state files in the 'storage/'"
            " subdirectory of your node are trustworthy, run this"
            " command to upgrade them to JSON.\n\nThe files are:"
            " lease_checker.history, lease_checker.state, and"
            " bucket_counter.state"
        )
        return base + explanation
def migrate_crawler(options):
    """
    Convert the storage server's crawler/expirer pickle state files in
    the node's 'storage/' subdirectory into their JSON equivalents.

    :param options: a MigrateCrawlerOptions; provides ``options['basedir']``
        (the node directory) and ``options.stdout`` (the stream progress
        messages are written to).

    One progress line is printed per file: converted, already converted,
    or not found.
    """
    out = options.stdout
    storage = FilePath(options['basedir']).child("storage")

    # (pickle file, converter) pairs; the expirer's history file uses a
    # different converter than the two crawler state files.
    conversions = [
        (storage.child("lease_checker.state"), crawler._convert_pickle_state_to_json),
        (storage.child("bucket_counter.state"), crawler._convert_pickle_state_to_json),
        (storage.child("lease_checker.history"), expirer._convert_pickle_state_to_json),
    ]

    for fp, converter in conversions:
        # Record existence before the upgrade call, which presumably
        # replaces the pickle with the JSON file -- verify in crawler.
        existed = fp.exists()
        newfp = crawler._maybe_upgrade_pickle_to_json(fp, converter)
        if existed:
            # Fix: direct output to options.stdout (as derive_pubkey does)
            # instead of the process's real stdout, so redirected/captured
            # streams receive the progress messages.
            print("Converted '{}' to '{}'".format(fp.path, newfp.path), file=out)
        else:
            if newfp.exists():
                print("Already converted: '{}'".format(newfp.path), file=out)
            else:
                print("Not found: '{}'".format(fp.path), file=out)
class AdminCommand(BaseOptions):
subCommands = [
("generate-keypair", None, GenerateKeypairOptions,
"Generate a public/private keypair, write to stdout."),
("derive-pubkey", None, DerivePubkeyOptions,
"Derive a public key from a private key."),
("migrate-crawler", None, MigrateCrawlerOptions,
"Write the crawler-history data as JSON."),
]
def postOptions(self):
if not hasattr(self, 'subOptions'):
@@ -88,6 +140,7 @@ each subcommand.
subDispatch = {
"generate-keypair": print_keypair,
"derive-pubkey": derive_pubkey,
"migrate-crawler": migrate_crawler,
}
def do_admin(options):