fix tests to use migrate command

meejah 2021-11-30 18:00:58 -07:00
parent 1b8ae8039e
commit 0a4bc385c5
3 changed files with 47 additions and 7 deletions

View File

@@ -93,6 +93,7 @@ class MigrateCrawlerOptions(BasedirOptions):
         )
         return t


 def migrate_crawler(options):
+    out = options.stdout
     storage = FilePath(options['basedir']).child("storage")
@@ -107,12 +108,12 @@ def migrate_crawler(options):
         existed = fp.exists()
         newfp = crawler._maybe_upgrade_pickle_to_json(fp, converter)
         if existed:
-            print("Converted '{}' to '{}'".format(fp.path, newfp.path))
+            print("Converted '{}' to '{}'".format(fp.path, newfp.path), file=out)
         else:
             if newfp.exists():
-                print("Already converted: '{}'".format(newfp.path))
+                print("Already converted: '{}'".format(newfp.path), file=out)
             else:
-                print("Not found: '{}'".format(fp.path))
+                print("Not found: '{}'".format(fp.path), file=out)


 class AdminCommand(BaseOptions):
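
Routing the output through options.stdout (instead of calling print() bare) is what lets the tests further down capture and inspect what the command printed. A minimal sketch of that pattern, with an illustrative message rather than real migration output:

from io import StringIO   # the tests use six.moves.StringIO for py2/py3 compatibility

def report(out):
    # same idea as migrate_crawler(): write to the stream supplied by the
    # options object rather than to the process-wide stdout
    print("Converted 'lease_checker.state' to 'lease_checker.state.json'", file=out)

buf = StringIO()
report(buf)
assert "Converted" in buf.getvalue()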

View File

@@ -148,6 +148,8 @@ def _confirm_json_format(fp):
     :returns FilePath: the JSON name of a state file
     """
     if fp.path.endswith(".json"):
         return fp
     jsonfp = fp.siblingExtension(".json")
+    if fp.exists():
+        raise MigratePickleFileError(fp)
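
For context, Twisted's FilePath.siblingExtension() appends the extension to the existing basename, so the JSON file sits next to the pickle it replaces. A small sketch of the naming (the path is illustrative, and the final "return jsonfp" is assumed since this hunk does not show the end of the function):

from twisted.python.filepath import FilePath

state = FilePath("/tmp/storage/lease_checker.state")   # illustrative path
json_state = state.siblingExtension(".json")
print(json_state.basename())   # lease_checker.state.json

# Per the added lines above: if the un-migrated pickle still exists,
# _confirm_json_format() raises MigratePickleFileError instead of silently
# reading it, so the operator has to run the migrate command first.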

View File

@@ -19,6 +19,7 @@ import time
 import os.path
 import re
 import json
+from six.moves import StringIO

 from twisted.trial import unittest
@@ -45,6 +46,13 @@ from allmydata.web.storage import (
     StorageStatusElement,
     remove_prefix
 )
+from allmydata.scripts.admin import (
+    MigrateCrawlerOptions,
+    migrate_crawler,
+)
+from allmydata.scripts.runner import (
+    Options,
+)
 from .common_util import FakeCanary
 from .common_web import (
@@ -1152,15 +1160,29 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         """
         # this file came from an "in the wild" tahoe version 1.16.0
         original_pickle = FilePath(__file__).parent().child("data").child("lease_checker.state.txt")
-        test_pickle = FilePath("lease_checker.state")
+        root = FilePath(self.mktemp())
+        storage = root.child("storage")
+        storage.makedirs()
+        test_pickle = storage.child("lease_checker.state")
         with test_pickle.open("w") as local, original_pickle.open("r") as remote:
             local.write(remote.read())

-        serial = _LeaseStateSerializer(test_pickle.path)
+        # convert from pickle format to JSON
+        top = Options()
+        top.parseOptions([
+            "admin", "migrate-crawler",
+            "--basedir", storage.parent().path,
+        ])
+        options = top.subOptions
+        while hasattr(options, "subOptions"):
+            options = options.subOptions
+        options.stdout = StringIO()
+        migrate_crawler(options)

         # the (existing) state file should have been upgraded to JSON
-        self.assertNot(test_pickle.exists())
+        self.assertFalse(test_pickle.exists())
         self.assertTrue(test_pickle.siblingExtension(".json").exists())
+        serial = _LeaseStateSerializer(test_pickle.path)

         self.assertEqual(
             serial.load(),
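
Pulled out of the test above, the invocation pattern is: parse the CLI arguments with the runner's Options, descend through subOptions to the leaf sub-command, replace its stdout with a StringIO, then call migrate_crawler() directly. A standalone sketch (the basedir path is illustrative; the test passes storage.parent().path):

from six.moves import StringIO
from allmydata.scripts.runner import Options
from allmydata.scripts.admin import migrate_crawler

top = Options()
top.parseOptions([
    "admin", "migrate-crawler",
    "--basedir", "/path/to/node",          # illustrative basedir
])
options = top.subOptions
while hasattr(options, "subOptions"):
    options = options.subOptions           # walk down to the migrate-crawler options
options.stdout = StringIO()                # capture output instead of printing
migrate_crawler(options)
print(options.stdout.getvalue())           # "Converted ..." / "Already converted ..." lines
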
@@ -1340,10 +1362,25 @@ class LeaseCrawler(unittest.TestCase, pollmixin.PollMixin):
         """
         # this file came from an "in the wild" tahoe version 1.16.0
         original_pickle = FilePath(__file__).parent().child("data").child("lease_checker.history.txt")
-        test_pickle = FilePath("lease_checker.history")
+        root = FilePath(self.mktemp())
+        storage = root.child("storage")
+        storage.makedirs()
+        test_pickle = storage.child("lease_checker.history")
         with test_pickle.open("w") as local, original_pickle.open("r") as remote:
             local.write(remote.read())

+        # convert from pickle format to JSON
+        top = Options()
+        top.parseOptions([
+            "admin", "migrate-crawler",
+            "--basedir", storage.parent().path,
+        ])
+        options = top.subOptions
+        while hasattr(options, "subOptions"):
+            options = options.subOptions
+        options.stdout = StringIO()
+        migrate_crawler(options)
+
         serial = _HistorySerializer(test_pickle.path)

         self.maxDiff = None
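
Both tests set up a storage/ directory under a temporary basedir and run the migration through the real option parser, which is what the commit message means by using the migrate command. The expected on-disk result, sketched as a rough check (it assumes the history pickle is removed on migration just as the first test asserts for the state pickle):

from twisted.python.filepath import FilePath

storage = FilePath("/path/to/node").child("storage")   # hypothetical node layout
for name in ("lease_checker.state", "lease_checker.history"):
    old = storage.child(name)
    migrated = old.siblingExtension(".json")
    # after running: tahoe admin migrate-crawler --basedir /path/to/node
    assert not old.exists()
    assert migrated.exists()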