Offload slow file operations to threads for snapshots and project "save as". Ref #1187 #1307.

This commit is contained in:
grossmj
2018-04-28 16:01:43 +07:00
parent 20294e284c
commit 50a922f83e
10 changed files with 246 additions and 221 deletions
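For context: "offloading to threads" here means running blocking file operations (copying a project directory for "save as", writing a snapshot archive) in the event loop's thread-pool executor so the controller keeps serving requests. A minimal illustrative sketch in the coroutine style this codebase uses; the function and arguments below are hypothetical, not code from this commit:

import asyncio
import shutil


@asyncio.coroutine
def copy_project_files(source_dir, target_dir):
    """Copy a project directory without blocking the event loop."""
    loop = asyncio.get_event_loop()
    # shutil.copytree() is slow and blocking; run it in the default
    # ThreadPoolExecutor so other coroutines keep running meanwhile.
    yield from loop.run_in_executor(None, shutil.copytree, source_dir, target_dir)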


@@ -29,122 +29,125 @@ log = logging.getLogger(__name__)
 @asyncio.coroutine
-def export_project(project, temporary_dir, include_images=False, keep_compute_id=False,
-                   allow_all_nodes=False, ignore_prefixes=None):
+def export_project(project, temporary_dir, include_images=False, keep_compute_id=False, allow_all_nodes=False):
     """
-    Export the project as zip. It's a ZipStream object.
-    The file will be read chunk by chunk when you iterate on
-    the zip.
-    It will ignore some files like snapshots and
+    Export a project to a zip file.
+    The file will be read chunk by chunk when you iterate over the zip stream.
+    Some files like snapshots and packet captures are ignored.
     :param temporary_dir: A temporary dir where to store intermediate data
-    :param keep_compute_id: If false replace all compute id by local it's the standard behavior for .gns3project to make them portable
-    :param allow_all_nodes: Allow all nodes type to be include in the zip even if not portable default False
+    :param include_images: save OS images to the zip file
+    :param keep_compute_id: If false, replace all compute IDs with "local" (standard behavior for .gns3project files to make them portable)
+    :param allow_all_nodes: Allow all node types to be included in the zip even if not portable
     :returns: ZipStream object
     """
-    # To avoid issue with data not saved we disallow the export of a running topologie
+    # To avoid issue with data not saved we disallow the export of a running project
     if project.is_running():
-        raise aiohttp.web.HTTPConflict(text="Running topology could not be exported")
+        raise aiohttp.web.HTTPConflict(text="Project must be stopped in order to export it")
     # Make sure we save the project
     project.dump()
-    z = zipstream.ZipFile(allowZip64=True)
+    zstream = zipstream.ZipFile(allowZip64=True)
     if not os.path.exists(project._path):
-        raise aiohttp.web.HTTPNotFound(text="The project doesn't exist at location {}".format(project._path))
+        raise aiohttp.web.HTTPNotFound(text="Project could not be found at '{}'".format(project._path))
     # First we process the .gns3 in order to be sure we don't have an error
     for file in os.listdir(project._path):
         if file.endswith(".gns3"):
-            images = yield from _export_project_file(project, os.path.join(project._path, file),
-                                                     z, include_images, keep_compute_id, allow_all_nodes, temporary_dir)
+            yield from _patch_project_file(project, os.path.join(project._path, file), zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir)
+    # Export the local files
     for root, dirs, files in os.walk(project._path, topdown=True):
-        files = [f for f in files if not _filter_files(os.path.join(root, f))]
+        files = [f for f in files if _is_exportable(os.path.join(root, f))]
         for file in files:
            path = os.path.join(root, file)
-            # Try open the file
+            # check if we can export the file
            try:
                open(path).close()
            except OSError as e:
-                msg = "Could not export file {}: {}".format(path, e)
-                log.warn(msg)
+                msg = "Could not export file '{}': {}".format(path, e)
+                log.warning(msg)
                project.controller.notification.emit("log.warning", {"message": msg})
                continue
+            # ignore the .gns3 file
            if file.endswith(".gns3"):
-                pass
-            else:
-                z.write(path, os.path.relpath(path, project._path), compress_type=zipfile.ZIP_DEFLATED)
+                continue
+            zstream.write(path, os.path.relpath(path, project._path), compress_type=zipfile.ZIP_DEFLATED)
+    # Export files from remote computes
     downloaded_files = set()
     for compute in project.computes:
         if compute.id != "local":
            compute_files = yield from compute.list_files(project)
            for compute_file in compute_files:
-                if not _filter_files(compute_file["path"]):
+                if _is_exportable(compute_file["path"]):
                    (fd, temp_path) = tempfile.mkstemp(dir=temporary_dir)
                    f = open(fd, "wb", closefd=True)
                    response = yield from compute.download_file(project, compute_file["path"])
                    while True:
-                        data = yield from response.content.read(512)
+                        data = yield from response.content.read(1024)
                        if not data:
                            break
                        f.write(data)
                    response.close()
                    f.close()
-                    z.write(temp_path, arcname=compute_file["path"], compress_type=zipfile.ZIP_DEFLATED)
+                    zstream.write(temp_path, arcname=compute_file["path"], compress_type=zipfile.ZIP_DEFLATED)
                    downloaded_files.add(compute_file['path'])
-    return z
+    return zstream
-def _filter_files(path):
+def _is_exportable(path):
     """
     :returns: True if file should not be included in the final archive
     """
-    s = os.path.normpath(path).split(os.path.sep)
+    # do not export snapshots
     if path.endswith("snapshots"):
-        return True
+        return False
-    # filter directory of snapshots
+    # do not export directories of snapshots
     if "{sep}snapshots{sep}".format(sep=os.path.sep) in path:
-        return True
+        return False
     try:
+        # do not export captures and other temporary directory
+        s = os.path.normpath(path).split(os.path.sep)
        i = s.index("project-files")
        if s[i + 1] in ("tmp", "captures", "snapshots"):
-            return True
+            return False
     except (ValueError, IndexError):
        pass
-    file_name = os.path.basename(path)
-    # Ignore log files and OS noises
-    if file_name.endswith('_log.txt') or file_name.endswith('.log') or file_name == '.DS_Store':
-        return True
-    return False
+    # do not export log files and OS noise
+    filename = os.path.basename(path)
+    if filename.endswith('_log.txt') or filename.endswith('.log') or filename == '.DS_Store':
+        return False
+    return True
 @asyncio.coroutine
-def _export_project_file(project, path, z, include_images, keep_compute_id, allow_all_nodes, temporary_dir):
+def _patch_project_file(project, path, zstream, include_images, keep_compute_id, allow_all_nodes, temporary_dir):
     """
-    Take a project file (.gns3) and patch it for the export
-    We rename the .gns3 project.gns3 to avoid the task to the client to guess the file name
-    :param path: Path of the .gns3
+    Patch a project file (.gns3) to export a project.
+    The .gns3 file is renamed to project.gns3
+    :param path: path of the .gns3 file
     """
-    # Image file that we need to include in the exported archive
+    # image files that we need to include in the exported archive
     images = []
-    with open(path) as f:
-        topology = json.load(f)
+    try:
+        with open(path) as f:
+            topology = json.load(f)
+    except (OSError, ValueError) as e:
+        raise aiohttp.web.HTTPConflict(text="Project file '{}' cannot be read: {}".format(path, e))
     if "topology" in topology:
         if "nodes" in topology["topology"]:
@@ -152,9 +155,9 @@ def _export_project_file(project, path, z, include_images, keep_compute_id, allo
                 compute_id = node.get('compute_id', 'local')
                 if node["node_type"] == "virtualbox" and node.get("properties", {}).get("linked_clone"):
-                    raise aiohttp.web.HTTPConflict(text="Topology with a linked {} clone could not be exported. Use qemu instead.".format(node["node_type"]))
+                    raise aiohttp.web.HTTPConflict(text="Projects with a linked {} clone node cannot be exported. Please use Qemu instead.".format(node["node_type"]))
                 if not allow_all_nodes and node["node_type"] in ["virtualbox", "vmware", "cloud"]:
-                    raise aiohttp.web.HTTPConflict(text="Topology with a {} could not be exported".format(node["node_type"]))
+                    raise aiohttp.web.HTTPConflict(text="Projects with a {} node cannot be exported".format(node["node_type"]))
                 if not keep_compute_id:
                     node["compute_id"] = "local"  # To make project portable all node by default run on local
@@ -186,78 +189,69 @@ def _export_project_file(project, path, z, include_images, keep_compute_id, allo
         local_images = set([i['image'] for i in images if i['compute_id'] == 'local'])
         for image in local_images:
-            _export_local_images(project, image, z)
+            _export_local_image(image, zstream)
         remote_images = set([
             (i['compute_id'], i['image_type'], i['image'])
             for i in images if i['compute_id'] != 'local'])
         for compute_id, image_type, image in remote_images:
-            yield from _export_remote_images(project, compute_id, image_type, image, z, temporary_dir)
+            yield from _export_remote_images(project, compute_id, image_type, image, zstream, temporary_dir)
-    z.writestr("project.gns3", json.dumps(topology).encode())
+    zstream.writestr("project.gns3", json.dumps(topology).encode())
     return images
-def _export_local_images(project, image, z):
+def _export_local_image(image, zstream):
     """
-    Take a project file (.gns3) and export images to the zip
-    :param image: Image path
-    :param z: Zipfile instance for the export
+    Exports a local image to the zip file.
+    :param image: image path
+    :param zstream: Zipfile instance for the export
     """
     from ..compute import MODULES
     for module in MODULES:
         try:
-            img_directory = module.instance().get_images_directory()
+            images_directory = module.instance().get_images_directory()
         except NotImplementedError:
            # Some modules don't have images
            continue
-        directory = os.path.split(img_directory)[-1:][0]
+        directory = os.path.split(images_directory)[-1:][0]
         if os.path.exists(image):
            path = image
         else:
-            path = os.path.join(img_directory, image)
+            path = os.path.join(images_directory, image)
         if os.path.exists(path):
            arcname = os.path.join("images", directory, os.path.basename(image))
-            z.write(path, arcname)
+            zstream.write(path, arcname)
            return
 @asyncio.coroutine
 def _export_remote_images(project, compute_id, image_type, image, project_zipfile, temporary_dir):
     """
-    Export specific image from remote compute
-    :param project:
-    :param compute_id:
-    :param image_type:
-    :param image:
-    :param project_zipfile:
-    :return:
+    Export specific image from remote compute.
     """
-    log.info("Obtaining image `{}` from `{}`".format(image, compute_id))
+    log.info("Downloading image '{}' from compute server '{}'".format(image, compute_id))
     try:
         compute = [compute for compute in project.computes if compute.id == compute_id][0]
     except IndexError:
-        raise aiohttp.web.HTTPConflict(
-            text="Cannot export image from `{}` compute. Compute doesn't exist.".format(compute_id))
+        raise aiohttp.web.HTTPConflict(text="Cannot export image from '{}' compute. Compute doesn't exist.".format(compute_id))
     (fd, temp_path) = tempfile.mkstemp(dir=temporary_dir)
     f = open(fd, "wb", closefd=True)
     response = yield from compute.download_image(image_type, image)
     if response.status != 200:
-        raise aiohttp.web.HTTPConflict(
-            text="Cannot export image from `{}` compute. Compute sent `{}` status.".format(
-                compute_id, response.status))
+        raise aiohttp.web.HTTPConflict(text="Cannot export image from '{}' compute. Compute returned status code {}.".format(compute_id, response.status))
     while True:
-        data = yield from response.content.read(512)
+        data = yield from response.content.read(1024)
         if not data:
            break
         f.write(data)
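Note on consuming the export: as the export_project() docstring above says, the returned ZipStream is built lazily and read chunk by chunk when iterated. A hypothetical caller sketch (the function and argument names are assumed, not part of this diff) that writes the archive to disk; per the commit message, this kind of blocking write loop is what gets offloaded to a thread:

@asyncio.coroutine
def write_project_archive(project, temporary_dir, output_path):
    # export_project() returns a zipstream.ZipFile; nothing is read from
    # disk until the stream is iterated.
    zstream = yield from export_project(project, temporary_dir)

    def _write():
        # Blocking chunk-by-chunk write, suitable for run_in_executor().
        with open(output_path, "wb") as f:
            for chunk in zstream:
                f.write(chunk)

    yield from asyncio.get_event_loop().run_in_executor(None, _write)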