Mirror of https://github.com/GNS3/gns3-server.git (synced 2024-12-23 14:42:28 +00:00)

Commit acb5103119: Merge remote-tracking branch 'origin/master'
@@ -29,7 +29,7 @@ import logging
 from io import StringIO, BytesIO
 
 from libcloud.compute.base import NodeAuthSSHKey
-from libcloud.storage.types import ContainerAlreadyExistsError, ContainerDoesNotExistError
+from libcloud.storage.types import ContainerAlreadyExistsError, ContainerDoesNotExistError, ObjectDoesNotExistError
 
 from .exceptions import ItemNotFound, KeyPairExists, MethodNotAllowed
 from .exceptions import OverLimit, BadRequest, ServiceUnavailable
@@ -216,11 +216,11 @@ class BaseCloudCtrl(object):
 
         return self.driver.list_key_pairs()
 
-    def upload_file(self, file_path, folder):
+    def upload_file(self, file_path, cloud_object_name):
         """
         Uploads file to cloud storage (if it is not identical to a file already in cloud storage).
         :param file_path: path to file to upload
-        :param folder: folder in cloud storage to save file in
+        :param cloud_object_name: name of file saved in cloud storage
         :return: True if file was uploaded, False if it was skipped because it already existed and was identical
         """
         try:
@@ -231,7 +231,6 @@ class BaseCloudCtrl(object):
             with open(file_path, 'rb') as file:
                 local_file_hash = hashlib.md5(file.read()).hexdigest()
 
-            cloud_object_name = folder + '/' + os.path.basename(file_path)
             cloud_hash_name = cloud_object_name + '.md5'
             cloud_objects = [obj.name for obj in gns3_container.list_objects()]
 
@@ -254,23 +253,24 @@ class BaseCloudCtrl(object):
     def list_projects(self):
         """
         Lists projects in cloud storage
-        :return: List of (project name, object name in storage)
+        :return: Dictionary where project names are keys and values are names of objects in storage
        """
 
         try:
             gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
-            projects = [
-                (obj.name.replace('projects/', '').replace('.zip', ''), obj.name)
+            projects = {
+                obj.name.replace('projects/', '').replace('.zip', ''): obj.name
                 for obj in gns3_container.list_objects()
                 if obj.name.startswith('projects/') and obj.name[-4:] == '.zip'
-            ]
+            }
             return projects
         except ContainerDoesNotExistError:
             return []
 
     def download_file(self, file_name, destination=None):
         """
-        Downloads file from cloud storage
+        Downloads file from cloud storage. If a file exists at destination, and it is identical to the file in cloud
+        storage, it is not downloaded.
         :param file_name: name of file in cloud storage to download
         :param destination: local path to save file to (if None, returns file contents as a file-like object)
         :return: A file-like object if file contents are returned, or None if file is saved to filesystem
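The comprehension above switches from building a list of tuples to building a dict keyed by project name, so callers of list_projects() now get a name-to-object mapping. A standalone illustration of the difference, using made-up object names rather than anything taken from the repository:

# Made-up example object names; the filter and name mangling mirror the hunk above.
object_names = ['projects/lab1.zip', 'projects/lab2.zip', 'images/ios.image']

# old return shape: list of (project name, object name) tuples
projects_list = [
    (name.replace('projects/', '').replace('.zip', ''), name)
    for name in object_names
    if name.startswith('projects/') and name[-4:] == '.zip'
]

# new return shape: dict mapping project name to object name
projects_dict = {
    name.replace('projects/', '').replace('.zip', ''): name
    for name in object_names
    if name.startswith('projects/') and name[-4:] == '.zip'
}

assert projects_list == [('lab1', 'projects/lab1.zip'), ('lab2', 'projects/lab2.zip')]
assert projects_dict == {'lab1': 'projects/lab1.zip', 'lab2': 'projects/lab2.zip'}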
@@ -278,7 +278,22 @@ class BaseCloudCtrl(object):
             gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
             storage_object = gns3_container.get_object(file_name)
 
             if destination is not None:
+                if os.path.isfile(destination):
+                    # if a file exists at destination and its hash matches that of the
+                    # file in cloud storage, don't download it
+                    with open(destination, 'rb') as f:
+                        local_file_hash = hashlib.md5(f.read()).hexdigest()
+
+                    hash_object = gns3_container.get_object(file_name + '.md5')
+                    cloud_object_hash = ''
+                    for chunk in hash_object.as_stream():
+                        cloud_object_hash += chunk.decode('utf8')
+
+                    if local_file_hash == cloud_object_hash:
+                        return
+
                 storage_object.download(destination)
             else:
                 contents = b''
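The new early return in download_file compares the MD5 of the local file against the '.md5' companion object stored next to the file in the container. A minimal standalone sketch of that comparison; needs_download is a hypothetical helper name, not something defined in the codebase:

import hashlib
import os

def needs_download(destination, cloud_object_hash):
    # True when there is no local copy, or the local copy's MD5 differs from
    # the hex digest stored in the '<object name>.md5' companion object.
    if not os.path.isfile(destination):
        return True
    with open(destination, 'rb') as f:
        local_file_hash = hashlib.md5(f.read()).hexdigest()
    return local_file_hash != cloud_object_hash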
@@ -287,3 +302,40 @@ class BaseCloudCtrl(object):
                     contents += chunk
 
                 return BytesIO(contents)
+
+    def find_storage_image_names(self, images_to_find):
+        """
+        Maps names of image files to their full name in cloud storage
+        :param images_to_find: list of image names to find
+        :return: A dictionary where keys are image names, and values are the corresponding names of
+                 the files in cloud storage
+        """
+        gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
+        images_in_storage = [obj.name for obj in gns3_container.list_objects() if obj.name.startswith('images/')]
+
+        images = {}
+        for image_name in images_to_find:
+            images_with_same_name =\
+                list(filter(lambda storage_image_name: storage_image_name.endswith(image_name), images_in_storage))
+
+            if len(images_with_same_name) == 1:
+                images[image_name] = images_with_same_name[0]
+            else:
+                raise Exception('Image does not exist in cloud storage or is duplicated')
+
+        return images
+
+    def delete_file(self, file_name):
+        gns3_container = self.storage_driver.get_container(self.GNS3_CONTAINER_NAME)
+
+        try:
+            object_to_delete = gns3_container.get_object(file_name)
+            object_to_delete.delete()
+        except ObjectDoesNotExistError:
+            pass
+
+        try:
+            hash_object = gns3_container.get_object(file_name + '.md5')
+            hash_object.delete()
+        except ObjectDoesNotExistError:
+            pass
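find_storage_image_names() resolves a bare image file name to exactly one object under 'images/' and raises when the name is missing or ambiguous. A standalone sketch of that matching rule; the function name and the storage object names below are made up for illustration:

def resolve_image_names(images_to_find, images_in_storage):
    # Same matching rule as the new method: suffix match, exactly one hit required.
    images = {}
    for image_name in images_to_find:
        matches = [name for name in images_in_storage if name.endswith(image_name)]
        if len(matches) != 1:
            raise Exception('Image does not exist in cloud storage or is duplicated')
        images[image_name] = matches[0]
    return images

storage = ['images/c3725-adventerprisek9-mz.124-15.T14.image', 'images/linux-microcore-3.4.1.img']
print(resolve_image_names(['linux-microcore-3.4.1.img'], storage))
# {'linux-microcore-3.4.1.img': 'images/linux-microcore-3.4.1.img'}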
@@ -42,11 +42,9 @@ class RackspaceCtrl(BaseCloudCtrl):
 
     """ Controller class for interacting with Rackspace API. """
 
-    def __init__(self, username, api_key, gns3_ias_url):
+    def __init__(self, username, api_key, *args, **kwargs):
         super(RackspaceCtrl, self).__init__(username, api_key)
 
-        self.gns3_ias_url = gns3_ias_url
-
         # set this up so it can be swapped out with a mock for testing
         self.post_fn = requests.post
         self.driver_cls = get_driver(Provider.RACKSPACE)
@@ -225,55 +223,6 @@ class RackspaceCtrl(BaseCloudCtrl):
         self.region = region
         return True
 
-    def _get_shared_images(self, username, region, gns3_version):
-        """
-        Given a GNS3 version, ask gns3-ias to share compatible images
-
-        Response:
-        [{"created_at": "", "schema": "", "status": "", "member_id": "", "image_id": "", "updated_at": ""},]
-        or, if access was already asked
-        [{"image_id": "", "member_id": "", "status": "ALREADYREQUESTED"},]
-        """
-        endpoint = self.gns3_ias_url+"/images/grant_access"
-        params = {
-            "user_id": username,
-            "user_region": region.upper(),
-            "gns3_version": gns3_version,
-        }
-        try:
-            response = requests.get(endpoint, params=params)
-        except requests.ConnectionError:
-            raise ApiError("Unable to connect to IAS")
-
-        status = response.status_code
-
-        if status == 200:
-            return response.json()
-        elif status == 404:
-            raise ItemNotFound()
-        else:
-            raise ApiError("IAS status code: %d" % status)
-
-    def list_images(self):
-        """
-        Return a dictionary containing RackSpace server images
-        retrieved from gns3-ias server
-        """
-        if not (self.tenant_id and self.region):
-            return {}
-
-        try:
-            shared_images = self._get_shared_images(self.tenant_id, self.region, __version__)
-            images = {}
-            for i in shared_images:
-                images[i['image_id']] = i['image_name']
-            return images
-        except ItemNotFound:
-            return {}
-        except ApiError as e:
-            log.error('Error while retrieving image list: %s' % e)
-            return {}
-
     def get_image(self, image_id):
         return self.driver.get_image(image_id)
 
@@ -290,12 +239,11 @@ def get_provider(cloud_settings):
         username = cloud_settings['cloud_user_name']
         apikey = cloud_settings['cloud_api_key']
         region = cloud_settings['cloud_region']
-        ias_url = cloud_settings.get('gns3_ias_url', '')
     except KeyError as e:
         log.error("Unable to create cloud provider: {}".format(e))
         return
 
-    provider = RackspaceCtrl(username, apikey, ias_url)
+    provider = RackspaceCtrl(username, apikey)
 
     if not provider.authenticate():
         log.error("Authentication failed for cloud provider")
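One reading of the new *args/**kwargs signature is backwards compatibility: call sites that still pass the removed gns3_ias_url argument keep working, while get_provider() above now passes only the username and API key. A toy sketch of that effect with stand-in classes, not the real controller:

class BaseCtrl(object):
    def __init__(self, username, api_key):
        self.username = username
        self.api_key = api_key

class Ctrl(BaseCtrl):
    # Extra positional/keyword arguments are accepted and ignored,
    # mirroring the new RackspaceCtrl.__init__ signature.
    def __init__(self, username, api_key, *args, **kwargs):
        super(Ctrl, self).__init__(username, api_key)

Ctrl('user', 'key')                        # new-style call, as in get_provider()
Ctrl('user', 'key', 'http://ias.invalid')  # legacy extra argument is swallowed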
@@ -79,12 +79,12 @@ Options:
   --instance_id     ID of the Rackspace instance to terminate
   --cloud_region    Region of instance
 
-  --deadtime        How long in seconds can the communication lose exist before we
+  --dead_time       How long in seconds can the communication lose exist before we
                     shutdown this instance.
                     Default:
-                    Example --deadtime=3600 (60 minutes)
+                    Example --dead_time=3600 (60 minutes)
 
-  --check-interval  Defaults to --deadtime, used for debugging
+  --check-interval  Defaults to --dead_time, used for debugging
 
   --init-wait       Inital wait time, how long before we start pulling the file.
                     Default: 300 (5 min)
@@ -113,7 +113,7 @@ def parse_cmd_line(argv):
         "cloud_api_key=",
         "instance_id=",
         "region=",
-        "deadtime=",
+        "dead_time=",
         "init-wait=",
         "check-interval=",
         "file=",
@@ -133,7 +133,7 @@ def parse_cmd_line(argv):
     cmd_line_option_list["cloud_api_key"] = None
     cmd_line_option_list["instance_id"] = None
     cmd_line_option_list["region"] = None
-    cmd_line_option_list["deadtime"] = 60 * 60 #minutes
+    cmd_line_option_list["dead_time"] = 60 * 60 #minutes
     cmd_line_option_list["check-interval"] = None
     cmd_line_option_list["init-wait"] = 5 * 60
     cmd_line_option_list["file"] = None
@@ -150,6 +150,7 @@ def parse_cmd_line(argv):
 
 
     get_gns3secrets(cmd_line_option_list)
+    cmd_line_option_list["dead_time"] = int(cmd_line_option_list["dead_time"])
 
     for opt, val in opts:
         if (opt in ("-h", "--help")):
@@ -167,8 +168,8 @@ def parse_cmd_line(argv):
             cmd_line_option_list["instance_id"] = val
         elif (opt in ("--region")):
             cmd_line_option_list["region"] = val
-        elif (opt in ("--deadtime")):
-            cmd_line_option_list["deadtime"] = int(val)
+        elif (opt in ("--dead_time")):
+            cmd_line_option_list["dead_time"] = int(val)
         elif (opt in ("--check-interval")):
             cmd_line_option_list["check-interval"] = int(val)
         elif (opt in ("--init-wait")):
@@ -183,7 +184,7 @@ def parse_cmd_line(argv):
     if cmd_line_option_list["shutdown"] == False:
 
         if cmd_line_option_list["check-interval"] is None:
-            cmd_line_option_list["check-interval"] = cmd_line_option_list["deadtime"] + 120
+            cmd_line_option_list["check-interval"] = cmd_line_option_list["dead_time"] + 120
 
     if cmd_line_option_list["cloud_user_name"] is None:
         print("You need to specify a username!!!!")
@@ -317,9 +318,9 @@ def monitor_loop(options):
        delta = now - file_last_modified
        log.debug("File last updated: %s seconds ago" % (delta.seconds))
 
-        if delta.seconds > options["deadtime"]:
-            log.warning("Deadtime exceeded, terminating instance ...")
-            #Terminate involes many layers of HTTP / API calls, lots of
+        if delta.seconds > options["dead_time"]:
+            log.warning("Dead time exceeded, terminating instance ...")
+            #Terminate involves many layers of HTTP / API calls, lots of
             #different errors types could occur here.
             try:
                 rksp = Rackspace(options)
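The rename touches the usage text, the getopt long-option list, the defaults dictionary, and monitor_loop(), so the option is spelled dead_time throughout. A trimmed, standalone sketch of how the renamed flag is parsed and converted to an integer, following the script's getopt style; only this one option is included and the function name is invented:

import getopt

def parse_dead_time(argv):
    # Default is one hour, as in the script's defaults dictionary.
    options = {"dead_time": 60 * 60}
    opts, _ = getopt.getopt(argv, "", ["dead_time="])
    for opt, val in opts:
        if opt == "--dead_time":
            options["dead_time"] = int(val)
    return options

print(parse_dead_time(["--dead_time=3600"]))  # {'dead_time': 3600}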
@@ -41,7 +41,7 @@ class Rackspace(object):
         self.authenticated = False
         self.hostname = socket.gethostname()
         self.instance_id = options["instance_id"]
-        self.region = options["region"]
+        self.region = options["cloud_region"]
 
         log.debug("Authenticating with Rackspace")
         log.debug("My hostname: %s" % (self.hostname))
@@ -59,7 +59,7 @@ class Rackspace(object):
         self.rksp.set_region(self.region)
         for server in self.rksp.list_instances():
             log.debug("Checking server: %s" % (server.name))
-            if server.name.lower() == self.hostname.lower() and server.id == self.instance_id:
+            if server.id == self.instance_id:
                 log.info("Found matching instance: %s" % (server.id))
                 log.info("Startup id: %s" % (self.instance_id))
                 return server
@@ -83,6 +83,7 @@ class DeadMan(IModule):
         cmd.append("--file")
         cmd.append("%s" % (self._heartbeat_file))
         cmd.append("--background")
+        cmd.append("--debug")
         log.info("Deadman: Running command: %s"%(cmd))
 
         process = subprocess.Popen(cmd, stderr=subprocess.STDOUT, shell=False)
@@ -94,7 +95,6 @@ class DeadMan(IModule):
         """
 
         cmd = []
-
         cmd.append("gns3dms")
         cmd.append("-k")
         log.info("Deadman: Running command: %s"%(cmd))
@@ -760,7 +760,6 @@ class VM(object):
             if was_auto_started:
                 router.stop()
 
-            validated_idlepc = "0x0"
             response = {"id": router.id,
                         "logs": logs,
                         "idlepc": validated_idlepc}
@@ -761,6 +761,29 @@ class QemuVM(object):
                     log.debug("Download of {} complete.".format(src))
                 self.hdb_disk_image = dst
 
+            if self.initrd != "":
+                _, filename = ntpath.split(self.initrd)
+                src = '{}/{}'.format(self.cloud_path, filename)
+                dst = os.path.join(self.working_dir, filename)
+                if not os.path.isfile(dst):
+                    cloud_settings = Config.instance().cloud_settings()
+                    provider = get_provider(cloud_settings)
+                    log.debug("Downloading file from {} to {}...".format(src, dst))
+                    provider.download_file(src, dst)
+                    log.debug("Download of {} complete.".format(src))
+                self.initrd = dst
+            if self.kernel_image != "":
+                _, filename = ntpath.split(self.kernel_image)
+                src = '{}/{}'.format(self.cloud_path, filename)
+                dst = os.path.join(self.working_dir, filename)
+                if not os.path.isfile(dst):
+                    cloud_settings = Config.instance().cloud_settings()
+                    provider = get_provider(cloud_settings)
+                    log.debug("Downloading file from {} to {}...".format(src, dst))
+                    provider.download_file(src, dst)
+                    log.debug("Download of {} complete.".format(src))
+                self.kernel_image = dst
+
         self._command = self._build_command()
         try:
             log.info("starting QEMU: {}".format(self._command))
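The initrd and kernel_image blocks repeat the download-if-missing pattern already used for the disk images. A hypothetical helper that captures the shared steps; the function name is invented, and the provider argument stands in for whatever get_provider(cloud_settings) returns in the surrounding code:

import ntpath
import os

def fetch_from_cloud(provider, cloud_path, working_dir, image_path):
    # Download <cloud_path>/<basename> into working_dir unless it is already
    # present locally, then return the local path, mirroring the per-image
    # blocks in the hunk above.
    _, filename = ntpath.split(image_path)
    src = '{}/{}'.format(cloud_path, filename)
    dst = os.path.join(working_dir, filename)
    if not os.path.isfile(dst):
        provider.download_file(src, dst)
    return dst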
@@ -44,7 +44,7 @@ import uuid
 
 SCRIPT_NAME = os.path.basename(__file__)
 
-#Is the full path when used as an import
+# This is the full path when used as an import
 SCRIPT_PATH = os.path.dirname(__file__)
 
 if not SCRIPT_PATH:
@@ -14,7 +14,7 @@ def test_port(hypervisor):
 
 def test_host(hypervisor):
 
-    assert hypervisor.host == "127.0.0.1"
+    assert hypervisor.host == "0.0.0.0"
 
 
 def test_working_dir(hypervisor):