# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import sys
import os
import struct
import stat
import asyncio
import aiofiles
import socket
import shutil
import re
import logging

from uuid import UUID, uuid4

from gns3server.utils.asyncio import cancellable_wait_run_in_executor
from gns3server.utils.interfaces import is_interface_up
from gns3server.compute.compute_error import ComputeError, ComputeForbiddenError, ComputeNotFoundError

from ..config import Config
from ..utils.asyncio import wait_run_in_executor
from ..utils import force_unix_path
from ..utils.images import md5sum, remove_checksum, images_directories, default_images_directory, list_images

from .project_manager import ProjectManager
from .port_manager import PortManager
from .nios.nio_udp import NIOUDP
from .nios.nio_tap import NIOTAP
from .nios.nio_ethernet import NIOEthernet
from .error import NodeError, ImageMissingError

log = logging.getLogger(__name__)

CHUNK_SIZE = 1024 * 8  # 8KB


class BaseManager:

    """
    Base class for all Manager classes.
    Responsible for the management of a pool of nodes of the same type.
    """

    _convert_lock = None
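
    # Note: concrete managers are expected to provide at least the following class
    # attributes, which this base class relies on (illustrative summary of the
    # existing contract; the names on the right are placeholders):
    #
    #     _NODE_CLASS = SomeNodeClass   # node class instantiated by create_node()
    #     _NODE_TYPE = "some_type"      # node type string used for image lookups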

    def __init__(self):

        BaseManager._convert_lock = asyncio.Lock()
        self._nodes = {}
        self._port_manager = None
        self._config = Config.instance()

    @classmethod
    def node_types(cls):
        """
        :returns: Array of supported node types on this computer
        """

        # By default we transform DockerVM => docker but you can override this (see builtins)
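        # Illustrative example: for a hypothetical ``_NODE_CLASS`` named ``QemuVM``
        # this would return ["qemu"].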
        return [cls._NODE_CLASS.__name__.rstrip('VM').lower()]

    @property
    def nodes(self):
        """
        List of nodes managed by the module
        """

        return self._nodes.values()

    @classmethod
    def instance(cls):
        """
        Singleton to return only one instance of BaseManager.

        :returns: instance of BaseManager
        """

        if not hasattr(cls, "_instance") or cls._instance is None:
            cls._instance = cls()
        return cls._instance

    @property
    def module_name(self):
        """
        Returns the module name.

        :returns: module name
        """

        return self.__class__.__name__

    @property
    def port_manager(self):
        """
        Returns the port manager.

        :returns: Port manager
        """

        if self._port_manager is None:
            self._port_manager = PortManager.instance()
        return self._port_manager

    @port_manager.setter
    def port_manager(self, new_port_manager):

        self._port_manager = new_port_manager

    @property
    def config(self):
        """
        Returns the server config.

        :returns: Config
        """

        return self._config

    async def unload(self):
        """
        Closes all the nodes managed by this module and resets the manager singleton.
        """

        tasks = []
        for node_id in self._nodes.keys():
            tasks.append(asyncio.ensure_future(self.close_node(node_id)))

        if tasks:
            done, _ = await asyncio.wait(tasks)
            for future in done:
                try:
                    future.result()
                except (Exception, GeneratorExit) as e:
                    log.error("Could not close node: {}".format(e), exc_info=1)
                    continue

        if hasattr(BaseManager, "_instance"):
            BaseManager._instance = None
        log.debug("Module {} unloaded".format(self.module_name))

    def get_node(self, node_id, project_id=None):
        """
        Returns a Node instance.

        :param node_id: Node identifier
        :param project_id: Project identifier

        :returns: Node instance
        """

        if project_id:
            # check that the project exists
            project = ProjectManager.instance().get_project(project_id)

        try:
            UUID(node_id, version=4)
        except ValueError:
            raise ComputeError("Node ID {} is not a valid UUID".format(node_id))

        if node_id not in self._nodes:
            raise ComputeNotFoundError("Node ID {} doesn't exist".format(node_id))

        node = self._nodes[node_id]
        if project_id:
            if node.project.id != project.id:
                raise ComputeNotFoundError("Project ID {} doesn't belong to node {}".format(project_id, node.name))

        return node

    async def create_node(self, name, project_id, node_id, *args, **kwargs):
        """
        Create a new node.

        :param name: Node name
        :param project_id: Project identifier
        :param node_id: Node identifier (pass an existing identifier to restore a node)
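
        Example (illustrative; ``manager`` stands for a concrete manager instance,
        and a new identifier is generated when ``node_id`` is falsy)::

            node = await manager.create_node("R1", project_id, None)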
        """

        if node_id in self._nodes:
            return self._nodes[node_id]

        project = ProjectManager.instance().get_project(project_id)
        if not node_id:
            node_id = str(uuid4())

        node = self._NODE_CLASS(name, node_id, project, self, *args, **kwargs)
        if asyncio.iscoroutinefunction(node.create):
            await node.create()
        else:
            node.create()
        self._nodes[node.id] = node
        project.add_node(node)
        return node

    async def duplicate_node(self, source_node_id, destination_node_id):
        """
        Duplicate a node

        :param source_node_id: Source node identifier
        :param destination_node_id: Destination node identifier

        :returns: New node instance
        """

        source_node = self.get_node(source_node_id)
        destination_node = self.get_node(destination_node_id)

        # Some nodes (e.g. switches) don't have a working directory
        if not hasattr(destination_node, "working_dir"):
            return destination_node

        destination_dir = destination_node.working_dir
        try:
            shutil.rmtree(destination_dir)
            shutil.copytree(source_node.working_dir, destination_dir, symlinks=True, ignore_dangling_symlinks=True)
        except OSError as e:
            raise ComputeError("Cannot duplicate node data: {}".format(e))

        # Force a refresh of the name: assigning a temporary name and then restoring
        # the original one rewrites the configuration files of the duplicated node.
        node_name = destination_node.name
        destination_node.name = node_name + str(uuid4())
        destination_node.name = node_name

        return destination_node

    async def close_node(self, node_id):
        """
        Close a node

        :param node_id: Node identifier

        :returns: Node instance
        """

        node = self.get_node(node_id)
        if asyncio.iscoroutinefunction(node.close):
            await node.close()
        else:
            node.close()
        return node

    async def project_closing(self, project):
        """
        Called when a project is about to be closed.

        :param project: Project instance
        """

        pass

    async def project_closed(self, project):
        """
        Called when a project is closed.

        :param project: Project instance
        """

        for node in project.nodes:
            if node.id in self._nodes:
                del self._nodes[node.id]

    async def delete_node(self, node_id):
        """
        Delete a node. The node working directory will be destroyed when a commit is received.

        :param node_id: Node identifier

        :returns: Node instance
        """

        node = None
        try:
            node = self.get_node(node_id)
            await self.close_node(node_id)
        finally:
            if node:
                node.project.emit("node.deleted", node)
                await node.project.remove_node(node)
        if node.id in self._nodes:
            del self._nodes[node.id]
        return node

    @staticmethod
    def has_privileged_access(executable):
        """
        Check if an executable has the right to attach to Ethernet and TAP adapters.

        :param executable: executable path

        :returns: True or False
        """

        if sys.platform.startswith("win"):
            # do not check anything on Windows
            return True

        if sys.platform.startswith("darwin"):
            if os.stat(executable).st_uid == 0:
                return True

        if os.geteuid() == 0:
            # we are root, so we should have privileged access.
            return True

        if os.stat(executable).st_uid == 0 and (os.stat(executable).st_mode & stat.S_ISUID or os.stat(executable).st_mode & stat.S_ISGID):
            # the executable is owned by root and has the setuid or setgid bit set.
            return True

        # test if the executable has the CAP_NET_RAW capability (Linux only)
        try:
            if sys.platform.startswith("linux") and "security.capability" in os.listxattr(executable):
                caps = os.getxattr(executable, "security.capability")
                # check whether bit 13 (CAP_NET_RAW) is set in the 2nd 32-bit word
                # (the low 32 bits of the permitted capability set)
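                # Illustrative arithmetic: CAP_NET_RAW is capability number 13, so the
                # mask tested below is 1 << 13 == 0x2000.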
                if struct.unpack("<IIIII", caps)[1] & 1 << 13:
                    return True
        except (AttributeError, OSError) as e:
            log.error("could not determine if CAP_NET_RAW capability is set for {}: {}".format(executable, e))

        return False

    def create_nio(self, nio_settings):
        """
        Creates a new NIO.

        :param nio_settings: information to create the NIO
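
        Illustrative ``nio_settings`` shapes handled by this method (the values are
        placeholders)::

            {"type": "nio_udp", "lport": 10000, "rhost": "127.0.0.1", "rport": 10001}
            {"type": "nio_tap", "tap_device": "tap0"}
            {"type": "nio_ethernet", "ethernet_device": "eth0"}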

        :returns: a NIO object
        """

        nio = None
        if nio_settings["type"] == "nio_udp":
            lport = nio_settings["lport"]
            rhost = nio_settings["rhost"]
            rport = nio_settings["rport"]
            try:
                info = socket.getaddrinfo(rhost, rport, socket.AF_UNSPEC, socket.SOCK_DGRAM, 0, socket.AI_PASSIVE)
                if not info:
                    raise ComputeError("getaddrinfo returns an empty list on {}:{}".format(rhost, rport))
                for res in info:
                    af, socktype, proto, _, sa = res
                    with socket.socket(af, socktype, proto) as sock:
                        sock.connect(sa)
            except OSError as e:
                raise ComputeError("Could not create an UDP connection to {}:{}: {}".format(rhost, rport, e))
            nio = NIOUDP(lport, rhost, rport)
            nio.filters = nio_settings.get("filters", {})
            nio.suspend = nio_settings.get("suspend", False)
        elif nio_settings["type"] == "nio_tap":
            tap_device = nio_settings["tap_device"]
            # if not is_interface_up(tap_device):
            #     raise aiohttp.web.HTTPConflict(text="TAP interface {} does not exist or is down".format(tap_device))
            # FIXME: check for permissions on tap device
            # if not self.has_privileged_access(executable):
            #     raise aiohttp.web.HTTPForbidden(text="{} has no privileged access to {}.".format(executable, tap_device))
            nio = NIOTAP(tap_device)
        elif nio_settings["type"] in ("nio_generic_ethernet", "nio_ethernet"):
            ethernet_device = nio_settings["ethernet_device"]
            if not is_interface_up(ethernet_device):
                raise ComputeError("Ethernet interface {} does not exist or is down".format(ethernet_device))
            nio = NIOEthernet(ethernet_device)
        assert nio is not None
        return nio

    async def stream_pcap_file(self, nio, project_id):
        """
        Streams a PCAP file.

        :param nio: NIO object
        :param project_id: Project identifier
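
        Example consumption of the yielded chunks (illustrative; ``manager`` and
        ``response`` are placeholders)::

            async for chunk in manager.stream_pcap_file(nio, project_id):
                await response.write(chunk)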
        """

        if not nio.capturing:
            raise ComputeError("Nothing to stream because there is no packet capture active")

        project = ProjectManager.instance().get_project(project_id)
        path = os.path.normpath(os.path.join(project.capture_working_directory(), nio.pcap_output_file))

        # Raise an error if the user tries to escape the capture working directory
        if path[0] == ".":
            raise ComputeForbiddenError("Cannot stream PCAP file outside the capture working directory")

        try:
            with open(path, "rb") as f:
                while nio.capturing:
                    data = f.read(CHUNK_SIZE)
                    if not data:
                        await asyncio.sleep(0.1)
                        continue
                    yield data
        except FileNotFoundError:
            raise ComputeNotFoundError("File '{}' not found".format(path))
        except PermissionError:
            raise ComputeForbiddenError("File '{}' cannot be accessed".format(path))

    def get_abs_image_path(self, path, extra_dir=None):
        """
        Get the absolute path of an image

        :param path: file path
        :param extra_dir: an additional directory to be added to the search path

        :returns: file path
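
        Example (illustrative paths; ``manager`` is a placeholder)::

            manager.get_abs_image_path("IOSv.qcow2")
            # -> "/home/user/GNS3/images/QEMU/IOSv.qcow2", found by searching the
            #    valid images directories recursively, then the default directory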
        """

        if not path or path == ".":
            return ""
        orig_path = path

        img_directory = self.get_images_directory()
        valid_directory_prefices = images_directories(self._NODE_TYPE)
        if extra_dir:
            valid_directory_prefices.append(extra_dir)

        # Windows paths should not be sent to a UNIX server
        if not sys.platform.startswith("win"):
            if re.match(r"^[A-Z]:", path) is not None:
                raise NodeError("{} is not allowed on this remote server. Please only use a file from '{}'".format(path, img_directory))

        if not os.path.isabs(path):
            for directory in valid_directory_prefices:
                log.debug("Searching for image '{}' in '{}'".format(orig_path, directory))
                path = self._recursive_search_file_in_directory(directory, orig_path)
                if path:
                    return force_unix_path(path)

            # Not found, try the default directory
            log.debug("Searching for image '{}' in default directory".format(orig_path))
            s = os.path.split(orig_path)
            path = force_unix_path(os.path.join(img_directory, *s))
            if os.path.exists(path):
                return path
            raise ImageMissingError(orig_path)

        # For a local server we allow using an absolute path outside the images directory
        if Config.instance().settings.Server.local is True:
            log.debug("Searching for '{}'".format(orig_path))
            path = force_unix_path(path)
            if os.path.exists(path):
                return path
            raise ImageMissingError(orig_path)

        # Check to see if the path is an absolute path inside a valid images directory
        path = force_unix_path(path)
        for directory in valid_directory_prefices:
            log.debug("Searching for image '{}' in '{}'".format(orig_path, directory))
            if os.path.commonprefix([directory, path]) == directory:
                if os.path.exists(path):
                    return path
                raise ImageMissingError(orig_path)
        raise NodeError("{} is not allowed on this remote server. Please only use a file from '{}'".format(path, img_directory))

    def _recursive_search_file_in_directory(self, directory, searched_file):
        """
        Search for a file in a directory and its subdirectories

        :returns: Path or None if not found
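
        Matching rule (illustrative example): a ``searched_file`` of "IOS/c3725.image"
        matches a file named "c3725.image" whose parent directory basename is "IOS",
        while a bare "c3725.image" matches regardless of the parent directory.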
        """

        s = os.path.split(searched_file)

        for root, dirs, files in os.walk(directory):
            for file in files:
                # If the filename is the same
                if s[1] == file and (s[0] == '' or s[0] == os.path.basename(root)):
                    path = os.path.normpath(os.path.join(root, s[1]))
                    if os.path.exists(path):
                        return path
        return None

    def get_relative_image_path(self, path, extra_dir=None):
        """
        Get a path relative to the images directory, or an absolute path if the
        path is not located inside the images directory.

        :param path: file path
        :param extra_dir: an additional directory to be added to the search path

        :returns: file path
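
        Example (illustrative; assuming the top images directory is the matching
        prefix)::

            manager.get_relative_image_path("/home/user/GNS3/images/IOS/c3725.image")
            # -> "IOS/c3725.image"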
        """

        if not path:
            return ""

        path = force_unix_path(self.get_abs_image_path(path, extra_dir))
        img_directory = self.get_images_directory()

        valid_directory_prefices = images_directories(self._NODE_TYPE)
        if extra_dir:
            valid_directory_prefices.append(extra_dir)

        for directory in valid_directory_prefices:
            if os.path.commonprefix([directory, path]) == directory:
                relpath = os.path.relpath(path, directory)
                # Sub-directory paths are only returned for the top images directory
                # (image type sub-directories, for compatibility with old releases)
                if os.sep not in relpath or directory == img_directory:
                    return relpath
        return path

    async def list_images(self):
        """
        Return the list of available images for this node type

        :returns: Array of hashes
        """

        try:
            return list_images(self._NODE_TYPE)
        except OSError as e:
            raise ComputeError("Can not list images {}".format(e))

    def get_images_directory(self):
        """
        Get the image directory on disk
        """

        if hasattr(self, "_NODE_TYPE"):
            return default_images_directory(self._NODE_TYPE)
        raise NotImplementedError

    async def write_image(self, filename, stream):
        """
        Write an image file to the images directory.

        :param filename: Image filename
        :param stream: Asynchronous iterable yielding the image data chunks
        """

        directory = self.get_images_directory()
        path = os.path.abspath(os.path.join(directory, *os.path.split(filename)))
        if os.path.commonprefix([directory, path]) != directory:
            raise ComputeForbiddenError("Could not write image: {}, {} is forbidden".format(filename, path))
        log.info("Writing image file to '{}'".format(path))
        try:
            remove_checksum(path)
            # We store the file under its final name only when the upload is finished
            tmp_path = path + ".tmp"
            os.makedirs(os.path.dirname(path), exist_ok=True)
            async with aiofiles.open(tmp_path, 'wb') as f:
                async for chunk in stream:
                    await f.write(chunk)
            os.chmod(tmp_path, stat.S_IWRITE | stat.S_IREAD | stat.S_IEXEC)
            shutil.move(tmp_path, path)
            await cancellable_wait_run_in_executor(md5sum, path)
        except OSError as e:
            raise ComputeError("Could not write image: {} because {}".format(filename, e))

    def reset(self):
        """
        Reset module for tests
        """
        self._nodes = {}