Mirror of https://github.com/GNS3/gns3-server.git (synced 2025-02-05).
Improvements for built-in disks
* Checksum is updated in the database for updated disks.
* Built-in disks can no longer be pruned.
Commit 4758431c76 (parent b472f6dbf8)
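For context, the pruning behaviour touched by this commit is exposed as DELETE /prune on the images router (replacing the old POST endpoint, see the diff below). A minimal usage sketch, assuming the controller listens on the default port 3080, that the images router is mounted under /v3/images, and that a valid bearer token is at hand (all assumptions, not part of this diff):

import asyncio
import httpx


async def prune_unused_images(token: str) -> None:
    # Images not attached to any template are deleted; built-in disks are
    # skipped server-side (see get_builtin_disks() introduced in this commit).
    async with httpx.AsyncClient(base_url="http://localhost:3080/v3") as client:
        response = await client.delete(
            "/images/prune",
            headers={"Authorization": f"Bearer {token}"},
        )
        response.raise_for_status()  # expects 204 No Content on success


# asyncio.run(prune_unused_images("<access token>"))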
@@ -27,11 +27,11 @@ from fastapi.encoders import jsonable_encoder
 from starlette.requests import ClientDisconnect
 from sqlalchemy.orm.exc import MultipleResultsFound
 from typing import List, Optional
-from gns3server import schemas

+from gns3server import schemas
 from gns3server.config import Config
 from gns3server.compute.qemu import Qemu
-from gns3server.utils.images import InvalidImageError, write_image, read_image_info, default_images_directory
+from gns3server.utils.images import InvalidImageError, write_image, read_image_info, default_images_directory, get_builtin_disks
 from gns3server.db.repositories.images import ImagesRepository
 from gns3server.db.repositories.templates import TemplatesRepository
 from gns3server.db.repositories.rbac import RbacRepository
@@ -51,7 +51,6 @@ log = logging.getLogger(__name__)
-
 router = APIRouter()


 @router.post(
     "/qemu/{image_path:path}",
     response_model=schemas.Image,
@@ -175,6 +174,24 @@ async def upload_image(
     return image


+@router.delete(
+    "/prune",
+    status_code=status.HTTP_204_NO_CONTENT,
+    dependencies=[Depends(has_privilege("Image.Allocate"))]
+)
+async def prune_images(
+    images_repo: ImagesRepository = Depends(get_repository(ImagesRepository)),
+) -> None:
+    """
+    Prune images not attached to any template.
+
+    Required privilege: Image.Allocate
+    """
+
+    skip_images = get_builtin_disks()
+    await images_repo.prune_images(skip_images)
+
+
 @router.get(
     "/{image_path:path}",
     response_model=schemas.Image,
@@ -218,7 +235,7 @@ async def delete_image(
         image = await images_repo.get_image(image_path)
     except MultipleResultsFound:
         raise ControllerBadRequestError(f"Image '{image_path}' matches multiple images. "
-                                        f"Please include the relative path of the image")
+                                        f"Please include the absolute path of the image")

     if not image:
         raise ControllerNotFoundError(f"Image '{image_path}' not found")
@@ -236,20 +253,3 @@ async def delete_image(
     success = await images_repo.delete_image(image_path)
     if not success:
         raise ControllerError(f"Image '{image_path}' could not be deleted")
-
-
-@router.post(
-    "/prune",
-    status_code=status.HTTP_204_NO_CONTENT,
-    dependencies=[Depends(has_privilege("Image.Allocate"))]
-)
-async def prune_images(
-    images_repo: ImagesRepository = Depends(get_repository(ImagesRepository)),
-) -> None:
-    """
-    Prune images not attached to any template.
-
-    Required privilege: Image.Allocate
-    """
-
-    await images_repo.prune_images()
@@ -18,6 +18,7 @@
 API routes for templates.
 """

+import os
 import hashlib
 import json

@@ -34,6 +35,8 @@ from gns3server.db.repositories.templates import TemplatesRepository
 from gns3server.services.templates import TemplatesService
 from gns3server.db.repositories.rbac import RbacRepository
 from gns3server.db.repositories.images import ImagesRepository
+from gns3server.controller.controller_error import ControllerError
+from gns3server.utils.images import get_builtin_disks

 from .dependencies.authentication import get_current_active_user
 from .dependencies.rbac import has_privilege
@@ -132,10 +135,28 @@ async def delete_template(
     Required privilege: Template.Allocate
     """

+    images = await templates_repo.get_template_images(template_id)
     await TemplatesService(templates_repo).delete_template(template_id)
     await rbac_repo.delete_all_ace_starting_with_path(f"/templates/{template_id}")
-    if prune_images:
-        await images_repo.prune_images()
+    if prune_images and images:
+        skip_images = get_builtin_disks()
+        for image in images:
+            if image.filename in skip_images:
+                continue
+            templates = await images_repo.get_image_templates(image.image_id)
+            if templates:
+                template_names = ", ".join([template.name for template in templates])
+                raise ControllerError(f"Image '{image.path}' is used by one or more templates: {template_names}")
+
+            try:
+                os.remove(image.path)
+            except OSError:
+                log.warning(f"Could not delete image file {image.path}")
+
+            print(f"Deleting image '{image.path}'")
+            success = await images_repo.delete_image(image.path)
+            if not success:
+                raise ControllerError(f"Image '{image.path}' could not removed from the database")


 @router.get(
@@ -115,7 +115,7 @@ class Docker(BaseManager):
             dst_path = self.resources_path()
             log.info(f"Installing Docker resources in '{dst_path}'")
             from gns3server.controller import Controller
-            Controller.instance().install_resource_files(dst_path, "compute/docker/resources")
+            await Controller.instance().install_resource_files(dst_path, "compute/docker/resources")
             await self.install_busybox(dst_path)
         except OSError as e:
             raise DockerError(f"Could not install Docker resources to {dst_path}: {e}")
@@ -28,10 +28,10 @@ try:
 except ImportError:
     from importlib import resources as importlib_resources

-
 from ..config import Config
 from ..utils import parse_version, md5sum
 from ..utils.images import default_images_directory
+from ..utils.asyncio import wait_run_in_executor

 from .project import Project
 from .appliance import Appliance
@@ -43,6 +43,7 @@ from .topology import load_topology
 from .gns3vm import GNS3VM
 from .gns3vm.gns3_vm_error import GNS3VMError
 from .controller_error import ControllerError, ControllerNotFoundError
+from ..db.tasks import update_disk_checksums
 from ..version import __version__

 import logging
@@ -72,8 +73,11 @@ class Controller:
     async def start(self, computes=None):

         log.info("Controller is starting")
-        self._install_base_configs()
-        self._install_builtin_disks()
+        await self._install_base_configs()
+        installed_disks = await self._install_builtin_disks()
+        if installed_disks:
+            await update_disk_checksums(installed_disks)
+
         server_config = Config.instance().settings.Server
         Config.instance().listen_for_config_changes(self._update_config)
         name = server_config.name
@@ -86,7 +90,7 @@ class Controller:
         if host == "0.0.0.0":
             host = "127.0.0.1"

-        self._load_controller_vars()
+        await self._load_controller_vars()

         if server_config.enable_ssl:
             self._ssl_context = self._create_ssl_context(server_config)
@@ -190,7 +194,7 @@ class Controller:
     async def reload(self):

         log.info("Controller is reloading")
-        self._load_controller_vars()
+        await self._load_controller_vars()

         # remove all projects deleted from disk.
         for project in self._projects.copy().values():
@@ -234,7 +238,7 @@ class Controller:
         except OSError as e:
             log.error(f"Cannot write controller vars file '{self._vars_file}': {e}")

-    def _load_controller_vars(self):
+    async def _load_controller_vars(self):
         """
         Reload the controller vars from disk
         """
@@ -274,9 +278,9 @@ class Controller:
         builtin_appliances_path = self._appliance_manager.builtin_appliances_path()
         if not previous_version or \
                 parse_version(__version__.split("+")[0]) > parse_version(previous_version.split("+")[0]):
-            self._appliance_manager.install_builtin_appliances()
+            await self._appliance_manager.install_builtin_appliances()
         elif not os.listdir(builtin_appliances_path):
-            self._appliance_manager.install_builtin_appliances()
+            await self._appliance_manager.install_builtin_appliances()
         else:
             log.info(f"Built-in appliances are installed in '{builtin_appliances_path}'")

@@ -307,18 +311,21 @@ class Controller:


     @staticmethod
-    def install_resource_files(dst_path, resource_name, upgrade_resources=True):
+    async def install_resource_files(dst_path, resource_name, upgrade_resources=True):
         """
         Install files from resources to user's file system
         """

-        def should_copy(src, dst, upgrade_resources):
+        installed_resources = []
+        async def should_copy(src, dst, upgrade_resources):
             if not os.path.exists(dst):
                 return True
             if upgrade_resources is False:
                 return False
             # copy the resource if it is different
-            return md5sum(src) != md5sum(dst)
+            src_md5 = await wait_run_in_executor(md5sum, src)
+            dst_md5 = await wait_run_in_executor(md5sum, dst)
+            return src_md5 != dst_md5

         if hasattr(sys, "frozen") and sys.platform.startswith("win"):
             resource_path = os.path.normpath(os.path.join(os.path.dirname(sys.executable), resource_name))
@@ -328,14 +335,16 @@ class Controller:
         else:
             for entry in importlib_resources.files('gns3server').joinpath(resource_name).iterdir():
                 full_path = os.path.join(dst_path, entry.name)
-                if entry.is_file() and should_copy(str(entry), full_path, upgrade_resources):
+                if entry.is_file() and await should_copy(str(entry), full_path, upgrade_resources):
                     log.debug(f'Installing {resource_name} resource file "{entry.name}" to "{full_path}"')
-                    shutil.copy(str(entry), os.path.join(dst_path, entry.name))
+                    shutil.copy(str(entry), os.path.join(full_path))
+                    installed_resources.append(full_path)
                 elif entry.is_dir():
                     os.makedirs(full_path, exist_ok=True)
-                    Controller.install_resource_files(full_path, os.path.join(resource_name, entry.name))
+                    await Controller.install_resource_files(full_path, os.path.join(resource_name, entry.name))
+        return installed_resources

-    def _install_base_configs(self):
+    async def _install_base_configs(self):
         """
         At startup we copy base configs to the user location to allow
         them to customize it
@@ -345,11 +354,11 @@ class Controller:
         log.info(f"Installing base configs in '{dst_path}'")
         try:
             # do not overwrite base configs because they may have been customized by the user
-            Controller.install_resource_files(dst_path, "configs", upgrade_resources=False)
+            await Controller.install_resource_files(dst_path, "configs", upgrade_resources=False)
         except OSError as e:
             log.error(f"Could not install base config files to {dst_path}: {e}")

-    def _install_builtin_disks(self):
+    async def _install_builtin_disks(self):
         """
         At startup we copy built-in Qemu disks to the user location to allow
         them to use with appliances
@@ -358,7 +367,7 @@ class Controller:
         dst_path = self.disks_path()
         log.info(f"Installing built-in disks in '{dst_path}'")
         try:
-            Controller.install_resource_files(dst_path, "disks")
+            return await Controller.install_resource_files(dst_path, "disks")
         except OSError as e:
             log.error(f"Could not install disk files to {dst_path}: {e}")

@@ -110,7 +110,7 @@ class ApplianceManager:
         os.makedirs(appliances_dir, exist_ok=True)
         return appliances_dir

-    def install_builtin_appliances(self):
+    async def install_builtin_appliances(self):
         """
         At startup we copy the built-in appliances files.
         """
@@ -119,7 +119,7 @@ class ApplianceManager:
         log.info(f"Installing built-in appliances in '{dst_path}'")
         from . import Controller
         try:
-            Controller.instance().install_resource_files(dst_path, "appliances")
+            await Controller.instance().install_resource_files(dst_path, "appliances")
         except OSError as e:
             log.error(f"Could not install built-in appliance files to {dst_path}: {e}")

@@ -18,7 +18,7 @@
 import os

 from typing import Optional, List
-from sqlalchemy import select, delete
+from sqlalchemy import select, delete, update
 from sqlalchemy.ext.asyncio import AsyncSession

 from .base import BaseRepository
@@ -103,6 +103,22 @@ class ImagesRepository(BaseRepository):
         await self._db_session.refresh(db_image)
         return db_image

+    async def update_image(self, image_path: str, checksum: str, checksum_algorithm: str) -> models.Image:
+        """
+        Update an image.
+        """
+
+        query = update(models.Image).\
+            where(models.Image.path == image_path).\
+            values(checksum=checksum, checksum_algorithm=checksum_algorithm)
+
+        await self._db_session.execute(query)
+        await self._db_session.commit()
+        image_db = await self.get_image_by_checksum(checksum)
+        if image_db:
+            await self._db_session.refresh(image_db)  # force refresh of updated_at value
+        return image_db
+
     async def delete_image(self, image_path: str) -> bool:
         """
         Delete an image.
@@ -119,7 +135,7 @@ class ImagesRepository(BaseRepository):
         await self._db_session.commit()
         return result.rowcount > 0

-    async def prune_images(self) -> int:
+    async def prune_images(self, skip_images: list[str] = None) -> int:
         """
         Prune images not attached to any template.
         """
@@ -130,12 +146,15 @@ class ImagesRepository(BaseRepository):
         images = result.scalars().all()
         images_deleted = 0
         for image in images:
+            if skip_images and image.filename in skip_images:
+                log.debug(f"Skipping image '{image.path}' for pruning")
+                continue
             try:
                 log.debug(f"Deleting image '{image.path}'")
                 os.remove(image.path)
             except OSError:
                 log.warning(f"Could not delete image file {image.path}")
-            if await self.delete_image(image.filename):
+            if await self.delete_image(image.path):
                 images_deleted += 1
         log.info(f"{images_deleted} image(s) have been deleted")
         return images_deleted
@@ -170,3 +170,14 @@ class TemplatesRepository(BaseRepository):
         await self._db_session.commit()
         await self._db_session.refresh(template_in_db)
         return template_in_db
+
+    async def get_template_images(self, template_id: UUID) -> int:
+        """
+        Return all images attached to a template.
+        """
+
+        query = select(models.Image).\
+            join(models.Image.templates).\
+            filter(models.Template.template_id == template_id)
+        result = await self._db_session.execute(query)
+        return result.scalars().all()
@@ -35,7 +35,8 @@ from watchdog.events import FileSystemEvent, PatternMatchingEventHandler

 from gns3server.db.repositories.computes import ComputesRepository
 from gns3server.db.repositories.images import ImagesRepository
-from gns3server.utils.images import discover_images, read_image_info, default_images_directory, InvalidImageError
+from gns3server.utils.images import md5sum, discover_images, read_image_info, InvalidImageError
+from gns3server.utils.asyncio import wait_run_in_executor
 from gns3server import schemas

 from .models import Base
@@ -130,7 +131,7 @@ async def get_computes(app: FastAPI) -> List[dict]:
     return computes


-async def discover_images_on_filesystem(app: FastAPI):
+async def discover_images_on_filesystem(app: FastAPI) -> None:

     async with AsyncSession(app.state._db_engine) as db_session:
         images_repository = ImagesRepository(db_session)
@@ -155,6 +156,25 @@ async def discover_images_on_filesystem(app: FastAPI):
     # monitor if images have been manually added
     asyncio.create_task(monitor_images_on_filesystem(app))

+
+async def update_disk_checksums(updated_disks: List[str]) -> None:
+    """
+    Update the checksum of a list of disks in the database.
+
+    :param updated_disks: list of updated disks
+    """
+
+    from gns3server.api.server import app
+    async with AsyncSession(app.state._db_engine) as db_session:
+        images_repository = ImagesRepository(db_session)
+        for path in updated_disks:
+            image = await images_repository.get_image(path)
+            if image:
+                log.info(f"Updating image '{path}' in the database")
+                checksum = await wait_run_in_executor(md5sum, path, cache_to_md5file=False)
+                if image.checksum != checksum:
+                    await images_repository.update_image(path, checksum, "md5")
+
 class EventHandler(PatternMatchingEventHandler):
     """
     Watchdog event handler.
@@ -227,7 +247,6 @@ async def monitor_images_on_filesystem(app: FastAPI):

     async for filesystem_event in EventIterator(queue):
         # read the file system event from the queue
-        print(filesystem_event)
         image_path = filesystem_event.src_path
         expected_image_type = None
         if "IOU" in image_path:
@@ -20,6 +20,11 @@ import stat
 import aiofiles
 import shutil

+try:
+    import importlib_resources
+except ImportError:
+    from importlib import resources as importlib_resources
+
 from typing import List, AsyncGenerator
 from ..config import Config
 from . import force_unix_path
@@ -111,6 +116,14 @@ async def list_images(image_type):
     return images


+def get_builtin_disks() -> List[str]:
+    builtin_disks = []
+    for entry in importlib_resources.files('gns3server').joinpath("disks").iterdir():
+        if entry.is_file():
+            builtin_disks.append(entry.name)
+    return builtin_disks
+
+
 async def read_image_info(path: str, expected_image_type: str = None) -> dict:

     header_magic_len = 7
@@ -118,7 +131,7 @@ async def read_image_info(path: str, expected_image_type: str = None) -> dict:
     async with aiofiles.open(path, "rb") as f:
         image_header = await f.read(header_magic_len)  # read the first 7 bytes of the file
         if len(image_header) >= header_magic_len:
-            detected_image_type = check_valid_image_header(image_header)
+            detected_image_type = check_valid_image_header(path, image_header)
             if expected_image_type and detected_image_type != expected_image_type:
                 raise InvalidImageError(f"Detected image type for '{path}' is {detected_image_type}, "
                                         f"expected type is {expected_image_type}")
@@ -302,7 +315,7 @@ class InvalidImageError(Exception):
         return self._message


-def check_valid_image_header(data: bytes, allow_raw_image: bool = False) -> str:
+def check_valid_image_header(path: str, data: bytes, allow_raw_image: bool = False) -> str:

     if data[:7] == b'\x7fELF\x01\x02\x01':
         # for IOS images: file must start with the ELF magic number, be 32-bit, big endian and have an ELF version of 1
@@ -317,7 +330,7 @@ def check_valid_image_header(data: bytes, allow_raw_image: bool = False) -> str:
     else:
         if allow_raw_image is True:
             return "qemu"
-        raise InvalidImageError("Could not detect image type, please make sure it is a valid image")
+        raise InvalidImageError(f"{path}: could not detect image type, please make sure it is a valid image")


 async def write_image(
@@ -342,7 +355,7 @@ async def write_image(
         async for chunk in stream:
             if check_image_header and len(chunk) >= header_magic_len:
                 check_image_header = False
-                image_type = check_valid_image_header(chunk, allow_raw_image)
+                image_type = check_valid_image_header(image_path, chunk, allow_raw_image)
             await f.write(chunk)
             checksum.update(chunk)

@@ -28,11 +28,11 @@ pytestmark = pytest.mark.asyncio

 class TestApplianceRoutes:

-    @pytest.fixture(autouse=True)
-    def _install_builtin_appliances(self, controller: Controller):
-
-        controller.appliance_manager.install_builtin_appliances()
-        controller.appliance_manager.load_appliances()
+    # @pytest.fixture(autouse=True)
+    # def _install_builtin_appliances(self, controller: Controller):
+    #
+    #     controller.appliance_manager.install_builtin_appliances()
+    #     controller.appliance_manager.load_appliances()

     async def test_appliances_list(self, app: FastAPI, client: AsyncClient) -> None:

@@ -261,7 +261,7 @@ class TestImageRoutes:

     async def test_prune_images(self, app: FastAPI, client: AsyncClient, db_session: AsyncSession) -> None:

-        response = await client.post(app.url_path_for("prune_images"))
+        response = await client.delete(app.url_path_for("prune_images"))
         assert response.status_code == status.HTTP_204_NO_CONTENT

         images_repo = ImagesRepository(db_session)
@@ -275,7 +275,7 @@ class TestImageRoutes:
             controller: Controller
     ) -> None:

-        controller.appliance_manager.install_builtin_appliances()
+        await controller.appliance_manager.install_builtin_appliances()
         controller.appliance_manager.load_appliances()  # make sure appliances are loaded
         image_path = "tests/resources/empty30G.qcow2"
         image_name = os.path.basename(image_path)
@@ -245,6 +245,7 @@ async def test_start(controller):
     }

     #with asyncio_patch("gns3server.controller.compute.Compute.connect") as mock:
-    await controller.start()
+    with asyncio_patch("gns3server.controller.Controller._install_builtin_disks", return_value=[]):
+        await controller.start()
     #assert mock.called
     assert len(controller.computes) == 1  # Local compute is created
@@ -266,6 +267,7 @@ async def test_start_vm(controller):
     with asyncio_patch("gns3server.controller.gns3vm.vmware_gns3_vm.VMwareGNS3VM.start") as mock:
         with asyncio_patch("gns3server.controller.gns3vm.GNS3VM._check_network"):
             with asyncio_patch("gns3server.controller.compute.Compute.connect"):
-                await controller.start()
+                with asyncio_patch("gns3server.controller.Controller._install_builtin_disks", return_value=[]):
+                    await controller.start()
     assert mock.called
     assert "local" in controller.computes
@@ -356,7 +358,7 @@ async def test_install_base_configs(controller, config, tmpdir):
     with open(str(tmpdir / 'iou_l2_base_startup-config.txt'), 'w+') as f:
         f.write('test')

-    controller._install_base_configs()
+    await controller._install_base_configs()
     assert os.path.exists(str(tmpdir / 'iou_l3_base_startup-config.txt'))

     # Check is the file has not been overwritten
@@ -385,12 +387,13 @@ async def test_install_base_configs(controller, config, tmpdir):
 async def test_install_builtin_disks(controller, config, tmpdir, builtin_disk):

     config.settings.Server.images_path = str(tmpdir)
-    controller._install_builtin_disks()
+    await controller._install_builtin_disks()
     # we only install Qemu empty disks at this time
     assert os.path.exists(str(tmpdir / "QEMU" / builtin_disk))


-def test_appliances(controller, config, tmpdir):
+@pytest.mark.asyncio
+async def test_appliances(controller, config, tmpdir):

     my_appliance = {
         "name": "My Appliance",
@@ -406,7 +409,7 @@ def test_appliances(controller, config, tmpdir):
         json.dump(my_appliance, f)

     config.settings.Server.appliances_path = str(tmpdir)
-    controller.appliance_manager.install_builtin_appliances()
+    await controller.appliance_manager.install_builtin_appliances()
     controller.appliance_manager.load_appliances()
     assert len(controller.appliance_manager.appliances) > 0
     for appliance in controller.appliance_manager.appliances.values():