Mirror of https://github.com/GNS3/gns3-server.git
Commit d44f6eb2f1
@@ -45,7 +45,7 @@ router = APIRouter()
 @router.post("/projects/{project_id}/ports/udp", status_code=status.HTTP_201_CREATED)
 def allocate_udp_port(project_id: UUID) -> dict:
     """
-    Allocate an UDP port on the compute.
+    Allocate a UDP port on the compute.
     """

     pm = ProjectManager.instance()

@@ -94,7 +94,7 @@ def add_appliance_version(appliance_id: UUID, appliance_version: schemas.Applian
        if version.get("name") == appliance_version.name:
            raise ControllerError(message=f"Appliance '{appliance_id}' already has version '{appliance_version.name}'")

-    appliance.versions.append(appliance_version.dict(exclude_unset=True))
+    appliance.versions.append(appliance_version.model_dump(exclude_unset=True))
     return appliance.asdict()


@@ -318,7 +318,7 @@ async def create_disk_image(

     if node.node_type != "qemu":
         raise ControllerBadRequestError("Creating a disk image is only supported on a Qemu node")
-    await node.post(f"/disk_image/{disk_name}", data=disk_data.dict(exclude_unset=True))
+    await node.post(f"/disk_image/{disk_name}", data=disk_data.model_dump(exclude_unset=True))


 @router.put("/{node_id}/qemu/disk_image/{disk_name}", status_code=status.HTTP_204_NO_CONTENT)
@@ -333,7 +333,7 @@ async def update_disk_image(

     if node.node_type != "qemu":
         raise ControllerBadRequestError("Updating a disk image is only supported on a Qemu node")
-    await node.put(f"/disk_image/{disk_name}", data=disk_data.dict(exclude_unset=True))
+    await node.put(f"/disk_image/{disk_name}", data=disk_data.model_dump(exclude_unset=True))


 @router.delete("/{node_id}/qemu/disk_image/{disk_name}", status_code=status.HTTP_204_NO_CONTENT)
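The hunks above are the mechanical `.dict()` → `.model_dump()` rename from the Pydantic v2 migration. A minimal sketch of the behaviour, using a hypothetical DiskData model rather than the project's real schema:

    from typing import Optional
    from pydantic import BaseModel

    class DiskData(BaseModel):
        # hypothetical stand-in for the real disk image schema
        size: int
        format: str = "qcow2"
        zeroed_grain: Optional[str] = None

    disk = DiskData(size=30)

    # Pydantic v1 spelling (deprecated under v2):
    # disk.dict(exclude_unset=True)

    # Pydantic v2 spelling; exclude_unset drops fields the caller never set,
    # so only {"size": 30} would be forwarded to the compute.
    print(disk.model_dump(exclude_unset=True))  # {'size': 30}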
@@ -2645,7 +2645,7 @@ class QemuVM(BaseNode):
     def asdict(self):
         answer = {"project_id": self.project.id, "node_id": self.id, "node_directory": self.working_path}
         # Qemu has a long list of options. The JSON schema is the single source of information
-        for field in Qemu.schema()["properties"]:
+        for field in Qemu.model_json_schema()["properties"]:
             if field not in answer:
                 try:
                     answer[field] = getattr(self, field)
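Pydantic v2 renames `schema()` to `model_json_schema()`; the returned dict still has a `properties` key, so the field loop is unchanged. A small sketch with a throwaway model (not the real Qemu schema):

    from typing import Optional
    from pydantic import BaseModel, Field

    class Qemu(BaseModel):
        # throwaway illustration, not the real GNS3 Qemu schema
        name: str
        ram: Optional[int] = Field(None, description="Amount of RAM in MB")

    # v1: Qemu.schema()["properties"]  ->  v2: Qemu.model_json_schema()["properties"]
    for field in Qemu.model_json_schema()["properties"]:
        print(field)  # name, ram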
@@ -253,7 +253,7 @@ class ApplianceManager:
         appliances_info = self._find_appliances_from_image_checksum(image_checksum)
         for appliance, image_version in appliances_info:
             try:
-                schemas.Appliance.parse_obj(appliance.asdict())
+                schemas.Appliance.model_validate(appliance.asdict())
             except ValidationError as e:
                 log.warning(f"Could not validate appliance '{appliance.id}': {e}")
             if appliance.versions:
@@ -284,7 +284,7 @@ class ApplianceManager:
            raise ControllerNotFoundError(message=f"Could not find appliance '{appliance_id}'")

        try:
-            schemas.Appliance.parse_obj(appliance.asdict())
+            schemas.Appliance.model_validate(appliance.asdict())
        except ValidationError as e:
            raise ControllerError(message=f"Could not validate appliance '{appliance_id}': {e}")

@@ -339,7 +339,7 @@ class ApplianceManager:
                appliance = Appliance(path, json.load(f), builtin=builtin)
                json_data = appliance.asdict()  # Check if loaded without error
                if appliance.status != "broken":
-                    schemas.Appliance.parse_obj(json_data)
+                    schemas.Appliance.model_validate(json_data)
                    self._appliances[appliance.id] = appliance
                    if not appliance.symbol or appliance.symbol.startswith(":/symbols/"):
                        # apply a default symbol if the appliance has none or a default symbol

@@ -52,12 +52,12 @@ class DynamipsNodeValidation(DynamipsCreate):

 def _check_topology_schema(topo, path):
     try:
-        Topology.parse_obj(topo)
+        Topology.model_validate(topo)

         # Check the nodes property against compute schemas
         for node in topo["topology"].get("nodes", []):
             if node["node_type"] == "dynamips":
-                DynamipsNodeValidation.parse_obj(node.get("properties", {}))
+                DynamipsNodeValidation.model_validate(node.get("properties", {}))

     except pydantic.ValidationError as e:
         error = f"Invalid data in topology file {path}: {e}"
@@ -21,7 +21,7 @@ from fastapi.encoders import jsonable_encoder
 from sqlalchemy import Column, DateTime, func, inspect
 from sqlalchemy.types import TypeDecorator, CHAR, VARCHAR
 from sqlalchemy.dialects.postgresql import UUID
-from sqlalchemy.ext.declarative import as_declarative
+from sqlalchemy.orm import as_declarative


 @as_declarative()

@@ -68,7 +68,7 @@ class ComputesRepository(BaseRepository):

     async def update_compute(self, compute_id: UUID, compute_update: schemas.ComputeUpdate) -> Optional[models.Compute]:

-        update_values = compute_update.dict(exclude_unset=True)
+        update_values = compute_update.model_dump(exclude_unset=True)

         password = compute_update.password
         if password:

@@ -93,7 +93,7 @@ class RbacRepository(BaseRepository):
         Update a role.
         """

-        update_values = role_update.dict(exclude_unset=True)
+        update_values = role_update.model_dump(exclude_unset=True)
         query = update(models.Role).\
             where(models.Role.role_id == role_id).\
             values(update_values)
@@ -236,7 +236,7 @@ class RbacRepository(BaseRepository):
         Update a permission.
         """

-        update_values = permission_update.dict(exclude_unset=True)
+        update_values = permission_update.model_dump(exclude_unset=True)
         query = update(models.Permission).\
             where(models.Permission.permission_id == permission_id).\
             values(update_values)

@@ -97,7 +97,7 @@ class UsersRepository(BaseRepository):
         Update an user.
         """

-        update_values = user_update.dict(exclude_unset=True)
+        update_values = user_update.model_dump(exclude_unset=True)
         password = update_values.pop("password", None)
         if password:
             update_values["hashed_password"] = self._auth_service.hash_password(password=password.get_secret_value())
@@ -207,10 +207,10 @@ class UsersRepository(BaseRepository):
         user_group_update: schemas.UserGroupUpdate
     ) -> Optional[models.UserGroup]:
         """
-        Update an user group.
+        Update a user group.
         """

-        update_values = user_group_update.dict(exclude_unset=True)
+        update_values = user_group_update.model_dump(exclude_unset=True)
         query = update(models.UserGroup).\
             where(models.UserGroup.user_group_id == user_group_id).\
             values(update_values)
@@ -224,7 +224,7 @@ class UsersRepository(BaseRepository):

     async def delete_user_group(self, user_group_id: UUID) -> bool:
         """
-        Delete an user group.
+        Delete a user group.
         """

         query = delete(models.UserGroup).where(models.UserGroup.user_group_id == user_group_id)
@@ -122,7 +122,7 @@ async def get_computes(app: FastAPI) -> List[dict]:
         db_computes = await ComputesRepository(db_session).get_computes()
         for db_compute in db_computes:
             try:
-                compute = schemas.Compute.from_orm(db_compute)
+                compute = schemas.Compute.model_validate(db_compute)
             except ValidationError as e:
                 log.error(f"Could not load compute '{db_compute.compute_id}' from database: {e}")
                 continue
@@ -212,7 +212,7 @@ async def discover_images_on_filesystem(app: FastAPI):
     existing_image_paths = []
     for db_image in db_images:
         try:
-            image = schemas.Image.from_orm(db_image)
+            image = schemas.Image.model_validate(db_image)
             existing_image_paths.append(image.path)
         except ValidationError as e:
             log.error(f"Could not load image '{db_image.filename}' from database: {e}")
@@ -45,7 +45,7 @@ class CustomAdapter(BaseModel):
     adapter_number: int
     port_name: Optional[str] = None
     adapter_type: Optional[str] = None
-    mac_address: Optional[str] = Field(None, regex="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$")
+    mac_address: Optional[str] = Field(None, pattern="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$")


 class ConsoleType(str, Enum):

@@ -31,7 +31,7 @@ class DockerBase(BaseModel):
     node_id: Optional[UUID] = None
     console: Optional[int] = Field(None, gt=0, le=65535, description="Console TCP port")
     console_type: Optional[ConsoleType] = Field(None, description="Console type")
-    console_resolution: Optional[str] = Field(None, regex="^[0-9]+x[0-9]+$", description="Console resolution for VNC")
+    console_resolution: Optional[str] = Field(None, pattern="^[0-9]+x[0-9]+$", description="Console resolution for VNC")
     console_http_port: Optional[int] = Field(None, description="Internal port in the container for the HTTP server")
     console_http_path: Optional[str] = Field(None, description="Path of the web interface")
     aux: Optional[int] = Field(None, gt=0, le=65535, description="Auxiliary TCP port")
@@ -67,7 +67,7 @@ class DockerUpdate(DockerBase):
 class Docker(DockerBase):

     container_id: str = Field(
-        ..., min_length=12, max_length=64, regex="^[a-f0-9]+$", description="Docker container ID (read only)"
+        ..., min_length=12, max_length=64, pattern="^[a-f0-9]+$", description="Docker container ID (read only)"
     )
     project_id: UUID = Field(..., description="Project ID")
     node_directory: str = Field(..., description="Path to the node working directory (read only)")

@@ -129,13 +129,13 @@ class DynamipsBase(BaseModel):
     image: Optional[str] = Field(None, description="Path to the IOS image")
     image_md5sum: Optional[str] = Field(None, description="Checksum of the IOS image")
     usage: Optional[str] = Field(None, description="How to use the Dynamips VM")
-    chassis: Optional[str] = Field(None, description="Cisco router chassis model", regex="^[0-9]{4}(XM)?$")
+    chassis: Optional[str] = Field(None, description="Cisco router chassis model", pattern="^[0-9]{4}(XM)?$")
     startup_config_content: Optional[str] = Field(None, description="Content of IOS startup configuration file")
     private_config_content: Optional[str] = Field(None, description="Content of IOS private configuration file")
     mmap: Optional[bool] = Field(None, description="MMAP feature")
     sparsemem: Optional[bool] = Field(None, description="Sparse memory feature")
     clock_divisor: Optional[int] = Field(None, description="Clock divisor")
-    idlepc: Optional[str] = Field(None, description="Idle-PC value", regex="^(0x[0-9a-fA-F]+)?$")
+    idlepc: Optional[str] = Field(None, description="Idle-PC value", pattern="^(0x[0-9a-fA-F]+)?$")
     idlemax: Optional[int] = Field(None, description="Idlemax value")
     idlesleep: Optional[int] = Field(None, description="Idlesleep value")
     exec_area: Optional[int] = Field(None, description="Exec area value")
@@ -147,7 +147,7 @@ class DynamipsBase(BaseModel):
     aux: Optional[int] = Field(None, gt=0, le=65535, description="Auxiliary console TCP port")
     aux_type: Optional[DynamipsConsoleType] = Field(None, description="Auxiliary console type")
     mac_addr: Optional[str] = Field(
-        None, description="Base MAC address", regex="^([0-9a-fA-F]{4}\\.){2}[0-9a-fA-F]{4}$"
+        None, description="Base MAC address", pattern="^([0-9a-fA-F]{4}\\.){2}[0-9a-fA-F]{4}$"
     )
     system_id: Optional[str] = Field(None, description="System ID")
     slot0: Optional[DynamipsAdapters] = Field(None, description="Network module slot 0")

@@ -174,7 +174,7 @@ class DynamipsCreate(DynamipsBase):
     """

     name: str
-    platform: str = Field(..., description="Cisco router platform", regex="^c[0-9]{4}$")
+    platform: str = Field(..., description="Cisco router platform", pattern="^c[0-9]{4}$")
     image: str = Field(..., description="Path to the IOS image")
     ram: int = Field(..., description="Amount of RAM in MB")
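Pydantic v2 renames the `Field(regex=...)` keyword to `Field(pattern=...)`; the old keyword is no longer accepted. A small sketch, using a made-up model:

    from typing import Optional
    from pydantic import BaseModel, Field, ValidationError

    class Adapter(BaseModel):
        # made-up model just to show the constraint syntax
        # v1: Field(None, regex="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$")
        mac_address: Optional[str] = Field(None, pattern="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$")

    Adapter(mac_address="00:11:22:33:44:55")      # valid
    try:
        Adapter(mac_address="not-a-mac")
    except ValidationError as e:
        print(e)                                  # string should match the pattern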
@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-from pydantic import BaseModel, Field, validator
+from pydantic import BaseModel, Field, model_validator
 from typing import Optional, List
 from uuid import UUID
 from enum import Enum
@@ -43,15 +43,14 @@ class EthernetSwitchPort(BaseModel):
     port_number: int
     type: EthernetSwitchPortType = Field(..., description="Port type")
     vlan: int = Field(..., ge=1, le=4094, description="VLAN number")
-    ethertype: Optional[EthernetSwitchEtherType] = Field(None, description="QinQ Ethertype")
+    ethertype: Optional[EthernetSwitchEtherType] = Field("0x8100", description="QinQ Ethertype")

-    @validator("ethertype")
-    def validate_ethertype(cls, v, values):
+    @model_validator(mode="after")
+    def check_ethertype(self) -> "EthernetSwitchPort":

-        if v is not None:
-            if "type" not in values or values["type"] != EthernetSwitchPortType.qinq:
-                raise ValueError("Ethertype is only for QinQ port type")
-        return v
+        if self.ethertype != EthernetSwitchEtherType.ethertype_8021q and self.type != EthernetSwitchPortType.qinq:
+            raise ValueError("Ethertype is only for QinQ port type")
+        return self


 class TelnetConsoleType(str, Enum):
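In Pydantic v2, cross-field checks move from `@validator(...)` (which received a `values` dict) to `@model_validator(mode="after")`, which runs on the fully constructed instance and returns `self`. A standalone sketch of the same idea with simplified enums (not the project's actual classes):

    from enum import Enum
    from pydantic import BaseModel, ValidationError, model_validator

    class PortType(str, Enum):
        access = "access"
        qinq = "qinq"

    class SwitchPort(BaseModel):
        # simplified stand-in for the EthernetSwitchPort schema
        type: PortType
        ethertype: str = "0x8100"

        @model_validator(mode="after")
        def check_ethertype(self) -> "SwitchPort":
            # a non-default ethertype only makes sense on QinQ ports
            if self.ethertype != "0x8100" and self.type != PortType.qinq:
                raise ValueError("Ethertype is only for QinQ port type")
            return self

    SwitchPort(type=PortType.qinq, ethertype="0x88A8")   # accepted
    try:
        SwitchPort(type=PortType.access, ethertype="0x88A8")
    except ValidationError as e:
        print(e)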
@@ -29,7 +29,7 @@ class IOUBase(BaseModel):
     name: str
     path: str = Field(..., description="IOU executable path")
     application_id: int = Field(..., description="Application ID for running IOU executable")
-    node_id: Optional[UUID]
+    node_id: Optional[UUID] = None
     usage: Optional[str] = Field(None, description="How to use the node")
     console: Optional[int] = Field(None, gt=0, le=65535, description="Console TCP port")
     console_type: Optional[ConsoleType] = Field(None, description="Console type")
@@ -57,7 +57,7 @@ class IOUUpdate(IOUBase):
     Properties to update an IOU node.
     """

-    name: Optional[str]
+    name: Optional[str] = None
     path: Optional[str] = Field(None, description="IOU executable path")
     application_id: Optional[int] = Field(None, description="Application ID for running IOU executable")


@@ -156,7 +156,7 @@ class QemuBase(BaseModel):
     """

     name: str
-    node_id: Optional[UUID]
+    node_id: Optional[UUID] = None
     usage: Optional[str] = Field(None, description="How to use the node")
     linked_clone: Optional[bool] = Field(None, description="Whether the VM is a linked clone or not")
     qemu_path: Optional[str] = Field(None, description="Qemu executable path")
@@ -197,7 +197,7 @@ class QemuBase(BaseModel):
     adapters: Optional[int] = Field(None, ge=0, le=275, description="Number of adapters")
     adapter_type: Optional[QemuAdapterType] = Field(None, description="QEMU adapter type")
     mac_address: Optional[str] = Field(
-        None, description="QEMU MAC address", regex="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$"
+        None, description="QEMU MAC address", pattern="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$"
     )
     replicate_network_connection_state: Optional[bool] = Field(
         None, description="Replicate the network connection state for links in Qemu"
@@ -227,7 +227,7 @@ class QemuUpdate(QemuBase):
     Properties to update a Qemu node.
     """

-    name: Optional[str]
+    name: Optional[str] = None


 class Qemu(QemuBase):
@@ -58,7 +58,7 @@ class VirtualBoxBase(BaseModel):

     name: str
     vmname: str = Field(..., description="VirtualBox VM name (in VirtualBox itself)")
-    node_id: Optional[UUID]
+    node_id: Optional[UUID] = None
     linked_clone: Optional[bool] = Field(None, description="Whether the VM is a linked clone or not")
     usage: Optional[str] = Field(None, description="How to use the node")
     # 36 adapters is the maximum given by the ICH9 chipset in VirtualBox
@@ -86,8 +86,8 @@ class VirtualBoxUpdate(VirtualBoxBase):
     Properties to update a VirtualBox node.
     """

-    name: Optional[str]
-    vmname: Optional[str]
+    name: Optional[str] = None
+    vmname: Optional[str] = None


 class VirtualBox(VirtualBoxBase):

@@ -64,7 +64,7 @@ class VMwareBase(BaseModel):
     name: str
     vmx_path: str = Field(..., description="Path to the vmx file")
     linked_clone: bool = Field(..., description="Whether the VM is a linked clone or not")
-    node_id: Optional[UUID]
+    node_id: Optional[UUID] = None
     usage: Optional[str] = Field(None, description="How to use the node")
     console: Optional[int] = Field(None, gt=0, le=65535, description="Console TCP port")
     console_type: Optional[VMwareConsoleType] = Field(None, description="Console type")
@@ -90,9 +90,9 @@ class VMwareUpdate(VMwareBase):
     Properties to update a VMware node.
     """

-    name: Optional[str]
-    vmx_path: Optional[str]
-    linked_clone: Optional[bool]
+    name: Optional[str] = None
+    vmx_path: Optional[str] = None
+    linked_clone: Optional[bool] = None


 class VMware(VMwareBase):

@@ -37,7 +37,7 @@ class VPCSBase(BaseModel):
     """

     name: str
-    node_id: Optional[UUID]
+    node_id: Optional[UUID] = None
     usage: Optional[str] = Field(None, description="How to use the node")
     console: Optional[int] = Field(None, gt=0, le=65535, description="Console TCP port")
     console_type: Optional[ConsoleType] = Field(None, description="Console type")
@@ -57,7 +57,7 @@ class VPCSUpdate(VPCSBase):
     Properties to update a VPCS node.
     """

-    name: Optional[str]
+    name: Optional[str] = None


 class VPCS(VPCSBase):
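The recurring `Optional[X]` → `Optional[X] = None` change across these node schemas is needed because Pydantic v2 no longer treats `Optional` as implying a default: without `= None` the field becomes required. A quick sketch:

    from typing import Optional
    from uuid import UUID
    from pydantic import BaseModel, ValidationError

    class NodeV2(BaseModel):
        name: str
        # In v1, "node_id: Optional[UUID]" defaulted to None.
        # In v2 it would be required (but nullable); the explicit default restores v1 behaviour.
        node_id: Optional[UUID] = None

    NodeV2(name="PC1")                 # fine, node_id defaults to None

    class NodeMissingDefault(BaseModel):
        name: str
        node_id: Optional[UUID]        # required in v2

    try:
        NodeMissingDefault(name="PC1")
    except ValidationError as e:
        print(e)                       # node_id: Field required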
@@ -17,7 +17,16 @@
 import socket

 from enum import Enum
-from pydantic import BaseModel, Field, SecretStr, FilePath, DirectoryPath, validator
+from pydantic import (
+    ConfigDict,
+    BaseModel,
+    Field,
+    SecretStr,
+    FilePath,
+    DirectoryPath,
+    field_validator,
+    model_validator
+)
 from typing import List


@@ -28,19 +37,13 @@ class ControllerSettings(BaseModel):
     jwt_access_token_expire_minutes: int = 1440  # 24 hours
     default_admin_username: str = "admin"
     default_admin_password: SecretStr = SecretStr("admin")

-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)


 class VPCSSettings(BaseModel):

     vpcs_path: str = "vpcs"

-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)


 class DynamipsSettings(BaseModel):
@@ -50,20 +53,14 @@ class DynamipsSettings(BaseModel):
     dynamips_path: str = "dynamips"
     sparse_memory_support: bool = True
     ghost_ios_support: bool = True

-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)


 class IOUSettings(BaseModel):

     iourc_path: str = None
     license_check: bool = True

-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)


 class QemuSettings(BaseModel):
@@ -72,19 +69,13 @@ class QemuSettings(BaseModel):
     monitor_host: str = "127.0.0.1"
     enable_hardware_acceleration: bool = True
     require_hardware_acceleration: bool = False

-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)


 class VirtualBoxSettings(BaseModel):

     vboxmanage_path: str = None

-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)


 class VMwareSettings(BaseModel):
@@ -93,16 +84,13 @@ class VMwareSettings(BaseModel):
     vmnet_start_range: int = Field(2, ge=1, le=255)
     vmnet_end_range: int = Field(255, ge=1, le=255)  # should be limited to 19 on Windows
     block_host_traffic: bool = False
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)

-    @validator("vmnet_end_range")
-    def vmnet_port_range(cls, v, values):
-        if "vmnet_start_range" in values and v <= values["vmnet_start_range"]:
+    @model_validator(mode="after")
+    def check_vmnet_port_range(self) -> "VMwareSettings":
+        if self.vmnet_end_range <= self.vmnet_start_range:
             raise ValueError("vmnet_end_range must be > vmnet_start_range")
-        return v
-
-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
+        return self


 class ServerProtocol(str, Enum):
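The inner `class Config` blocks become a `model_config = ConfigDict(...)` attribute in Pydantic v2, and `anystr_strip_whitespace` is renamed to `str_strip_whitespace`. A minimal sketch of the settings pattern above:

    from pydantic import BaseModel, ConfigDict

    class VPCSSettings(BaseModel):
        # v1 equivalent:
        #   class Config:
        #       validate_assignment = True
        #       anystr_strip_whitespace = True
        model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True)

        vpcs_path: str = "vpcs"

    settings = VPCSSettings(vpcs_path="  /usr/local/bin/vpcs  ")
    print(repr(settings.vpcs_path))   # whitespace stripped on validation
    settings.vpcs_path = "vpcs"       # re-validated because validate_assignment=True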
@@ -156,45 +144,47 @@ class ServerSettings(BaseModel):
     default_nat_interface: str = None
     allow_remote_console: bool = False
     enable_builtin_templates: bool = True
+    model_config = ConfigDict(validate_assignment=True, str_strip_whitespace=True, use_enum_values=True)

-    @validator("additional_images_paths", pre=True)
+    @field_validator("additional_images_paths", mode="before")
+    @classmethod
     def split_additional_images_paths(cls, v):
         if v:
             return v.split(";")
         return list()

-    @validator("allowed_interfaces", pre=True)
+    @field_validator("allowed_interfaces", mode="before")
+    @classmethod
     def split_allowed_interfaces(cls, v):
         if v:
             return v.split(",")
         return list()

-    @validator("console_end_port_range")
-    def console_port_range(cls, v, values):
-        if "console_start_port_range" in values and v <= values["console_start_port_range"]:
+    @model_validator(mode="after")
+    def check_console_port_range(self) -> "ServerSettings":
+        if self.console_end_port_range <= self.console_start_port_range:
             raise ValueError("console_end_port_range must be > console_start_port_range")
-        return v
+        return self

-    @validator("vnc_console_end_port_range")
-    def vnc_console_port_range(cls, v, values):
-        if "vnc_console_start_port_range" in values and v <= values["vnc_console_start_port_range"]:
+    @model_validator(mode="after")
+    def check_vnc_port_range(self) -> "ServerSettings":
+        if self.vnc_console_end_port_range <= self.vnc_console_start_port_range:
             raise ValueError("vnc_console_end_port_range must be > vnc_console_start_port_range")
-        return v
+        return self

-    @validator("enable_ssl")
-    def validate_enable_ssl(cls, v, values):
-
-        if v is True:
-            if "certfile" not in values or not values["certfile"]:
+    @model_validator(mode="after")
+    def check_enable_ssl(self) -> "ServerSettings":
+        if self.enable_ssl is True:
+            if self.certfile is None:
                 raise ValueError("SSL is enabled but certfile is not configured")
-            if "certkey" not in values or not values["certkey"]:
+            if self.certkey is None:
                 raise ValueError("SSL is enabled but certkey is not configured")
-        return v
-
-    class Config:
-        validate_assignment = True
-        anystr_strip_whitespace = True
-        use_enum_values = True
+        return self


 class ServerConfig(BaseModel):
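Per-field validators change spelling too: `@validator("field", pre=True)` becomes `@field_validator("field", mode="before")` plus an explicit `@classmethod`. A small sketch of the list-splitting validators above, simplified to one field:

    from typing import List
    from pydantic import BaseModel, field_validator

    class Settings(BaseModel):
        # simplified version of ServerSettings.additional_images_paths
        additional_images_paths: List[str] = []

        @field_validator("additional_images_paths", mode="before")
        @classmethod
        def split_additional_images_paths(cls, v):
            # accept the raw ";"-separated string from the config file
            if isinstance(v, str):
                return v.split(";")
            return v or []

    print(Settings(additional_images_paths="/opt/images;/tmp/images").additional_images_paths)
    # ['/opt/images', '/tmp/images']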
@@ -321,7 +321,7 @@ class ApplianceImage(BaseModel):

     filename: str = Field(..., title='Filename')
     version: str = Field(..., title='Version of the file')
-    md5sum: str = Field(..., title='md5sum of the file', regex='^[a-f0-9]{32}$')
+    md5sum: str = Field(..., title='md5sum of the file', pattern='^[a-f0-9]{32}$')
     filesize: int = Field(..., title='File size in bytes')
     download_url: Optional[Union[AnyUrl, constr(max_length=0)]] = Field(
         None, title='Download url where you can download the appliance from a browser'
@@ -351,7 +351,7 @@ class ApplianceVersionImages(BaseModel):
 class ApplianceVersion(BaseModel):

     name: str = Field(..., title='Name of the version')
-    idlepc: Optional[str] = Field(None, regex='^0x[0-9a-f]{8}')
+    idlepc: Optional[str] = Field(None, pattern='^0x[0-9a-f]{8}')
     images: Optional[ApplianceVersionImages] = Field(None, title='Images used for this version')


@@ -21,5 +21,5 @@ from pydantic import BaseModel

 class DateTimeModelMixin(BaseModel):

-    created_at: Optional[datetime]
-    updated_at: Optional[datetime]
+    created_at: Optional[datetime] = None
+    updated_at: Optional[datetime] = None
@@ -16,8 +16,15 @@

 import uuid

-from pydantic import BaseModel, Field, SecretStr, validator
-from typing import List, Optional, Union
+from pydantic import (
+    ConfigDict,
+    BaseModel,
+    Field,
+    SecretStr,
+    field_validator,
+    model_validator
+)
+from typing import List, Optional, Union, Any
 from enum import Enum

 from .nodes import NodeType
@@ -44,9 +51,7 @@ class ComputeBase(BaseModel):
     user: str = None
     password: Optional[SecretStr] = None
     name: Optional[str] = None

-    class Config:
-        use_enum_values = True
+    model_config = ConfigDict(use_enum_values=True)


 class ComputeCreate(ComputeBase):
@@ -55,46 +60,28 @@ class ComputeCreate(ComputeBase):
     """

     compute_id: Union[str, uuid.UUID] = None

-    class Config:
-        schema_extra = {
-            "example": {
-                "name": "My compute",
-                "host": "127.0.0.1",
-                "port": 3080,
-                "user": "user",
-                "password": "password"
-            }
+    model_config = ConfigDict(json_schema_extra={
+        "example": {
+            "name": "My compute",
+            "host": "127.0.0.1",
+            "port": 3080,
+            "user": "user",
+            "password": "password"
+        }
+    })

-    @validator("compute_id", pre=True, always=True)
-    def default_compute_id(cls, v, values):
+    @model_validator(mode='before')
+    @classmethod
+    def set_default_compute_id_and_name(cls, data: Any) -> Any:

-        if v is not None:
-            return v
-        else:
-            protocol = values.get("protocol")
-            host = values.get("host")
-            port = values.get("port")
-            return uuid.uuid5(uuid.NAMESPACE_URL, f"{protocol}://{host}:{port}")
-
-    @validator("name", pre=True, always=True)
-    def generate_name(cls, name, values):
-
-        if name is not None:
-            return name
-        else:
-            protocol = values.get("protocol")
-            host = values.get("host")
-            port = values.get("port")
-            user = values.get("user")
-            if user:
-                # due to random user generated by 1.4 it's common to have a very long user
-                if len(user) > 14:
-                    user = user[:11] + "..."
-                return f"{protocol}://{user}@{host}:{port}"
-            else:
-                return f"{protocol}://{host}:{port}"
+        if "compute_id" not in data:
+            data['compute_id'] = uuid.uuid5(
+                uuid.NAMESPACE_URL,
+                f"{data.get('protocol')}://{data.get('host')}:{data.get('port')}"
+            )
+        if "name" not in data:
+            data['name'] = f"{data.get('protocol')}://{data.get('user', '')}@{data.get('host')}:{data.get('port')}"
+        return data


 class ComputeUpdate(ComputeBase):
@@ -107,14 +94,12 @@ class ComputeUpdate(ComputeBase):
     port: Optional[int] = Field(None, gt=0, le=65535)
     user: Optional[str] = None
     password: Optional[SecretStr] = None

-    class Config:
-        schema_extra = {
-            "example": {
-                "host": "10.0.0.1",
-                "port": 8080,
-            }
+    model_config = ConfigDict(json_schema_extra={
+        "example": {
+            "host": "10.0.0.1",
+            "port": 8080,
+        }
+    })


 class Capabilities(BaseModel):
@@ -143,9 +128,7 @@ class Compute(DateTimeModelMixin, ComputeBase):
     disk_usage_percent: Optional[float] = Field(None, description="Disk usage of the compute", ge=0, le=100)
     last_error: Optional[str] = Field(None, description="Last error found on the compute")
     capabilities: Optional[Capabilities] = None

-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)


 class ComputeVirtualBoxVM(BaseModel):
@@ -182,6 +165,4 @@ class AutoIdlePC(BaseModel):
     platform: str = Field(..., description="Cisco platform")
     image: str = Field(..., description="Image path")
     ram: int = Field(..., description="Amount of RAM in MB")

-    class Config:
-        schema_extra = {"example": {"platform": "c7200", "image": "/path/to/c7200_image.bin", "ram": 256}}
+    model_config = ConfigDict(json_schema_extra={"example": {"platform": "c7200", "image": "/path/to/c7200_image.bin", "ram": 256}})

@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-from pydantic import BaseModel, Field
+from pydantic import ConfigDict, BaseModel, Field
 from enum import Enum

 from .base import DateTimeModelMixin
@@ -41,6 +41,4 @@ class ImageBase(BaseModel):


 class Image(DateTimeModelMixin, ImageBase):

-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)
@@ -54,7 +54,7 @@ class LinkBase(BaseModel):
     Link data.
     """

-    nodes: Optional[List[LinkNode]] = Field(None, min_items=0, max_items=2)
+    nodes: Optional[List[LinkNode]] = Field(None, min_length=0, max_length=2)
     suspend: Optional[bool] = None
     link_style: Optional[LinkStyle] = None
     filters: Optional[dict] = None
@@ -63,7 +63,7 @@ class LinkBase(BaseModel):
 class LinkCreate(LinkBase):

     link_id: UUID = Field(default_factory=uuid4)
-    nodes: List[LinkNode] = Field(..., min_items=2, max_items=2)
+    nodes: List[LinkNode] = Field(..., min_length=2, max_length=2)


 class LinkUpdate(LinkBase):
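For list fields, the v1 constraints `min_items`/`max_items` are renamed to `min_length`/`max_length` in v2 (the same keywords now cover both strings and collections). A quick sketch with string endpoints instead of the real LinkNode model:

    from typing import List
    from pydantic import BaseModel, Field, ValidationError

    class LinkCreate(BaseModel):
        # simplified: a link must connect exactly two node endpoints
        nodes: List[str] = Field(..., min_length=2, max_length=2)  # v1: min_items=2, max_items=2

    LinkCreate(nodes=["node-a", "node-b"])      # ok
    try:
        LinkCreate(nodes=["node-a"])            # too short
    except ValidationError as e:
        print(e)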
@@ -14,8 +14,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-from pydantic import BaseModel, Field, validator
-from typing import List, Optional, Union
+from pydantic import BaseModel, Field, model_validator
+from typing import List, Optional, Union, Any
 from enum import Enum
 from uuid import UUID, uuid4

@@ -96,7 +96,7 @@ class NodePort(BaseModel):
     port_number: int = Field(..., description="Port slot")
     link_type: LinkType = Field(..., description="Type of link")
     data_link_types: dict = Field(..., description="Available PCAP types for capture")
-    mac_address: Union[str, None] = Field(None, regex="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$")
+    mac_address: Union[str, None] = Field(None, pattern="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$")


 class NodeBase(BaseModel):
@@ -117,7 +117,7 @@ class NodeBase(BaseModel):
         False, description="Automatically start the console when the node has started"
     )
     aux: Optional[int] = Field(None, gt=0, le=65535, description="Auxiliary console TCP port")
-    aux_type: Optional[ConsoleType]
+    aux_type: Optional[ConsoleType] = None
     properties: Optional[dict] = Field(default_factory=dict, description="Properties specific to an emulator")

     label: Optional[Label] = None
@@ -134,21 +134,18 @@ class NodeBase(BaseModel):
     first_port_name: Optional[str] = Field(None, description="Name of the first port")
     custom_adapters: Optional[List[CustomAdapter]] = None

-    @validator("port_name_format", pre=True, always=True)
-    def default_port_name_format(cls, v, values):
-        if v is None:
-            if "node_type" in values and values["node_type"] == NodeType.iou:
-                return "Ethernet{segment0}/{port0}"
-            return "Ethernet{0}"
-        return v
+    @model_validator(mode='before')
+    @classmethod
+    def set_default_port_name_format_and_port_segment_size(cls, data: Any) -> Any:

-    @validator("port_segment_size", pre=True, always=True)
-    def default_port_segment_size(cls, v, values):
-        if v is None:
-            if "node_type" in values and values["node_type"] == NodeType.iou:
-                return 4
-            return 0
-        return v
+        if "port_name_format" not in data:
+            if data.get('node_type') == NodeType.iou:
+                data['port_name_format'] = "Ethernet{segment0}/{port0}"
+                data['port_segment_size'] = 4
+            else:
+                data['port_name_format'] = "Ethernet{0}"
+                data['port_segment_size'] = 0
+        return data


 class NodeCreate(NodeBase):
@@ -15,7 +15,7 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

 from typing import Optional, List
-from pydantic import BaseModel, validator
+from pydantic import field_validator, ConfigDict, BaseModel
 from uuid import UUID
 from enum import Enum

@@ -53,11 +53,10 @@ class PermissionBase(BaseModel):
     path: str
     action: PermissionAction
     description: Optional[str] = None
+    model_config = ConfigDict(use_enum_values=True)

-    class Config:
-        use_enum_values = True
-
-    @validator("action", pre=True)
+    @field_validator("action", mode="before")
+    @classmethod
     def action_uppercase(cls, v):
         return v.upper()

@@ -81,9 +80,7 @@ class PermissionUpdate(PermissionBase):
 class Permission(DateTimeModelMixin, PermissionBase):

     permission_id: UUID

-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)


 class RoleBase(BaseModel):
@@ -116,6 +113,4 @@ class Role(DateTimeModelMixin, RoleBase):
     role_id: UUID
     is_builtin: bool
     permissions: List[Permission]

-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)

@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-from pydantic import BaseModel, Field
+from pydantic import ConfigDict, BaseModel, Field
 from typing import Optional, Union
 from enum import Enum
 from uuid import UUID
@@ -58,15 +58,11 @@ class TemplateCreate(TemplateBase):

     name: str
     template_type: NodeType

-    class Config:
-        extra = "allow"
+    model_config = ConfigDict(extra="allow")


 class TemplateUpdate(TemplateBase):

-    class Config:
-        extra = "allow"
+    model_config = ConfigDict(extra="allow")


 class Template(DateTimeModelMixin, TemplateBase):
@@ -77,10 +73,7 @@ class Template(DateTimeModelMixin, TemplateBase):
     symbol: str
     builtin: bool
     template_type: NodeType

-    class Config:
-        extra = "allow"
-        orm_mode = True
+    model_config = ConfigDict(extra="allow", from_attributes=True)


 class TemplateUsage(BaseModel):
@@ -44,7 +44,7 @@ class DockerTemplate(TemplateBase):
         description="Path of the web interface",
     )
     console_resolution: Optional[str] = Field(
-        "1024x768", regex="^[0-9]+x[0-9]+$", description="Console resolution for VNC"
+        "1024x768", pattern="^[0-9]+x[0-9]+$", description="Console resolution for VNC"
     )
     extra_hosts: Optional[str] = Field("", description="Docker extra hosts (added to /etc/hosts)")
     extra_volumes: Optional[List] = Field([], description="Additional directories to make persistent")

@@ -40,12 +40,12 @@ class DynamipsTemplate(TemplateBase):
     exec_area: Optional[int] = Field(64, description="Exec area value")
     mmap: Optional[bool] = Field(True, description="MMAP feature")
     mac_addr: Optional[str] = Field(
-        "", description="Base MAC address", regex="^([0-9a-fA-F]{4}\\.){2}[0-9a-fA-F]{4}$|^$"
+        "", description="Base MAC address", pattern="^([0-9a-fA-F]{4}\\.){2}[0-9a-fA-F]{4}$|^$"
     )
     system_id: Optional[str] = Field("FTX0945W0MY", description="System ID")
     startup_config: Optional[str] = Field("ios_base_startup-config.txt", description="IOS startup configuration file")
     private_config: Optional[str] = Field("", description="IOS private configuration file")
-    idlepc: Optional[str] = Field("", description="Idle-PC value", regex="^(0x[0-9a-fA-F]+)?$|^$")
+    idlepc: Optional[str] = Field("", description="Idle-PC value", pattern="^(0x[0-9a-fA-F]+)?$|^$")
     idlemax: Optional[int] = Field(500, description="Idlemax value")
     idlesleep: Optional[int] = Field(30, description="Idlesleep value")
     disk0: Optional[int] = Field(0, description="Disk0 size in MB")

@@ -22,14 +22,14 @@ from typing import Optional, List


 DEFAULT_PORTS = [
-    dict(port_number=0, name="Ethernet0"),
-    dict(port_number=1, name="Ethernet1"),
-    dict(port_number=2, name="Ethernet2"),
-    dict(port_number=3, name="Ethernet3"),
-    dict(port_number=4, name="Ethernet4"),
-    dict(port_number=5, name="Ethernet5"),
-    dict(port_number=6, name="Ethernet6"),
-    dict(port_number=7, name="Ethernet7"),
+    EthernetHubPort(port_number=0, name="Ethernet0"),
+    EthernetHubPort(port_number=1, name="Ethernet1"),
+    EthernetHubPort(port_number=2, name="Ethernet2"),
+    EthernetHubPort(port_number=3, name="Ethernet3"),
+    EthernetHubPort(port_number=4, name="Ethernet4"),
+    EthernetHubPort(port_number=5, name="Ethernet5"),
+    EthernetHubPort(port_number=6, name="Ethernet6"),
+    EthernetHubPort(port_number=7, name="Ethernet7"),
 ]

@@ -23,14 +23,14 @@ from typing import Optional, List
 from enum import Enum

 DEFAULT_PORTS = [
-    dict(port_number=0, name="Ethernet0", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=1, name="Ethernet1", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=2, name="Ethernet2", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=3, name="Ethernet3", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=4, name="Ethernet4", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=5, name="Ethernet5", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=6, name="Ethernet6", vlan=1, type="access", ethertype="0x8100"),
-    dict(port_number=7, name="Ethernet7", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=0, name="Ethernet0", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=1, name="Ethernet1", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=2, name="Ethernet2", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=3, name="Ethernet3", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=4, name="Ethernet4", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=5, name="Ethernet5", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=6, name="Ethernet6", vlan=1, type="access", ethertype="0x8100"),
+    EthernetSwitchPort(port_number=7, name="Ethernet7", vlan=1, type="access", ethertype="0x8100"),
 ]
@@ -45,7 +45,7 @@ class QemuTemplate(TemplateBase):
     adapters: Optional[int] = Field(1, ge=0, le=275, description="Number of adapters")
     adapter_type: Optional[QemuAdapterType] = Field("e1000", description="QEMU adapter type")
     mac_address: Optional[str] = Field(
-        "", description="QEMU MAC address", regex="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$|^$"
+        "", description="QEMU MAC address", pattern="^([0-9a-fA-F]{2}[:]){5}([0-9a-fA-F]{2})$|^$"
    )
     first_port_name: Optional[str] = Field("", description="Optional name of the first networking port example: eth0")
     port_name_format: Optional[str] = Field(

@@ -75,7 +75,7 @@ def main():

     with open(sys.argv[1]) as f:
         data = json.load(f)
-        Topology.parse_obj(data)
+        Topology.model_validate(data)


 if __name__ == "__main__":
@@ -16,7 +16,7 @@

 from datetime import datetime
 from typing import Optional
-from pydantic import EmailStr, BaseModel, Field, SecretStr
+from pydantic import ConfigDict, EmailStr, BaseModel, Field, SecretStr
 from uuid import UUID

 from .base import DateTimeModelMixin
@@ -27,24 +27,24 @@ class UserBase(BaseModel):
     Common user properties.
     """

-    username: Optional[str] = Field(None, min_length=3, regex="[a-zA-Z0-9_-]+$")
+    username: Optional[str] = Field(None, min_length=3, pattern="[a-zA-Z0-9_-]+$")
     is_active: bool = True
-    email: Optional[EmailStr]
-    full_name: Optional[str]
+    email: Optional[EmailStr] = None
+    full_name: Optional[str] = None


 class UserCreate(UserBase):
     """
-    Properties to create an user.
+    Properties to create a user.
     """

-    username: str = Field(..., min_length=3, regex="[a-zA-Z0-9_-]+$")
+    username: str = Field(..., min_length=3, pattern="[a-zA-Z0-9_-]+$")
     password: SecretStr = Field(..., min_length=6, max_length=100)


 class UserUpdate(UserBase):
     """
-    Properties to update an user.
+    Properties to update a user.
     """

     password: Optional[SecretStr] = Field(None, min_length=6, max_length=100)
@@ -56,8 +56,8 @@ class LoggedInUserUpdate(BaseModel):
     """

     password: Optional[SecretStr] = Field(None, min_length=6, max_length=100)
-    email: Optional[EmailStr]
-    full_name: Optional[str]
+    email: Optional[EmailStr] = None
+    full_name: Optional[str] = None


 class User(DateTimeModelMixin, UserBase):
@@ -65,9 +65,7 @@ class User(DateTimeModelMixin, UserBase):
     user_id: UUID
     last_login: Optional[datetime] = None
     is_superadmin: bool = False

-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)


 class UserGroupBase(BaseModel):
@@ -75,20 +73,20 @@ class UserGroupBase(BaseModel):
     Common user group properties.
     """

-    name: Optional[str] = Field(None, min_length=3, regex="[a-zA-Z0-9_-]+$")
+    name: Optional[str] = Field(None, min_length=3, pattern="[a-zA-Z0-9_-]+$")


 class UserGroupCreate(UserGroupBase):
     """
-    Properties to create an user group.
+    Properties to create a user group.
     """

-    name: Optional[str] = Field(..., min_length=3, regex="[a-zA-Z0-9_-]+$")
+    name: Optional[str] = Field(..., min_length=3, pattern="[a-zA-Z0-9_-]+$")


 class UserGroupUpdate(UserGroupBase):
     """
-    Properties to update an user group.
+    Properties to update a user group.
     """

     pass
@@ -98,9 +96,7 @@ class UserGroup(DateTimeModelMixin, UserGroupBase):

     user_group_id: UUID
     is_builtin: bool

-    class Config:
-        orm_mode = True
+    model_config = ConfigDict(from_attributes=True)


 class Credentials(BaseModel):
@@ -81,14 +81,14 @@ class QemuDiskImageBase(BaseModel):

     format: QemuDiskImageFormat = Field(..., description="Image format type")
     size: int = Field(..., description="Image size in Megabytes")
-    preallocation: Optional[QemuDiskImagePreallocation]
-    cluster_size: Optional[int]
-    refcount_bits: Optional[int]
-    lazy_refcounts: Optional[QemuDiskImageOnOff]
-    subformat: Optional[QemuDiskImageSubformat]
-    static: Optional[QemuDiskImageOnOff]
-    zeroed_grain: Optional[QemuDiskImageOnOff]
-    adapter_type: Optional[QemuDiskImageAdapterType]
+    preallocation: Optional[QemuDiskImagePreallocation] = None
+    cluster_size: Optional[int] = None
+    refcount_bits: Optional[int] = None
+    lazy_refcounts: Optional[QemuDiskImageOnOff] = None
+    subformat: Optional[QemuDiskImageSubformat] = None
+    static: Optional[QemuDiskImageOnOff] = None
+    zeroed_grain: Optional[QemuDiskImageOnOff] = None
+    adapter_type: Optional[QemuDiskImageAdapterType] = None


 class QemuDiskImageCreate(QemuDiskImageBase):
@@ -49,7 +49,7 @@ class ComputesService:
         compute = await self._controller.add_compute(
             compute_id=str(db_compute.compute_id),
             connect=connect,
-            **compute_create.dict(exclude_unset=True, exclude={"compute_id"}),
+            **compute_create.model_dump(exclude_unset=True, exclude={"compute_id"}),
         )
         self._controller.notification.controller_emit("compute.created", compute.asdict())
         return db_compute
@@ -66,7 +66,7 @@ class ComputesService:
     ) -> models.Compute:

         compute = self._controller.get_compute(str(compute_id))
-        await compute.update(**compute_update.dict(exclude_unset=True))
+        await compute.update(**compute_update.model_dump(exclude_unset=True))
         db_compute = await self._computes_repo.update_compute(compute_id, compute_update)
         if not db_compute:
             raise ControllerNotFoundError(f"Compute '{compute_id}' not found")

@@ -237,11 +237,11 @@ class TemplatesService:
             # get the default template settings
             create_settings = jsonable_encoder(template_create, exclude_unset=True)
             template_schema = TEMPLATE_TYPE_TO_SCHEMA[template_create.template_type]
-            template_settings = template_schema.parse_obj(create_settings).dict()
+            template_settings = template_schema.model_validate(create_settings).model_dump()
             if template_create.template_type == "dynamips":
                 # special case for Dynamips to cover all platform types that contain specific settings
                 dynamips_template_schema = DYNAMIPS_PLATFORM_TO_SCHEMA[template_settings["platform"]]
-                template_settings = dynamips_template_schema.parse_obj(create_settings).dict()
+                template_settings = dynamips_template_schema.model_validate(create_settings).model_dump()
         except pydantic.ValidationError as e:
             raise ControllerBadRequestError(f"JSON schema error received while creating new template: {e}")

@@ -287,7 +287,7 @@ class TemplatesService:
                 template_schema = DYNAMIPS_PLATFORM_TO_UPDATE_SCHEMA[db_template.platform]
             else:
                 template_schema = TEMPLATE_TYPE_TO_UPDATE_SCHEMA[db_template.template_type]
-            template_settings = template_schema.parse_obj(update_settings).dict(exclude_unset=True)
+            template_settings = template_schema.model_validate(update_settings).model_dump(exclude_unset=True)
         except pydantic.ValidationError as e:
             raise ControllerBadRequestError(f"JSON schema error received while updating template: {e}")

@@ -297,7 +297,7 @@ class TemplatesService:
         elif db_template.template_type == "iou" and "path" in template_settings:
             await self._remove_image(db_template.template_id, db_template.path)
         elif db_template.template_type == "qemu":
-            for key in template_update.dict().keys():
+            for key in template_update.model_dump().keys():
                 if key.endswith("_image") and key in template_settings:
                     await self._remove_image(db_template.template_id, db_template.__dict__[key])
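The `parse_obj(...).dict()` chains above become `model_validate(...).model_dump()`: validate raw settings against a schema, then dump them back to a plain dict with the schema's defaults applied. A small sketch with a toy template schema:

    from typing import Optional
    from pydantic import BaseModel, Field

    class QemuTemplateSchema(BaseModel):
        # toy stand-in for the real template schemas
        name: str
        ram: int = 256
        adapters: Optional[int] = Field(1, ge=0, le=275)

    create_settings = {"name": "debian", "ram": 1024}

    # v1: template_settings = QemuTemplateSchema.parse_obj(create_settings).dict()
    template_settings = QemuTemplateSchema.model_validate(create_settings).model_dump()
    print(template_settings)  # {'name': 'debian', 'ram': 1024, 'adapters': 1}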
@@ -1,5 +1,5 @@
 uvicorn==0.22.0 # v0.22.0 is the last to support Python 3.7
-fastapi==0.99.1
+fastapi==0.100.1
 python-multipart==0.0.6
 websockets==11.0.3
 aiohttp>=3.8.5,<3.9
@@ -15,7 +15,7 @@ aiosqlite==0.19.0
 alembic==1.11.1
 passlib[bcrypt]==1.7.4
 python-jose==3.3.0
-email-validator==1.3.1
+email-validator==2.0.0.post2
 watchfiles==0.19.0
 zstandard==0.21.0
 importlib_resources>=1.3
@@ -54,7 +54,7 @@ class TestComputeRoutes:
         params = {
             "compute_id": compute_id,
             "protocol": "http",
-            "host": "localhost",
+            "host": "127.0.0.1",
             "port": 84,
             "user": "julien",
             "password": "secure"}

@@ -90,7 +90,7 @@ async def test_create_project_with_supplier(app: FastAPI, client: AsyncClient, c

     supplier = {
         'logo': 'logo.png',
-        'url': 'http://example.com'
+        'url': 'http://example.com/'
     }
     params = {"name": "test", "project_id": "30010203-0405-0607-0809-0a0b0c0d0e0f", "supplier": supplier}
     response = await client.post(app.url_path_for("create_project"), json=params)

@@ -70,8 +70,8 @@ class TestUserRoutes:
         assert user_in_db.username == params["username"]

         # check that the user returned in the response is equal to the user in the database
-        created_user = User(**response.json()).json()
-        assert created_user == User.from_orm(user_in_db).json()
+        created_user = User(**response.json()).model_dump_json()
+        assert created_user == User.model_validate(user_in_db).model_dump_json()

     @pytest.mark.parametrize(
         "attr, value, status_code",
@@ -364,6 +364,7 @@ async def test_install_base_configs(controller, config, tmpdir):
         assert f.read() == 'test'


+@pytest.mark.asyncio
 @pytest.mark.parametrize(
     "builtin_disk",
     [
@@ -383,7 +384,7 @@ async def test_install_base_configs(controller, config, tmpdir):
 )
 async def test_install_builtin_disks(controller, config, tmpdir, builtin_disk):

-    config.set_section_config("Server", {"images_path": str(tmpdir)})
+    config.settings.Server.images_path = str(tmpdir)
     controller._install_builtin_disks()
     # we only install Qemu empty disks at this time
     assert os.path.exists(str(tmpdir / "QEMU" / builtin_disk))
@@ -74,7 +74,7 @@ def test_server_settings_to_list(tmpdir, setting: str, value: str, result: str):
         }
     })

-    assert config.settings.dict(exclude_unset=True)["Server"][setting] == result
+    assert config.settings.model_dump(exclude_unset=True)["Server"][setting] == result


 def test_reload(tmpdir):
@@ -109,7 +109,7 @@ def test_server_password_hidden():
     "settings, exception_expected",
     (
         ({"protocol": "https1"}, True),
-        ({"console_start_port_range": 15000}, False),
+        ({"console_start_port_range": 15000, "console_end_port_range": 20000}, False),
         ({"console_start_port_range": 0}, True),
         ({"console_start_port_range": 68000}, True),
         ({"console_end_port_range": 15000}, False),
@@ -86,14 +86,8 @@ def test_parse_arguments(capsys, config, tmpdir):
     server_config.enable_ssl = True
     assert server._parse_arguments([]).ssl

-    server_config.certfile = None
-    server_config.certkey = None
-
     assert server._parse_arguments(["--certfile", "bla"]).certfile == "bla"
-    assert server._parse_arguments([]).certfile is None
-
     assert server._parse_arguments(["--certkey", "blu"]).certkey == "blu"
-    assert server._parse_arguments([]).certkey is None

     assert server._parse_arguments(["-L"]).local
     assert server._parse_arguments(["--local"]).local