Mirror of https://github.com/microsoft/onefuzz.git (synced 2025-06-14 19:08:08 +00:00)
update python prereqs (#427)
Updates the following libraries in the service:

* azure-core
* azure-functions
* azure-identity
* azure-keyvault-keys
* azure-keyvault-secrets
* azure-mgmt-compute
* azure-mgmt-core
* azure-mgmt-loganalytics
* azure-mgmt-network
* azure-mgmt-resource
* azure-mgmt-storage
* azure-mgmt-subscription
* azure-storage-blob
* azure-storage-queue
* pydantic
* requests
* jsonpatch

Removes the following libraries from the service:

* azure-cli-core
* azure-cli-nspkg
* azure-mgmt-cosmosdb
* azure-servicebus

Updates the following libraries in the CLI:

* requests
* semver
* asciimatics
* pydantic
* tenacity

Updates the following libraries in onefuzztypes:

* pydantic

The primary "legacy" libraries are [azure-graphrbac](https://pypi.org/project/azure-graphrbac/) and azure-cosmosdb-table. The former has not been updated to use azure-identity yet. The latter is being rewritten as [azure-data-tables](https://pypi.org/project/azure-data-tables/), but that replacement is still in early beta.
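On the service side, these bumps also mean moving from the track-1 authentication helpers (`MSIAuthentication`, `get_client_from_cli_profile`) to `azure-identity`. A minimal sketch of the new client construction, assuming the subscription id comes from a hypothetical `AZURE_SUBSCRIPTION_ID` environment variable rather than the service's own `get_subscription()` helper:

```python
import os

from azure.identity import DefaultAzureCredential
from azure.mgmt.resource import ResourceManagementClient


def get_resource_client() -> ResourceManagementClient:
    # DefaultAzureCredential walks a chain of credential sources (environment,
    # managed identity, Azure CLI, ...), replacing the old split between
    # MSIAuthentication and get_client_from_cli_profile.
    return ResourceManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"],
    )
```

The track-2 management clients accept any azure-identity credential directly, so the same pattern applies to the compute, network, storage, and log analytics clients touched below.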
@@ -1165,7 +1165,6 @@ Each event will be submitted via HTTP POST to the user provided URL.
"items": {
"$ref": "#/definitions/TaskDebugFlag"
},
"title": "Debug",
"type": "array"
},
"job_id": {
@@ -2549,7 +2548,6 @@ Each event will be submitted via HTTP POST to the user provided URL.
"items": {
"$ref": "#/definitions/TaskDebugFlag"
},
"title": "Debug",
"type": "array"
},
"job_id": {
@@ -47,7 +47,6 @@ def get(req: func.HttpRequest) -> func.HttpResponse:
StorageType.corpus,
read=True,
write=True,
create=True,
delete=True,
list=True,
),
src/api-service/__app__/onefuzzlib/azure/compute.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from azure.mgmt.compute import ComputeManagementClient
from memoization import cached

from .creds import get_identity, get_subscription


@cached
def get_compute_client() -> ComputeManagementClient:
    return ComputeManagementClient(get_identity(), get_subscription())
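The new `compute.py` module above memoizes a single `ComputeManagementClient` per process. A rough standalone equivalent, using only the standard library for caching and a hypothetical `AZURE_SUBSCRIPTION_ID` environment variable in place of the service's `get_identity()`/`get_subscription()` helpers:

```python
import os
from functools import lru_cache

from azure.identity import DefaultAzureCredential
from azure.mgmt.compute import ComputeManagementClient


@lru_cache(maxsize=1)
def get_compute_client() -> ComputeManagementClient:
    # Cached so repeated callers share one authenticated client.
    return ComputeManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"],
    )


def count_disks(resource_group: str) -> int:
    # Example caller: iterate a pageable result from the track-2 SDK.
    return sum(1 for _ in get_compute_client().disks.list_by_resource_group(resource_group))
```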
@ -10,7 +10,16 @@ import urllib.parse
|
||||
from typing import Dict, Optional, Union, cast
|
||||
|
||||
from azure.common import AzureHttpError, AzureMissingResourceHttpError
|
||||
from azure.storage.blob import BlobPermissions, BlockBlobService, ContainerPermissions
|
||||
from azure.core.exceptions import ResourceNotFoundError
|
||||
from azure.storage.blob import (
|
||||
BlobClient,
|
||||
BlobSasPermissions,
|
||||
BlobServiceClient,
|
||||
ContainerClient,
|
||||
ContainerSasPermissions,
|
||||
generate_blob_sas,
|
||||
generate_container_sas,
|
||||
)
|
||||
from memoization import cached
|
||||
from onefuzztypes.primitives import Container
|
||||
|
||||
@ -19,47 +28,41 @@ from .storage import (
|
||||
choose_account,
|
||||
get_accounts,
|
||||
get_storage_account_name_key,
|
||||
get_storage_account_name_key_by_name,
|
||||
)
|
||||
|
||||
|
||||
def get_url(account_name: str) -> str:
|
||||
return f"https://{account_name}.blob.core.windows.net/"
|
||||
|
||||
|
||||
@cached
|
||||
def get_blob_service(account_id: str) -> BlockBlobService:
|
||||
def get_blob_service(account_id: str) -> BlobServiceClient:
|
||||
logging.debug("getting blob container (account_id: %s)", account_id)
|
||||
account_name, account_key = get_storage_account_name_key(account_id)
|
||||
service = BlockBlobService(account_name=account_name, account_key=account_key)
|
||||
account_url = get_url(account_name)
|
||||
service = BlobServiceClient(account_url=account_url, credential=account_key)
|
||||
return service
|
||||
|
||||
|
||||
def get_service_by_container(
|
||||
container: Container, storage_type: StorageType
|
||||
) -> Optional[BlockBlobService]:
|
||||
account = get_account_by_container(container, storage_type)
|
||||
if account is None:
|
||||
return None
|
||||
service = get_blob_service(account)
|
||||
return service
|
||||
|
||||
|
||||
def container_exists_on_account(container: Container, account_id: str) -> bool:
|
||||
def container_metadata(
|
||||
container: Container, account_id: str
|
||||
) -> Optional[Dict[str, str]]:
|
||||
try:
|
||||
get_blob_service(account_id).get_container_properties(container)
|
||||
return True
|
||||
except AzureHttpError:
|
||||
return False
|
||||
|
||||
|
||||
def container_metadata(container: Container, account: str) -> Optional[Dict[str, str]]:
|
||||
try:
|
||||
result = get_blob_service(account).get_container_metadata(container)
|
||||
result = (
|
||||
get_blob_service(account_id)
|
||||
.get_container_client(container)
|
||||
.get_container_properties()
|
||||
)
|
||||
return cast(Dict[str, str], result)
|
||||
except AzureHttpError:
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def get_account_by_container(
|
||||
def find_container(
|
||||
container: Container, storage_type: StorageType
|
||||
) -> Optional[str]:
|
||||
) -> Optional[ContainerClient]:
|
||||
accounts = get_accounts(storage_type)
|
||||
|
||||
# check secondary accounts first by searching in reverse.
|
||||
@ -70,13 +73,17 @@ def get_account_by_container(
|
||||
# Secondary accounts, if they exist, are preferred for containers and have
|
||||
# increased IOP rates, this should be a slight optimization
|
||||
for account in reversed(accounts):
|
||||
if container_exists_on_account(container, account):
|
||||
return account
|
||||
client = get_blob_service(account).get_container_client(container)
|
||||
try:
|
||||
client.get_container_properties()
|
||||
return client
|
||||
except ResourceNotFoundError:
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
def container_exists(container: Container, storage_type: StorageType) -> bool:
|
||||
return get_account_by_container(container, storage_type) is not None
|
||||
return find_container(container, storage_type) is not None
|
||||
|
||||
|
||||
def get_containers(storage_type: StorageType) -> Dict[str, Dict[str, str]]:
|
||||
@ -98,11 +105,11 @@ def get_containers(storage_type: StorageType) -> Dict[str, Dict[str, str]]:
|
||||
def get_container_metadata(
|
||||
container: Container, storage_type: StorageType
|
||||
) -> Optional[Dict[str, str]]:
|
||||
account = get_account_by_container(container, storage_type)
|
||||
if account is None:
|
||||
client = find_container(container, storage_type)
|
||||
if client is None:
|
||||
return None
|
||||
|
||||
return container_metadata(container, account)
|
||||
result = client.get_container_properties().metadata
|
||||
return cast(Dict[str, str], result)
|
||||
|
||||
|
||||
def create_container(
|
||||
@ -110,12 +117,12 @@ def create_container(
|
||||
storage_type: StorageType,
|
||||
metadata: Optional[Dict[str, str]],
|
||||
) -> Optional[str]:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if service is None:
|
||||
client = find_container(container, storage_type)
|
||||
if client is None:
|
||||
account = choose_account(storage_type)
|
||||
service = get_blob_service(account)
|
||||
client = get_blob_service(account).get_container_client(container)
|
||||
try:
|
||||
service.create_container(container, metadata=metadata)
|
||||
client.create_container(metadata=metadata)
|
||||
except AzureHttpError as err:
|
||||
logging.error(
|
||||
(
|
||||
@ -130,11 +137,8 @@ def create_container(
|
||||
return None
|
||||
|
||||
return get_container_sas_url_service(
|
||||
container,
|
||||
service,
|
||||
client,
|
||||
read=True,
|
||||
add=True,
|
||||
create=True,
|
||||
write=True,
|
||||
delete=True,
|
||||
list=True,
|
||||
@ -152,26 +156,40 @@ def delete_container(container: Container, storage_type: StorageType) -> bool:
|
||||
|
||||
|
||||
def get_container_sas_url_service(
|
||||
container: Container,
|
||||
service: BlockBlobService,
|
||||
client: ContainerClient,
|
||||
*,
|
||||
read: bool = False,
|
||||
add: bool = False,
|
||||
create: bool = False,
|
||||
write: bool = False,
|
||||
delete: bool = False,
|
||||
list: bool = False,
|
||||
delete_previous_version: bool = False,
|
||||
tag: bool = False,
|
||||
) -> str:
|
||||
expiry = datetime.datetime.utcnow() + datetime.timedelta(days=30)
|
||||
permission = ContainerPermissions(read, add, create, write, delete, list)
|
||||
account_name = client.account_name
|
||||
container_name = client.container_name
|
||||
account_key = get_storage_account_name_key_by_name(account_name)
|
||||
|
||||
sas_token = service.generate_container_shared_access_signature(
|
||||
container, permission=permission, expiry=expiry
|
||||
sas = generate_container_sas(
|
||||
account_name,
|
||||
container_name,
|
||||
account_key=account_key,
|
||||
permission=ContainerSasPermissions(
|
||||
read=read,
|
||||
write=write,
|
||||
delete=delete,
|
||||
list=list,
|
||||
delete_previous_version=delete_previous_version,
|
||||
tag=tag,
|
||||
),
|
||||
expiry=datetime.datetime.utcnow() + datetime.timedelta(days=30),
|
||||
)
|
||||
|
||||
url = service.make_container_url(container, sas_token=sas_token)
|
||||
url = url.replace("?restype=container&", "?")
|
||||
return str(url)
|
||||
with_sas = ContainerClient(
|
||||
get_url(account_name),
|
||||
container_name=container_name,
|
||||
credential=sas,
|
||||
)
|
||||
return cast(str, with_sas.url)
|
||||
|
||||
|
||||
def get_container_sas_url(
|
||||
@ -179,22 +197,17 @@ def get_container_sas_url(
|
||||
storage_type: StorageType,
|
||||
*,
|
||||
read: bool = False,
|
||||
add: bool = False,
|
||||
create: bool = False,
|
||||
write: bool = False,
|
||||
delete: bool = False,
|
||||
list: bool = False,
|
||||
) -> str:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if not service:
|
||||
client = find_container(container, storage_type)
|
||||
if not client:
|
||||
raise Exception("unable to create container sas for missing container")
|
||||
|
||||
return get_container_sas_url_service(
|
||||
container,
|
||||
service,
|
||||
client,
|
||||
read=read,
|
||||
add=add,
|
||||
create=create,
|
||||
write=write,
|
||||
delete=delete,
|
||||
list=list,
|
||||
@ -211,26 +224,45 @@ def get_file_sas_url(
|
||||
create: bool = False,
|
||||
write: bool = False,
|
||||
delete: bool = False,
|
||||
list: bool = False,
|
||||
delete_previous_version: bool = False,
|
||||
tag: bool = False,
|
||||
days: int = 30,
|
||||
hours: int = 0,
|
||||
minutes: int = 0,
|
||||
) -> str:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if not service:
|
||||
client = find_container(container, storage_type)
|
||||
if not client:
|
||||
raise Exception("unable to find container: %s - %s" % (container, storage_type))
|
||||
|
||||
account_key = get_storage_account_name_key_by_name(client.account_name)
|
||||
expiry = datetime.datetime.utcnow() + datetime.timedelta(
|
||||
days=days, hours=hours, minutes=minutes
|
||||
)
|
||||
permission = BlobPermissions(read, add, create, write, delete, list)
|
||||
|
||||
sas_token = service.generate_blob_shared_access_signature(
|
||||
container, name, permission=permission, expiry=expiry
|
||||
permission = BlobSasPermissions(
|
||||
read=read,
|
||||
add=add,
|
||||
create=create,
|
||||
write=write,
|
||||
delete=delete,
|
||||
delete_previous_version=delete_previous_version,
|
||||
tag=tag,
|
||||
)
|
||||
sas = generate_blob_sas(
|
||||
client.account_name,
|
||||
container,
|
||||
name,
|
||||
account_key=account_key,
|
||||
permission=permission,
|
||||
expiry=expiry,
|
||||
)
|
||||
|
||||
url = service.make_blob_url(container, name, sas_token=sas_token)
|
||||
return str(url)
|
||||
with_sas = BlobClient(
|
||||
get_url(client.account_name),
|
||||
container,
|
||||
name,
|
||||
credential=sas,
|
||||
)
|
||||
return cast(str, with_sas.url)
|
||||
|
||||
|
||||
def save_blob(
|
||||
@ -239,49 +271,43 @@ def save_blob(
|
||||
data: Union[str, bytes],
|
||||
storage_type: StorageType,
|
||||
) -> None:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if not service:
|
||||
client = find_container(container, storage_type)
|
||||
if not client:
|
||||
raise Exception("unable to find container: %s - %s" % (container, storage_type))
|
||||
|
||||
if isinstance(data, str):
|
||||
service.create_blob_from_text(container, name, data)
|
||||
elif isinstance(data, bytes):
|
||||
service.create_blob_from_bytes(container, name, data)
|
||||
client.get_blob_client(name).upload_blob(data, overwrite=True)
|
||||
|
||||
|
||||
def get_blob(
|
||||
container: Container, name: str, storage_type: StorageType
|
||||
) -> Optional[bytes]:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if not service:
|
||||
client = find_container(container, storage_type)
|
||||
if not client:
|
||||
return None
|
||||
|
||||
try:
|
||||
blob = service.get_blob_to_bytes(container, name).content
|
||||
return cast(bytes, blob)
|
||||
return cast(
|
||||
bytes, client.get_blob_client(name).download_blob().content_as_bytes()
|
||||
)
|
||||
except AzureMissingResourceHttpError:
|
||||
return None
|
||||
|
||||
|
||||
def blob_exists(container: Container, name: str, storage_type: StorageType) -> bool:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if not service:
|
||||
client = find_container(container, storage_type)
|
||||
if not client:
|
||||
return False
|
||||
|
||||
try:
|
||||
service.get_blob_properties(container, name)
|
||||
return True
|
||||
except AzureMissingResourceHttpError:
|
||||
return False
|
||||
return cast(bool, client.get_blob_client(name).exists())
|
||||
|
||||
|
||||
def delete_blob(container: Container, name: str, storage_type: StorageType) -> bool:
|
||||
service = get_service_by_container(container, storage_type)
|
||||
if not service:
|
||||
client = find_container(container, storage_type)
|
||||
if not client:
|
||||
return False
|
||||
|
||||
try:
|
||||
service.delete_blob(container, name)
|
||||
client.get_blob_client(name).delete_blob()
|
||||
return True
|
||||
except AzureMissingResourceHttpError:
|
||||
return False
|
||||
|
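The containers/blob.py hunks above replace the azure-storage-blob 2.x `BlockBlobService` helpers with the 12.x clients, where SAS tokens are produced by module-level functions rather than service methods. A minimal sketch of the new container-SAS flow, assuming the storage account name and key are already in hand (the service resolves them via `get_storage_account_name_key_by_name`):

```python
import datetime

from azure.storage.blob import (
    ContainerClient,
    ContainerSasPermissions,
    generate_container_sas,
)


def container_sas_url(account_name: str, account_key: str, container: str) -> str:
    # Build a read/list SAS valid for 30 days, then attach it to a
    # ContainerClient so the returned URL already embeds the token.
    sas = generate_container_sas(
        account_name,
        container,
        account_key=account_key,
        permission=ContainerSasPermissions(read=True, list=True),
        expiry=datetime.datetime.utcnow() + datetime.timedelta(days=30),
    )
    client = ContainerClient(
        f"https://{account_name}.blob.core.windows.net/",
        container_name=container,
        credential=sas,
    )
    return client.url
```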
@ -7,8 +7,6 @@ import os
|
||||
from typing import Any, List
|
||||
from uuid import UUID
|
||||
|
||||
from azure.cli.core import CLIError
|
||||
from azure.common.client_factory import get_client_from_cli_profile
|
||||
from azure.graphrbac import GraphRbacManagementClient
|
||||
from azure.graphrbac.models import CheckGroupMembershipParameters
|
||||
from azure.identity import DefaultAzureCredential
|
||||
@ -25,24 +23,16 @@ from .monkeypatch import allow_more_workers, reduce_logging
|
||||
|
||||
@cached
|
||||
def get_msi() -> MSIAuthentication:
|
||||
allow_more_workers()
|
||||
reduce_logging()
|
||||
return MSIAuthentication()
|
||||
|
||||
|
||||
@cached
|
||||
def mgmt_client_factory(client_class: Any) -> Any:
|
||||
def get_identity() -> DefaultAzureCredential:
|
||||
allow_more_workers()
|
||||
reduce_logging()
|
||||
try:
|
||||
return get_client_from_cli_profile(client_class)
|
||||
except CLIError:
|
||||
pass
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if issubclass(client_class, SubscriptionClient):
|
||||
return client_class(get_msi())
|
||||
else:
|
||||
return client_class(get_msi(), get_subscription())
|
||||
return DefaultAzureCredential()
|
||||
|
||||
|
||||
@cached
|
||||
@ -52,7 +42,9 @@ def get_base_resource_group() -> Any: # should be str
|
||||
|
||||
@cached
|
||||
def get_base_region() -> Any: # should be str
|
||||
client = mgmt_client_factory(ResourceManagementClient)
|
||||
client = ResourceManagementClient(
|
||||
credential=get_identity(), subscription_id=get_subscription()
|
||||
)
|
||||
group = client.resource_groups.get(get_base_resource_group())
|
||||
return group.location
|
||||
|
||||
@ -98,14 +90,15 @@ DAY_IN_SECONDS = 60 * 60 * 24
|
||||
|
||||
@cached(ttl=DAY_IN_SECONDS)
|
||||
def get_regions() -> List[str]:
|
||||
client = mgmt_client_factory(SubscriptionClient)
|
||||
subscription = get_subscription()
|
||||
client = SubscriptionClient(credential=get_identity())
|
||||
locations = client.subscriptions.list_locations(subscription)
|
||||
return sorted([x.name for x in locations])
|
||||
|
||||
|
||||
def get_graph_client() -> Any:
|
||||
return mgmt_client_factory(GraphRbacManagementClient)
|
||||
@cached
|
||||
def get_graph_client() -> GraphRbacManagementClient:
|
||||
return GraphRbacManagementClient(get_msi(), get_subscription())
|
||||
|
||||
|
||||
def is_member_of(group_id: str, member_id: str) -> bool:
|
||||
@ -133,7 +126,9 @@ def get_scaleset_identity_resource_path() -> str:
|
||||
@cached
|
||||
def get_scaleset_principal_id() -> UUID:
|
||||
api_version = "2018-11-30" # matches the apiversion in the deployment template
|
||||
client = mgmt_client_factory(ResourceManagementClient)
|
||||
client = ResourceManagementClient(
|
||||
credential=get_identity(), subscription_id=get_subscription()
|
||||
)
|
||||
uid = client.resources.get_by_id(get_scaleset_identity_resource_path(), api_version)
|
||||
return UUID(uid.properties["principalId"])
|
||||
|
||||
|
@@ -6,24 +6,24 @@
import logging
from typing import Any

from azure.mgmt.compute import ComputeManagementClient
from azure.core.exceptions import ResourceNotFoundError
from msrestazure.azure_exceptions import CloudError

from .creds import mgmt_client_factory
from .compute import get_compute_client


def list_disks(resource_group: str) -> Any:
    logging.info("listing disks %s", resource_group)
    compute_client = mgmt_client_factory(ComputeManagementClient)
    compute_client = get_compute_client()
    return compute_client.disks.list_by_resource_group(resource_group)


def delete_disk(resource_group: str, name: str) -> bool:
    logging.info("deleting disks %s : %s", resource_group, name)
    compute_client = mgmt_client_factory(ComputeManagementClient)
    compute_client = get_compute_client()
    try:
        compute_client.disks.delete(resource_group, name)
        compute_client.disks.begin_delete(resource_group, name)
        return True
    except CloudError as err:
    except (ResourceNotFoundError, CloudError) as err:
        logging.error("unable to delete disk: %s", err)
        return False
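A recurring change in this and the following hunks is that mutating operations on the track-2 management clients gain a `begin_` prefix and return an `LROPoller` instead of blocking. The service mostly fires and forgets; a sketch of waiting for completion instead, again assuming a hypothetical `AZURE_SUBSCRIPTION_ID` environment variable:

```python
import os

from azure.identity import DefaultAzureCredential
from azure.mgmt.compute import ComputeManagementClient


def delete_disk_and_wait(resource_group: str, disk_name: str) -> None:
    client = ComputeManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id=os.environ["AZURE_SUBSCRIPTION_ID"],
    )
    # begin_delete returns immediately with an LROPoller; result() blocks
    # until the long-running deletion completes (or raises on failure).
    poller = client.disks.begin_delete(resource_group, disk_name)
    poller.result()
```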
@ -5,7 +5,7 @@
|
||||
|
||||
from typing import Union
|
||||
|
||||
from azure.mgmt.compute import ComputeManagementClient
|
||||
from azure.core.exceptions import ResourceNotFoundError
|
||||
from memoization import cached
|
||||
from msrestazure.azure_exceptions import CloudError
|
||||
from msrestazure.tools import parse_resource_id
|
||||
@ -13,19 +13,19 @@ from onefuzztypes.enums import OS, ErrorCode
|
||||
from onefuzztypes.models import Error
|
||||
from onefuzztypes.primitives import Region
|
||||
|
||||
from .creds import mgmt_client_factory
|
||||
from .compute import get_compute_client
|
||||
|
||||
|
||||
@cached(ttl=60)
|
||||
def get_os(region: Region, image: str) -> Union[Error, OS]:
|
||||
client = mgmt_client_factory(ComputeManagementClient)
|
||||
client = get_compute_client()
|
||||
parsed = parse_resource_id(image)
|
||||
if "resource_group" in parsed:
|
||||
try:
|
||||
name = client.images.get(
|
||||
parsed["resource_group"], parsed["name"]
|
||||
).storage_profile.os_disk.os_type.name
|
||||
except CloudError as err:
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
return Error(code=ErrorCode.INVALID_IMAGE, errors=[str(err)])
|
||||
else:
|
||||
publisher, offer, sku, version = image.split(":")
|
||||
@ -36,7 +36,7 @@ def get_os(region: Region, image: str) -> Union[Error, OS]:
|
||||
)[0].name
|
||||
name = client.virtual_machine_images.get(
|
||||
region, publisher, offer, sku, version
|
||||
).os_disk_image.operating_system.name
|
||||
except CloudError as err:
|
||||
).os_disk_image.operating_system.lower()
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
return Error(code=ErrorCode.INVALID_IMAGE, errors=[str(err)])
|
||||
return OS[name]
|
||||
|
@ -8,13 +8,14 @@ import os
|
||||
from typing import Any, Dict, Optional, Union
|
||||
from uuid import UUID
|
||||
|
||||
from azure.mgmt.network import NetworkManagementClient
|
||||
from azure.core.exceptions import ResourceNotFoundError
|
||||
from msrestazure.azure_exceptions import CloudError
|
||||
from msrestazure.tools import parse_resource_id
|
||||
from onefuzztypes.enums import ErrorCode
|
||||
from onefuzztypes.models import Error
|
||||
|
||||
from .creds import get_base_resource_group, mgmt_client_factory
|
||||
from .creds import get_base_resource_group
|
||||
from .network_mgmt_client import get_network_client
|
||||
from .subnet import create_virtual_network, get_subnet_id
|
||||
from .vmss import get_instance_id
|
||||
|
||||
@ -26,7 +27,7 @@ def get_scaleset_instance_ip(scaleset: UUID, machine_id: UUID) -> Optional[str]:
|
||||
|
||||
resource_group = get_base_resource_group()
|
||||
|
||||
client = mgmt_client_factory(NetworkManagementClient)
|
||||
client = get_network_client()
|
||||
intf = client.network_interfaces.list_virtual_machine_scale_set_network_interfaces(
|
||||
resource_group, str(scaleset)
|
||||
)
|
||||
@ -40,7 +41,7 @@ def get_scaleset_instance_ip(scaleset: UUID, machine_id: UUID) -> Optional[str]:
|
||||
if config.private_ip_address is None:
|
||||
continue
|
||||
return str(config.private_ip_address)
|
||||
except CloudError:
|
||||
except (ResourceNotFoundError, CloudError):
|
||||
# this can fail if an interface is removed during the iteration
|
||||
pass
|
||||
|
||||
@ -49,53 +50,53 @@ def get_scaleset_instance_ip(scaleset: UUID, machine_id: UUID) -> Optional[str]:
|
||||
|
||||
def get_ip(resource_group: str, name: str) -> Optional[Any]:
|
||||
logging.info("getting ip %s:%s", resource_group, name)
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
try:
|
||||
return network_client.public_ip_addresses.get(resource_group, name)
|
||||
except CloudError:
|
||||
except (ResourceNotFoundError, CloudError):
|
||||
return None
|
||||
|
||||
|
||||
def delete_ip(resource_group: str, name: str) -> Any:
|
||||
logging.info("deleting ip %s:%s", resource_group, name)
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
return network_client.public_ip_addresses.delete(resource_group, name)
|
||||
network_client = get_network_client()
|
||||
return network_client.public_ip_addresses.begin_delete(resource_group, name)
|
||||
|
||||
|
||||
def create_ip(resource_group: str, name: str, location: str) -> Any:
|
||||
logging.info("creating ip for %s:%s in %s", resource_group, name, location)
|
||||
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
params: Dict[str, Union[str, Dict[str, str]]] = {
|
||||
"location": location,
|
||||
"public_ip_allocation_method": "Dynamic",
|
||||
}
|
||||
if "ONEFUZZ_OWNER" in os.environ:
|
||||
params["tags"] = {"OWNER": os.environ["ONEFUZZ_OWNER"]}
|
||||
return network_client.public_ip_addresses.create_or_update(
|
||||
return network_client.public_ip_addresses.begin_create_or_update(
|
||||
resource_group, name, params
|
||||
)
|
||||
|
||||
|
||||
def get_public_nic(resource_group: str, name: str) -> Optional[Any]:
|
||||
logging.info("getting nic: %s %s", resource_group, name)
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
try:
|
||||
return network_client.network_interfaces.get(resource_group, name)
|
||||
except CloudError:
|
||||
except (ResourceNotFoundError, CloudError):
|
||||
return None
|
||||
|
||||
|
||||
def delete_nic(resource_group: str, name: str) -> Optional[Any]:
|
||||
logging.info("deleting nic %s:%s", resource_group, name)
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
return network_client.network_interfaces.delete(resource_group, name)
|
||||
network_client = get_network_client()
|
||||
return network_client.network_interfaces.begin_delete(resource_group, name)
|
||||
|
||||
|
||||
def create_public_nic(resource_group: str, name: str, location: str) -> Optional[Error]:
|
||||
logging.info("creating nic for %s:%s in %s", resource_group, name, location)
|
||||
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
subnet_id = get_subnet_id(resource_group, location)
|
||||
if not subnet_id:
|
||||
return create_virtual_network(resource_group, location, location)
|
||||
@ -119,8 +120,10 @@ def create_public_nic(resource_group: str, name: str, location: str) -> Optional
|
||||
params["tags"] = {"OWNER": os.environ["ONEFUZZ_OWNER"]}
|
||||
|
||||
try:
|
||||
network_client.network_interfaces.create_or_update(resource_group, name, params)
|
||||
except CloudError as err:
|
||||
network_client.network_interfaces.begin_create_or_update(
|
||||
resource_group, name, params
|
||||
)
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
if "RetryableError" not in repr(err):
|
||||
return Error(
|
||||
code=ErrorCode.VM_CREATE_FAILED,
|
||||
@ -131,7 +134,7 @@ def create_public_nic(resource_group: str, name: str, location: str) -> Optional
|
||||
|
||||
def get_public_ip(resource_id: str) -> Optional[str]:
|
||||
logging.info("getting ip for %s", resource_id)
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
resource = parse_resource_id(resource_id)
|
||||
ip = (
|
||||
network_client.network_interfaces.get(
|
||||
|
@@ -4,24 +4,24 @@
# Licensed under the MIT License.

import os
from typing import Any, Dict
from typing import Dict

from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from memoization import cached

from .creds import get_base_resource_group, mgmt_client_factory
from .creds import get_base_resource_group, get_identity, get_subscription


@cached(ttl=60)
def get_montior_client() -> Any:
    return mgmt_client_factory(LogAnalyticsManagementClient)
@cached
def get_monitor_client() -> LogAnalyticsManagementClient:
    return LogAnalyticsManagementClient(get_identity(), get_subscription())


@cached(ttl=60)
def get_monitor_settings() -> Dict[str, str]:
    resource_group = get_base_resource_group()
    workspace_name = os.environ["ONEFUZZ_MONITOR"]
    client = get_montior_client()
    client = get_monitor_client()
    customer_id = client.workspaces.get(resource_group, workspace_name).customer_id
    shared_key = client.shared_keys.get_shared_keys(
        resource_group, workspace_name
@@ -0,0 +1,9 @@
from azure.mgmt.network import NetworkManagementClient
from memoization import cached

from .creds import get_identity, get_subscription


@cached
def get_network_client() -> NetworkManagementClient:
    return NetworkManagementClient(get_identity(), get_subscription())
@ -7,13 +7,14 @@ import logging
|
||||
import os
|
||||
import random
|
||||
from enum import Enum
|
||||
from typing import List, Tuple
|
||||
from typing import List, Tuple, cast
|
||||
|
||||
from azure.identity import DefaultAzureCredential
|
||||
from azure.mgmt.storage import StorageManagementClient
|
||||
from memoization import cached
|
||||
from msrestazure.tools import parse_resource_id
|
||||
|
||||
from .creds import get_base_resource_group, mgmt_client_factory
|
||||
from .creds import get_base_resource_group, get_subscription
|
||||
|
||||
|
||||
class StorageType(Enum):
|
||||
@ -21,6 +22,13 @@ class StorageType(Enum):
|
||||
config = "config"
|
||||
|
||||
|
||||
@cached
|
||||
def get_mgmt_client() -> StorageManagementClient:
|
||||
return StorageManagementClient(
|
||||
credential=DefaultAzureCredential(), subscription_id=get_subscription()
|
||||
)
|
||||
|
||||
|
||||
@cached
|
||||
def get_fuzz_storage() -> str:
|
||||
return os.environ["ONEFUZZ_DATA_STORAGE"]
|
||||
@ -54,16 +62,19 @@ def get_accounts(storage_type: StorageType) -> List[str]:
|
||||
|
||||
@cached
|
||||
def get_storage_account_name_key(account_id: str) -> Tuple[str, str]:
|
||||
client = mgmt_client_factory(StorageManagementClient)
|
||||
resource = parse_resource_id(account_id)
|
||||
key = (
|
||||
client.storage_accounts.list_keys(resource["resource_group"], resource["name"])
|
||||
.keys[0]
|
||||
.value
|
||||
)
|
||||
key = get_storage_account_name_key_by_name(resource["name"])
|
||||
return resource["name"], key
|
||||
|
||||
|
||||
@cached
|
||||
def get_storage_account_name_key_by_name(account_name: str) -> str:
|
||||
client = get_mgmt_client()
|
||||
group = get_base_resource_group()
|
||||
key = client.storage_accounts.list_keys(group, account_name).keys[0].value
|
||||
return cast(str, key)
|
||||
|
||||
|
||||
def choose_account(storage_type: StorageType) -> str:
|
||||
accounts = get_accounts(storage_type)
|
||||
if not accounts:
|
||||
@ -83,7 +94,7 @@ def corpus_accounts() -> List[str]:
|
||||
skip = get_func_storage()
|
||||
results = [get_fuzz_storage()]
|
||||
|
||||
client = mgmt_client_factory(StorageManagementClient)
|
||||
client = get_mgmt_client()
|
||||
group = get_base_resource_group()
|
||||
for account in client.storage_accounts.list_by_resource_group(group):
|
||||
# protection from someone adding the corpus tag to the config account
|
||||
|
@ -7,20 +7,20 @@ import logging
|
||||
import os
|
||||
from typing import Any, Optional, Union, cast
|
||||
|
||||
from azure.mgmt.network import NetworkManagementClient
|
||||
from azure.core.exceptions import ResourceNotFoundError
|
||||
from msrestazure.azure_exceptions import CloudError
|
||||
from onefuzztypes.enums import ErrorCode
|
||||
from onefuzztypes.models import Error
|
||||
|
||||
from .creds import mgmt_client_factory
|
||||
from .network_mgmt_client import get_network_client
|
||||
|
||||
|
||||
def get_subnet_id(resource_group: str, name: str) -> Optional[str]:
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
try:
|
||||
subnet = network_client.subnets.get(resource_group, name, name)
|
||||
return cast(str, subnet.id)
|
||||
except CloudError:
|
||||
except (CloudError, ResourceNotFoundError):
|
||||
logging.info(
|
||||
"subnet missing: resource group: %s name: %s",
|
||||
resource_group,
|
||||
@ -30,10 +30,10 @@ def get_subnet_id(resource_group: str, name: str) -> Optional[str]:
|
||||
|
||||
|
||||
def delete_subnet(resource_group: str, name: str) -> Union[None, CloudError, Any]:
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
try:
|
||||
return network_client.virtual_networks.delete(resource_group, name)
|
||||
except CloudError as err:
|
||||
return network_client.virtual_networks.begin_delete(resource_group, name)
|
||||
except (CloudError, ResourceNotFoundError) as err:
|
||||
if err.error and "InUseSubnetCannotBeDeleted" in str(err.error):
|
||||
logging.error(
|
||||
"subnet delete failed: %s %s : %s", resource_group, name, repr(err)
|
||||
@ -52,7 +52,7 @@ def create_virtual_network(
|
||||
location,
|
||||
)
|
||||
|
||||
network_client = mgmt_client_factory(NetworkManagementClient)
|
||||
network_client = get_network_client()
|
||||
params = {
|
||||
"location": location,
|
||||
"address_space": {"address_prefixes": ["10.0.0.0/8"]},
|
||||
@ -62,8 +62,10 @@ def create_virtual_network(
|
||||
params["tags"] = {"OWNER": os.environ["ONEFUZZ_OWNER"]}
|
||||
|
||||
try:
|
||||
network_client.virtual_networks.create_or_update(resource_group, name, params)
|
||||
except CloudError as err:
|
||||
network_client.virtual_networks.begin_create_or_update(
|
||||
resource_group, name, params
|
||||
)
|
||||
except (CloudError, ResourceNotFoundError) as err:
|
||||
return Error(code=ErrorCode.UNABLE_TO_CREATE_NETWORK, errors=[str(err.message)])
|
||||
|
||||
return None
|
||||
|
@ -8,7 +8,7 @@ import os
|
||||
from typing import Any, Dict, List, Optional, Union, cast
|
||||
from uuid import UUID
|
||||
|
||||
from azure.mgmt.compute import ComputeManagementClient
|
||||
from azure.core.exceptions import ResourceNotFoundError
|
||||
from azure.mgmt.compute.models import VirtualMachine
|
||||
from msrestazure.azure_exceptions import CloudError
|
||||
from onefuzztypes.enums import OS, ErrorCode
|
||||
@ -16,7 +16,8 @@ from onefuzztypes.models import Authentication, Error
|
||||
from onefuzztypes.primitives import Extension, Region
|
||||
from pydantic import BaseModel
|
||||
|
||||
from .creds import get_base_resource_group, mgmt_client_factory
|
||||
from .compute import get_compute_client
|
||||
from .creds import get_base_resource_group
|
||||
from .disk import delete_disk, list_disks
|
||||
from .image import get_os
|
||||
from .ip import create_public_nic, delete_ip, delete_nic, get_ip, get_public_nic
|
||||
@ -26,7 +27,7 @@ def get_vm(name: str) -> Optional[VirtualMachine]:
|
||||
resource_group = get_base_resource_group()
|
||||
|
||||
logging.debug("getting vm: %s", name)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
try:
|
||||
return cast(
|
||||
VirtualMachine,
|
||||
@ -34,7 +35,7 @@ def get_vm(name: str) -> Optional[VirtualMachine]:
|
||||
resource_group, name, expand="instanceView"
|
||||
),
|
||||
)
|
||||
except CloudError as err:
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
logging.debug("vm does not exist %s", err)
|
||||
return None
|
||||
|
||||
@ -50,7 +51,7 @@ def create_vm(
|
||||
resource_group = get_base_resource_group()
|
||||
logging.info("creating vm %s:%s:%s", resource_group, location, name)
|
||||
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
nic = get_public_nic(resource_group, name)
|
||||
if nic is None:
|
||||
@ -104,8 +105,10 @@ def create_vm(
|
||||
params["tags"] = {"OWNER": os.environ["ONEFUZZ_OWNER"]}
|
||||
|
||||
try:
|
||||
compute_client.virtual_machines.create_or_update(resource_group, name, params)
|
||||
except CloudError as err:
|
||||
compute_client.virtual_machines.begin_create_or_update(
|
||||
resource_group, name, params
|
||||
)
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
if "The request failed due to conflict with a concurrent request" in str(err):
|
||||
logging.debug(
|
||||
"create VM had conflicts with concurrent request, ignoring %s", err
|
||||
@ -124,12 +127,12 @@ def get_extension(vm_name: str, extension_name: str) -> Optional[Any]:
|
||||
vm_name,
|
||||
extension_name,
|
||||
)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
try:
|
||||
return compute_client.virtual_machine_extensions.get(
|
||||
resource_group, vm_name, extension_name
|
||||
)
|
||||
except CloudError as err:
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
logging.error("extension does not exist %s", err)
|
||||
return None
|
||||
|
||||
@ -140,8 +143,8 @@ def create_extension(vm_name: str, extension: Dict) -> Any:
|
||||
logging.info(
|
||||
"creating extension: %s:%s:%s", resource_group, vm_name, extension["name"]
|
||||
)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
return compute_client.virtual_machine_extensions.create_or_update(
|
||||
compute_client = get_compute_client()
|
||||
return compute_client.virtual_machine_extensions.begin_create_or_update(
|
||||
resource_group, vm_name, extension["name"], extension
|
||||
)
|
||||
|
||||
@ -150,8 +153,8 @@ def delete_vm(name: str) -> Any:
|
||||
resource_group = get_base_resource_group()
|
||||
|
||||
logging.info("deleting vm: %s %s", resource_group, name)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
return compute_client.virtual_machines.delete(resource_group, name)
|
||||
compute_client = get_compute_client()
|
||||
return compute_client.virtual_machines.begin_delete(resource_group, name)
|
||||
|
||||
|
||||
def has_components(name: str) -> bool:
|
||||
|
@ -8,25 +8,27 @@ import os
|
||||
from typing import Any, Dict, List, Optional, Union, cast
|
||||
from uuid import UUID
|
||||
|
||||
from azure.mgmt.compute import ComputeManagementClient
|
||||
from azure.mgmt.compute.models import ResourceSku, ResourceSkuRestrictionsType
|
||||
from azure.core.exceptions import ResourceNotFoundError
|
||||
from azure.mgmt.compute.models import (
|
||||
ResourceSku,
|
||||
ResourceSkuRestrictionsType,
|
||||
VirtualMachineScaleSetVMInstanceIDs,
|
||||
VirtualMachineScaleSetVMInstanceRequiredIDs,
|
||||
)
|
||||
from memoization import cached
|
||||
from msrestazure.azure_exceptions import CloudError
|
||||
from onefuzztypes.enums import OS, ErrorCode
|
||||
from onefuzztypes.models import Error
|
||||
from onefuzztypes.primitives import Region
|
||||
|
||||
from .creds import (
|
||||
get_base_resource_group,
|
||||
get_scaleset_identity_resource_path,
|
||||
mgmt_client_factory,
|
||||
)
|
||||
from .compute import get_compute_client
|
||||
from .creds import get_base_resource_group, get_scaleset_identity_resource_path
|
||||
from .image import get_os
|
||||
|
||||
|
||||
def list_vmss(name: UUID) -> Optional[List[str]]:
|
||||
resource_group = get_base_resource_group()
|
||||
client = mgmt_client_factory(ComputeManagementClient)
|
||||
client = get_compute_client()
|
||||
try:
|
||||
instances = [
|
||||
x.instance_id
|
||||
@ -35,30 +37,37 @@ def list_vmss(name: UUID) -> Optional[List[str]]:
|
||||
)
|
||||
]
|
||||
return instances
|
||||
except CloudError as err:
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
logging.error("cloud error listing vmss: %s (%s)", name, err)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def delete_vmss(name: UUID) -> Any:
|
||||
def delete_vmss(name: UUID) -> bool:
|
||||
resource_group = get_base_resource_group()
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
try:
|
||||
compute_client.virtual_machine_scale_sets.delete(resource_group, str(name))
|
||||
compute_client.virtual_machine_scale_sets.begin_delete(
|
||||
resource_group, str(name)
|
||||
)
|
||||
except ResourceNotFoundError:
|
||||
return True
|
||||
except CloudError as err:
|
||||
logging.error("cloud error deleting vmss: %s (%s)", name, err)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def get_vmss(name: UUID) -> Optional[Any]:
|
||||
resource_group = get_base_resource_group()
|
||||
logging.debug("getting vm: %s", name)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
try:
|
||||
return compute_client.virtual_machine_scale_sets.get(resource_group, str(name))
|
||||
except CloudError as err:
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
logging.debug("vm does not exist %s", err)
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def resize_vmss(name: UUID, capacity: int) -> None:
|
||||
@ -66,8 +75,8 @@ def resize_vmss(name: UUID, capacity: int) -> None:
|
||||
|
||||
resource_group = get_base_resource_group()
|
||||
logging.info("updating VM count - name: %s vm_count: %d", name, capacity)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client.virtual_machine_scale_sets.update(
|
||||
compute_client = get_compute_client()
|
||||
compute_client.virtual_machine_scale_sets.begin_update(
|
||||
resource_group, str(name), {"sku": {"capacity": capacity}}
|
||||
)
|
||||
|
||||
@ -82,7 +91,7 @@ def get_vmss_size(name: UUID) -> Optional[int]:
|
||||
def list_instance_ids(name: UUID) -> Dict[UUID, str]:
|
||||
logging.debug("get instance IDs for scaleset: %s", name)
|
||||
resource_group = get_base_resource_group()
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
results = {}
|
||||
try:
|
||||
@ -90,8 +99,8 @@ def list_instance_ids(name: UUID) -> Dict[UUID, str]:
|
||||
resource_group, str(name)
|
||||
):
|
||||
results[UUID(instance.vm_id)] = cast(str, instance.instance_id)
|
||||
except CloudError:
|
||||
logging.debug("scaleset not available: %s", name)
|
||||
except (ResourceNotFoundError, CloudError):
|
||||
logging.debug("vm does not exist %s", name)
|
||||
return results
|
||||
|
||||
|
||||
@ -99,7 +108,7 @@ def list_instance_ids(name: UUID) -> Dict[UUID, str]:
|
||||
def get_instance_id(name: UUID, vm_id: UUID) -> Union[str, Error]:
|
||||
resource_group = get_base_resource_group()
|
||||
logging.info("get instance ID for scaleset node: %s:%s", name, vm_id)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
vm_id_str = str(vm_id)
|
||||
for instance in compute_client.virtual_machine_scale_set_vms.list(
|
||||
@ -134,7 +143,7 @@ def reimage_vmss_nodes(name: UUID, vm_ids: List[UUID]) -> Optional[Error]:
|
||||
|
||||
resource_group = get_base_resource_group()
|
||||
logging.info("reimaging scaleset VM - name: %s vm_ids:%s", name, vm_ids)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
instance_ids = []
|
||||
machine_to_id = list_instance_ids(name)
|
||||
@ -145,8 +154,10 @@ def reimage_vmss_nodes(name: UUID, vm_ids: List[UUID]) -> Optional[Error]:
|
||||
logging.info("unable to find vm_id for %s:%s", name, vm_id)
|
||||
|
||||
if instance_ids:
|
||||
compute_client.virtual_machine_scale_sets.reimage_all(
|
||||
resource_group, str(name), instance_ids=instance_ids
|
||||
compute_client.virtual_machine_scale_sets.begin_reimage_all(
|
||||
resource_group,
|
||||
str(name),
|
||||
VirtualMachineScaleSetVMInstanceIDs(instance_ids=instance_ids),
|
||||
)
|
||||
return None
|
||||
|
||||
@ -156,7 +167,7 @@ def delete_vmss_nodes(name: UUID, vm_ids: List[UUID]) -> Optional[Error]:
|
||||
|
||||
resource_group = get_base_resource_group()
|
||||
logging.info("deleting scaleset VM - name: %s vm_ids:%s", name, vm_ids)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
instance_ids = []
|
||||
machine_to_id = list_instance_ids(name)
|
||||
@ -167,8 +178,10 @@ def delete_vmss_nodes(name: UUID, vm_ids: List[UUID]) -> Optional[Error]:
|
||||
logging.info("unable to find vm_id for %s:%s", name, vm_id)
|
||||
|
||||
if instance_ids:
|
||||
compute_client.virtual_machine_scale_sets.delete_instances(
|
||||
resource_group, str(name), instance_ids=instance_ids
|
||||
compute_client.virtual_machine_scale_sets.begin_delete_instances(
|
||||
resource_group,
|
||||
str(name),
|
||||
VirtualMachineScaleSetVMInstanceRequiredIDs(instance_ids=instance_ids),
|
||||
)
|
||||
return None
|
||||
|
||||
@ -178,8 +191,8 @@ def update_extensions(name: UUID, extensions: List[Any]) -> None:
|
||||
|
||||
resource_group = get_base_resource_group()
|
||||
logging.info("updating VM extensions: %s", name)
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client.virtual_machine_scale_sets.update(
|
||||
compute_client = get_compute_client()
|
||||
compute_client.virtual_machine_scale_sets.begin_update(
|
||||
resource_group,
|
||||
str(name),
|
||||
{"virtual_machine_profile": {"extension_profile": {"extensions": extensions}}},
|
||||
@ -218,7 +231,7 @@ def create_vmss(
|
||||
|
||||
resource_group = get_base_resource_group()
|
||||
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
if image.startswith("/"):
|
||||
image_ref = {"id": image}
|
||||
@ -238,6 +251,7 @@ def create_vmss(
|
||||
"do_not_run_extensions_on_overprovisioned_vms": True,
|
||||
"upgrade_policy": {"mode": "Manual"},
|
||||
"sku": sku,
|
||||
"overprovision": False,
|
||||
"identity": {
|
||||
"type": "userAssigned",
|
||||
"userAssignedIdentities": {get_scaleset_identity_resource_path(): {}},
|
||||
@ -303,10 +317,10 @@ def create_vmss(
|
||||
params["tags"]["OWNER"] = owner
|
||||
|
||||
try:
|
||||
compute_client.virtual_machine_scale_sets.create_or_update(
|
||||
compute_client.virtual_machine_scale_sets.begin_create_or_update(
|
||||
resource_group, name, params
|
||||
)
|
||||
except CloudError as err:
|
||||
except (ResourceNotFoundError, CloudError) as err:
|
||||
if "The request failed due to conflict with a concurrent request" in repr(err):
|
||||
logging.debug(
|
||||
"create VM had conflicts with concurrent request, ignoring %s", err
|
||||
@ -322,7 +336,8 @@ def create_vmss(
|
||||
|
||||
@cached(ttl=60)
|
||||
def list_available_skus(location: str) -> List[str]:
|
||||
compute_client = mgmt_client_factory(ComputeManagementClient)
|
||||
compute_client = get_compute_client()
|
||||
|
||||
skus: List[ResourceSku] = list(
|
||||
compute_client.resource_skus.list(filter="location eq '%s'" % location)
|
||||
)
|
||||
|
@ -418,6 +418,7 @@ class NodeTasks(BASE_NODE_TASK, ORMMixin):
|
||||
|
||||
@classmethod
|
||||
def clear_by_machine_id(cls, machine_id: UUID) -> None:
|
||||
logging.info("clearing tasks for node: %s", machine_id)
|
||||
for entry in cls.get_by_machine_id(machine_id):
|
||||
entry.delete()
|
||||
|
||||
@ -444,6 +445,7 @@ class NodeMessage(ORMMixin):
|
||||
|
||||
@classmethod
|
||||
def clear_messages(cls, machine_id: UUID) -> None:
|
||||
logging.info("clearing messages for node: %s", machine_id)
|
||||
messages = cls.get_messages(machine_id)
|
||||
for message in messages:
|
||||
message.delete()
|
||||
@ -1062,14 +1064,12 @@ class Scaleset(BASE_SCALESET, ORMMixin):
|
||||
logging.info("deleting node %s:%s", self.scaleset_id, node.machine_id)
|
||||
node.delete()
|
||||
|
||||
vmss = get_vmss(self.scaleset_id)
|
||||
if vmss:
|
||||
logging.info("scaleset deleting: %s", self.scaleset_id)
|
||||
delete_vmss(self.scaleset_id)
|
||||
self.save()
|
||||
else:
|
||||
logging.info("scaleset delete starting: %s", self.scaleset_id)
|
||||
if delete_vmss(self.scaleset_id):
|
||||
logging.info("scaleset deleted: %s", self.scaleset_id)
|
||||
self.delete()
|
||||
else:
|
||||
self.save()
|
||||
|
||||
@classmethod
|
||||
def scaleset_max_size(cls, image: str) -> int:
|
||||
|
@ -218,9 +218,7 @@ def build_task_config(
|
||||
StorageType.corpus,
|
||||
read=ContainerPermission.Read in container_def.permissions,
|
||||
write=ContainerPermission.Write in container_def.permissions,
|
||||
add=ContainerPermission.Add in container_def.permissions,
|
||||
delete=ContainerPermission.Delete in container_def.permissions,
|
||||
create=ContainerPermission.Create in container_def.permissions,
|
||||
list=ContainerPermission.List in container_def.permissions,
|
||||
),
|
||||
}
|
||||
|
@ -38,7 +38,6 @@ TASK_DEFINITIONS = {
|
||||
ContainerPermission.Write,
|
||||
ContainerPermission.Read,
|
||||
ContainerPermission.List,
|
||||
ContainerPermission.Create,
|
||||
],
|
||||
),
|
||||
ContainerDefinition(
|
||||
@ -78,7 +77,7 @@ TASK_DEFINITIONS = {
|
||||
type=ContainerType.crashes,
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Write, ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.inputs,
|
||||
@ -88,7 +87,6 @@ TASK_DEFINITIONS = {
|
||||
ContainerPermission.Write,
|
||||
ContainerPermission.Read,
|
||||
ContainerPermission.List,
|
||||
ContainerPermission.Create,
|
||||
],
|
||||
),
|
||||
ContainerDefinition(
|
||||
@ -127,19 +125,19 @@ TASK_DEFINITIONS = {
|
||||
type=ContainerType.reports,
|
||||
compare=Compare.AtMost,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.unique_reports,
|
||||
compare=Compare.AtMost,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.no_repro,
|
||||
compare=Compare.AtMost,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
],
|
||||
monitor_queue=ContainerType.crashes,
|
||||
@ -170,7 +168,6 @@ TASK_DEFINITIONS = {
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[
|
||||
ContainerPermission.Create,
|
||||
ContainerPermission.List,
|
||||
ContainerPermission.Read,
|
||||
ContainerPermission.Write,
|
||||
@ -199,7 +196,6 @@ TASK_DEFINITIONS = {
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[
|
||||
ContainerPermission.Create,
|
||||
ContainerPermission.List,
|
||||
ContainerPermission.Read,
|
||||
ContainerPermission.Write,
|
||||
@ -244,14 +240,14 @@ TASK_DEFINITIONS = {
|
||||
type=ContainerType.crashes,
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.inputs,
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[
|
||||
ContainerPermission.Create,
|
||||
ContainerPermission.Write,
|
||||
ContainerPermission.Read,
|
||||
ContainerPermission.List,
|
||||
],
|
||||
@ -294,7 +290,7 @@ TASK_DEFINITIONS = {
|
||||
type=ContainerType.inputs,
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create, ContainerPermission.List],
|
||||
permissions=[ContainerPermission.Write, ContainerPermission.List],
|
||||
),
|
||||
],
|
||||
monitor_queue=None,
|
||||
@ -332,7 +328,7 @@ TASK_DEFINITIONS = {
|
||||
type=ContainerType.crashes,
|
||||
compare=Compare.Equal,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.readonly_inputs,
|
||||
@ -371,19 +367,19 @@ TASK_DEFINITIONS = {
|
||||
type=ContainerType.reports,
|
||||
compare=Compare.AtMost,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.unique_reports,
|
||||
compare=Compare.AtMost,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
ContainerDefinition(
|
||||
type=ContainerType.no_repro,
|
||||
compare=Compare.AtMost,
|
||||
value=1,
|
||||
permissions=[ContainerPermission.Create],
|
||||
permissions=[ContainerPermission.Write],
|
||||
),
|
||||
],
|
||||
monitor_queue=ContainerType.crashes,
|
||||
|
@@ -6,6 +6,7 @@
import logging
from typing import Dict, Optional, Type

from azure.core.exceptions import ResourceNotFoundError
from msrestazure.azure_exceptions import CloudError
from onefuzztypes.enums import UpdateType
from pydantic import BaseModel
@@ -50,7 +51,7 @@ def queue_update(
visibility_timeout=visibility_timeout,
):
logging.error("unable to queue update")
except CloudError as err:
except (CloudError, ResourceNotFoundError) as err:
logging.error("GOT ERROR %s", repr(err))
logging.error("GOT ERROR %s", dir(err))
raise err
@@ -1,38 +1,34 @@
azure-cli-core~=2.10.0
azure-cli-nspkg==3.0.4
azure-common~=1.1.25
azure-core~=1.7.0
azure-core==1.10.0
azure-cosmosdb-nspkg==2.0.2
azure-cosmosdb-table==1.0.6
azure-devops==6.0.0b4
azure-functions~=1.3.0
azure-functions==1.5.0
azure-graphrbac~=0.61.1
azure-identity~=1.3.1
azure-keyvault-keys~=4.1.0
azure-keyvault-secrets~=4.1.0
azure-identity==1.5.0
azure-keyvault-keys~=4.3.1
azure-keyvault-secrets~=4.2.0
azure-keyvault~=4.1.0
azure-mgmt-compute~=13.0.0
azure-mgmt-core~=1.0.0
azure-mgmt-cosmosdb~=0.16.0
azure-mgmt-loganalytics~=0.7.0
azure-mgmt-network~=11.0.0
azure-mgmt-resource~=10.1.0
azure-mgmt-storage~=11.1.0
azure-mgmt-subscription~=0.6.0
azure-mgmt-compute~=18.0.0
azure-mgmt-core==1.2.2
azure-mgmt-loganalytics~=8.0.0
azure-mgmt-network~=17.0.0
azure-mgmt-resource~=15.0.0
azure-mgmt-storage~=16.0.0
azure-mgmt-subscription~=1.0.0
azure-nspkg==3.0.2
azure-servicebus==0.50.3
azure-storage-blob==2.0.1
azure-storage-blob==12.7.0
azure-storage-common==2.1.0
azure-storage-queue==12.1.2
azure-storage-queue==12.1.5
jinja2~=2.11.2
msrestazure~=0.6.3
opencensus-ext-azure~=1.0.2
pydantic~=1.6.1
pydantic~=1.7.3
PyJWT~=1.7.1
requests~=2.24.0
requests~=2.25.1
memoization~=0.3.1
github3.py~=1.3.0
typing-extensions~=3.7.4.3
jsonpatch==1.26
jsonpatch==1.28
# onefuzz types version is set during build
onefuzztypes==0.0.0
@@ -1,4 +1,3 @@
-r __app__/requirements.txt
pytest
flake8
mypy
@@ -10,7 +10,7 @@
# For more information:
# https://github.com/lonelyenvoy/python-memoization/issues/16

set -ex
set -e

SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]})

@@ -9,7 +9,7 @@
# For more information:
# https://github.com/lonelyenvoy/python-memoization/issues/16

set -ex
set -e

SCRIPT_DIR=$(dirname ${BASH_SOURCE[0]})
@@ -1,17 +1,17 @@
msal~=1.8.0
pyjwt~=1.7.1
requests~=2.24.0
requests~=2.25.1
jmespath~=0.10.0
semver~=2.10.2
semver~=2.13.0
signalrcore~=0.8.7
asciimatics~=1.11.0
asciimatics~=1.12.0
dataclasses~=0.6
pydantic~=1.6.1 --no-binary=pydantic
pydantic~=1.7.3 --no-binary=pydantic
memoization~=0.3.1
msrestazure==0.6.4
azure-storage-blob~=12.3
azure-applicationinsights==0.1.0
tenacity==6.2.0
tenacity==6.3.1
docstring_parser==0.7.3
# onefuzztypes version is set during build
onefuzztypes==0.0.0
@@ -418,7 +418,9 @@ def assign_scaleset_role_manually(
    )

    query = (
        "az rest --method post --url https://graph.microsoft.com/v1.0/servicePrincipals/%s/appRoleAssignedTo --body '%s' --headers \"Content-Type\"=application/json"
        "az rest --method post --url "
        "https://graph.microsoft.com/v1.0/servicePrincipals/%s/appRoleAssignedTo "
        "--body '%s' --headers \"Content-Type\"=application/json"
        % (scaleset_service_principal.object_id, body)
    )
@@ -85,10 +85,8 @@ class TaskFeature(Enum):
class ContainerPermission(Enum):
    Read = "Read"
    Write = "Write"
    Create = "Create"
    List = "List"
    Delete = "Delete"
    Add = "Add"


class JobState(Enum):
@@ -1 +1 @@
pydantic~=1.6.1 --no-binary=pydantic
pydantic~=1.7.3 --no-binary=pydantic
@@ -7,6 +7,7 @@ import unittest

from onefuzztypes.models import Scaleset, SecretData, TeamsTemplate
from onefuzztypes.requests import NotificationCreate
from onefuzztypes.primitives import PoolName, Region
from pydantic import ValidationError


@@ -53,29 +54,29 @@ class TestScaleset(unittest.TestCase):
    def test_scaleset_size(self) -> None:
        with self.assertRaises(ValueError):
            Scaleset(
                pool_name="test_pool",
                pool_name=PoolName("test-pool"),
                vm_sku="Standard_D2ds_v4",
                image="Canonical:UbuntuServer:18.04-LTS:latest",
                region="westus2",
                region=Region("westus2"),
                size=-1,
                spot_instances=False,
            )

        scaleset = Scaleset(
            pool_name="test_pool",
            pool_name=PoolName("test-pool"),
            vm_sku="Standard_D2ds_v4",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region="westus2",
            region=Region("westus2"),
            size=0,
            spot_instances=False,
        )
        self.assertEqual(scaleset.size, 0)

        scaleset = Scaleset(
            pool_name="test_pool",
            pool_name=PoolName("test-pool"),
            vm_sku="Standard_D2ds_v4",
            image="Canonical:UbuntuServer:18.04-LTS:latest",
            region="westus2",
            region=Region("westus2"),
            size=80,
            spot_instances=False,
        )