address formatting from black 21.4b0 (#831)

Authored by bmc-msft on 2021-04-26 12:35:16 -04:00; committed by GitHub
parent b5db1bd3fe
commit cf3d904940
23 changed files with 128 additions and 128 deletions
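Context for the hunks that follow: black 21.4b0 began stripping the leading and trailing spaces just inside one-line triple-quoted docstrings, and every hunk in this commit is that single whitespace-only edit applied across the 23 files. A minimal before/after sketch, reusing the set_halt method that appears later in this diff (bodies elided):

# Before black 21.4b0: the padding inside the quotes is left in place
def set_halt(self) -> None:
    """ Tell the node to stop everything. """
    ...

# After black 21.4b0: leading/trailing spaces inside the docstring are removed
def set_halt(self) -> None:
    """Tell the node to stop everything."""
    ...

Reproducing the reformatting locally should amount to something like `pip install black==21.4b0` followed by `black .` from the repository root; that command line is an assumption about the project's tooling, not something recorded in this commit.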

View File

@ -95,7 +95,7 @@ def process_state_update(obj: HasState) -> None:
def process_state_updates(obj: HasState, max_updates: int = 5) -> None:
""" process through the state machine for an object """
"""process through the state machine for an object"""
for _ in range(max_updates):
state = obj.state

View File

@ -40,7 +40,7 @@ def get_auth_token(request: func.HttpRequest) -> Optional[str]:
def parse_jwt_token(request: func.HttpRequest) -> Result[UserInfo]:
""" Obtains the Access Token from the Authorization Header """
"""Obtains the Access Token from the Authorization Header"""
token_str = get_auth_token(request)
if token_str is None:
return Error(

View File

@ -366,7 +366,7 @@ class Node(BASE_NODE, ORMMixin):
self.save()
def set_halt(self) -> None:
""" Tell the node to stop everything. """
"""Tell the node to stop everything."""
self.set_shutdown()
self.stop()
self.set_state(NodeState.halt)

View File

@ -185,7 +185,7 @@ class Pool(BASE_POOL, ORMMixin):
self.save()
def shutdown(self) -> None:
""" shutdown allows nodes to finish current work then delete """
"""shutdown allows nodes to finish current work then delete"""
from .nodes import Node
from .scalesets import Scaleset
@ -207,7 +207,7 @@ class Pool(BASE_POOL, ORMMixin):
self.save()
def halt(self) -> None:
""" halt the pool immediately """
"""halt the pool immediately"""
from .nodes import Node
from .scalesets import Scaleset

View File

@ -22,7 +22,7 @@ SANITIZERS = ["address", "dataflow", "memory", "undefined"]
class Ossfuzz(Command):
def build(self, project: str, sanitizer: str) -> None:
""" Build the latest oss-fuzz target """
"""Build the latest oss-fuzz target"""
self.logger.info("building %s:%s", project, sanitizer)
cmd = [
"docker",
@ -47,7 +47,7 @@ class Ossfuzz(Command):
sanitizers: Optional[List[str]] = None,
notification_config: Optional[NotificationConfig] = None,
) -> None:
""" Build & Launch all of the libFuzzer targets for a given project """
"""Build & Launch all of the libFuzzer targets for a given project"""
if sanitizers is None:
sanitizers = SANITIZERS

View File

@ -165,7 +165,7 @@ class Endpoint:
class Files(Endpoint):
""" Interact with files within a container """
"""Interact with files within a container"""
endpoint = "files"
@ -177,19 +177,19 @@ class Files(Endpoint):
def list(
self, container: primitives.Container, prefix: Optional[str] = None
) -> models.Files:
""" Get a list of files in a container """
"""Get a list of files in a container"""
self.logger.debug("listing files in container: %s", container)
client = self._get_client(container)
return models.Files(files=client.list_blobs(name_starts_with=prefix))
def delete(self, container: primitives.Container, filename: str) -> None:
""" delete a file from a container """
"""delete a file from a container"""
self.logger.debug("deleting in container: %s:%s", container, filename)
client = self._get_client(container)
client.delete_blob(filename)
def get(self, container: primitives.Container, filename: str) -> bytes:
""" get a file from a container """
"""get a file from a container"""
self.logger.debug("getting file from container: %s:%s", container, filename)
client = self._get_client(container)
downloaded = client.download_blob(filename)
@ -201,7 +201,7 @@ class Files(Endpoint):
file_path: str,
blob_name: Optional[str] = None,
) -> None:
""" uploads a file to a container """
"""uploads a file to a container"""
if not blob_name:
# Default blob name to file basename. This means that the file data will be
# written to the "root" of the container, if simulating a directory tree.
@ -220,7 +220,7 @@ class Files(Endpoint):
def upload_dir(
self, container: primitives.Container, dir_path: primitives.Directory
) -> None:
""" uploads a directory to a container """
"""uploads a directory to a container"""
self.logger.debug("uploading directory to container %s:%s", container, dir_path)
@ -230,7 +230,7 @@ class Files(Endpoint):
def download_dir(
self, container: primitives.Container, dir_path: primitives.Directory
) -> None:
""" downloads a container to a directory """
"""downloads a container to a directory"""
self.logger.debug(
"downloading container to directory %s:%s", container, dir_path
@ -241,10 +241,10 @@ class Files(Endpoint):
class Versions(Endpoint):
""" Onefuzz Instance """
"""Onefuzz Instance"""
def check(self, exact: bool = False) -> str:
""" Compare API and CLI versions for compatibility """
"""Compare API and CLI versions for compatibility"""
versions = self.onefuzz.info.get().versions
api_str = versions["onefuzz"].version
cli_str = __version__
@ -279,23 +279,23 @@ class Versions(Endpoint):
class Info(Endpoint):
""" Information about the OneFuzz instance """
"""Information about the OneFuzz instance"""
endpoint = "info"
def get(self) -> responses.Info:
""" Get information about the OneFuzz instance """
"""Get information about the OneFuzz instance"""
self.logger.debug("getting info")
return self._req_model("GET", responses.Info)
class Webhooks(Endpoint):
""" Interact with Webhooks """
"""Interact with Webhooks"""
endpoint = "webhooks"
def get(self, webhook_id: UUID_EXPANSION) -> webhooks.Webhook:
""" get a webhook """
"""get a webhook"""
webhook_id_expanded = self._disambiguate_uuid(
"webhook_id", webhook_id, lambda: [str(x.webhook_id) for x in self.list()]
@ -309,7 +309,7 @@ class Webhooks(Endpoint):
)
def list(self) -> List[webhooks.Webhook]:
""" list webhooks """
"""list webhooks"""
self.logger.debug("listing webhooks")
return self._req_model_list(
@ -326,7 +326,7 @@ class Webhooks(Endpoint):
*,
secret_token: Optional[str] = None,
) -> webhooks.Webhook:
""" Create a webhook """
"""Create a webhook"""
self.logger.debug("creating webhook. name: %s", name)
return self._req_model(
"POST",
@ -345,7 +345,7 @@ class Webhooks(Endpoint):
event_types: Optional[List[events.EventType]] = None,
secret_token: Optional[str] = None,
) -> webhooks.Webhook:
""" Update a webhook """
"""Update a webhook"""
webhook_id_expanded = self._disambiguate_uuid(
"webhook_id", webhook_id, lambda: [str(x.webhook_id) for x in self.list()]
@ -365,7 +365,7 @@ class Webhooks(Endpoint):
)
def delete(self, webhook_id: UUID_EXPANSION) -> responses.BoolResult:
""" Delete a webhook """
"""Delete a webhook"""
webhook_id_expanded = self._disambiguate_uuid(
"webhook_id", webhook_id, lambda: [str(x.webhook_id) for x in self.list()]
@ -378,7 +378,7 @@ class Webhooks(Endpoint):
)
def ping(self, webhook_id: UUID_EXPANSION) -> events.EventPing:
""" ping a webhook """
"""ping a webhook"""
webhook_id_expanded = self._disambiguate_uuid(
"webhook_id", webhook_id, lambda: [str(x.webhook_id) for x in self.list()]
@ -393,7 +393,7 @@ class Webhooks(Endpoint):
)
def logs(self, webhook_id: UUID_EXPANSION) -> List[webhooks.WebhookMessageLog]:
""" retreive webhook event log """
"""retreive webhook event log"""
webhook_id_expanded = self._disambiguate_uuid(
"webhook_id", webhook_id, lambda: [str(x.webhook_id) for x in self.list()]
@ -409,7 +409,7 @@ class Webhooks(Endpoint):
class Containers(Endpoint):
""" Interact with Onefuzz containers """
"""Interact with Onefuzz containers"""
endpoint = "containers"
@ -418,7 +418,7 @@ class Containers(Endpoint):
self.files = Files(onefuzz)
def get(self, name: str) -> responses.ContainerInfo:
""" Get a fully qualified SAS URL for a container """
"""Get a fully qualified SAS URL for a container"""
self.logger.debug("get container: %s", name)
return self._req_model(
"GET", responses.ContainerInfo, data=requests.ContainerGet(name=name)
@ -427,7 +427,7 @@ class Containers(Endpoint):
def create(
self, name: str, metadata: Optional[Dict[str, str]] = None
) -> responses.ContainerInfo:
""" Create a storage container """
"""Create a storage container"""
self.logger.debug("create container: %s", name)
return self._req_model(
"POST",
@ -436,14 +436,14 @@ class Containers(Endpoint):
)
def delete(self, name: str) -> responses.BoolResult:
""" Delete a storage container """
"""Delete a storage container"""
self.logger.debug("delete container: %s", name)
return self._req_model(
"DELETE", responses.BoolResult, data=requests.ContainerDelete(name=name)
)
def list(self) -> List[responses.ContainerInfoBase]:
""" Get a list of containers """
"""Get a list of containers"""
self.logger.debug("list containers")
return self._req_model_list("GET", responses.ContainerInfoBase)
@ -480,12 +480,12 @@ class Containers(Endpoint):
class Repro(Endpoint):
""" Interact with Reproduction VMs """
"""Interact with Reproduction VMs"""
endpoint = "repro_vms"
def get(self, vm_id: UUID_EXPANSION) -> models.Repro:
""" get information about a Reproduction VM """
"""get information about a Reproduction VM"""
vm_id_expanded = self._disambiguate_uuid(
"vm_id", vm_id, lambda: [str(x.vm_id) for x in self.list()]
)
@ -498,7 +498,7 @@ class Repro(Endpoint):
def create(
self, container: primitives.Container, path: str, duration: int = 24
) -> models.Repro:
""" Create a Reproduction VM from a Crash Report """
"""Create a Reproduction VM from a Crash Report"""
self.logger.info(
"creating repro vm: %s %s (%d hours)", container, path, duration
)
@ -509,7 +509,7 @@ class Repro(Endpoint):
)
def delete(self, vm_id: UUID_EXPANSION) -> models.Repro:
""" Delete a Reproduction VM """
"""Delete a Reproduction VM"""
vm_id_expanded = self._disambiguate_uuid(
"vm_id", vm_id, lambda: [str(x.vm_id) for x in self.list()]
)
@ -520,14 +520,14 @@ class Repro(Endpoint):
)
def list(self) -> List[models.Repro]:
""" List all VMs """
"""List all VMs"""
self.logger.debug("listing repro vms")
return self._req_model_list("GET", models.Repro, data=requests.ReproGet())
def _dbg_linux(
self, repro: models.Repro, debug_command: Optional[str]
) -> Optional[str]:
""" Launch gdb with GDB script that includes 'target remote | ssh ...' """
"""Launch gdb with GDB script that includes 'target remote | ssh ...'"""
if (
repro.auth is None
@ -574,7 +574,7 @@ class Repro(Endpoint):
def _dbg_windows(
self, repro: models.Repro, debug_command: Optional[str]
) -> Optional[str]:
""" Setup an SSH tunnel, then connect via CDB over SSH tunnel """
"""Setup an SSH tunnel, then connect via CDB over SSH tunnel"""
if (
repro.auth is None
@ -618,7 +618,7 @@ class Repro(Endpoint):
delete_after_use: bool = False,
debug_command: Optional[str] = None,
) -> Optional[str]:
""" Connect to an existing Reproduction VM """
"""Connect to an existing Reproduction VM"""
self.logger.info("connecting to reproduction VM: %s", vm_id)
@ -678,7 +678,7 @@ class Repro(Endpoint):
delete_after_use: bool = False,
debug_command: Optional[str] = None,
) -> Optional[str]:
""" Create and connect to a Reproduction VM """
"""Create and connect to a Reproduction VM"""
repro = self.create(container, path, duration=duration)
return self.connect(
repro.vm_id, delete_after_use=delete_after_use, debug_command=debug_command
@ -686,14 +686,14 @@ class Repro(Endpoint):
class Notifications(Endpoint):
""" Interact with models.Notifications """
"""Interact with models.Notifications"""
endpoint = "notifications"
def create(
self, container: primitives.Container, config: models.NotificationConfig
) -> models.Notification:
""" Create a notification based on a config file """
"""Create a notification based on a config file"""
config = requests.NotificationCreate(container=container, config=config.config)
return self._req_model("POST", models.Notification, data=config)
@ -701,7 +701,7 @@ class Notifications(Endpoint):
def create_teams(
self, container: primitives.Container, url: str
) -> models.Notification:
""" Create a Teams notification integration """
"""Create a Teams notification integration"""
self.logger.debug("create teams notification integration: %s", container)
@ -723,7 +723,7 @@ class Notifications(Endpoint):
on_dup_set_state: Optional[Dict[str, str]] = None,
on_dup_fields: Optional[Dict[str, str]] = None,
) -> models.Notification:
""" Create an Azure DevOps notification integration """
"""Create an Azure DevOps notification integration"""
self.logger.debug("create ado notification integration: %s", container)
@ -747,7 +747,7 @@ class Notifications(Endpoint):
return self.create(container, entry)
def delete(self, notification_id: UUID_EXPANSION) -> models.Notification:
""" Delete a notification integration """
"""Delete a notification integration"""
notification_id_expanded = self._disambiguate_uuid(
"notification_id",
@ -766,19 +766,19 @@ class Notifications(Endpoint):
)
def list(self) -> List[models.Notification]:
""" List notification integrations """
"""List notification integrations"""
self.logger.debug("listing notification integrations")
return self._req_model_list("GET", models.Notification)
class Tasks(Endpoint):
""" Interact with tasks """
"""Interact with tasks"""
endpoint = "tasks"
def delete(self, task_id: UUID_EXPANSION) -> models.Task:
""" Stop an individual task """
"""Stop an individual task"""
task_id_expanded = self._disambiguate_uuid(
"task_id", task_id, lambda: [str(x.task_id) for x in self.list()]
@ -791,7 +791,7 @@ class Tasks(Endpoint):
)
def get(self, task_id: UUID_EXPANSION) -> models.Task:
""" Get information about a task """
"""Get information about a task"""
task_id_expanded = self._disambiguate_uuid(
"task_id", task_id, lambda: [str(x.task_id) for x in self.list()]
)
@ -803,7 +803,7 @@ class Tasks(Endpoint):
)
def create_with_config(self, config: models.TaskConfig) -> models.Task:
""" Create a Task using TaskConfig """
"""Create a Task using TaskConfig"""
return self._req_model("POST", models.Task, data=config)
@ -924,7 +924,7 @@ class Tasks(Endpoint):
job_id: Optional[UUID_EXPANSION] = None,
state: Optional[List[enums.TaskState]] = enums.TaskState.available(),
) -> List[models.Task]:
""" Get information about all tasks """
"""Get information about all tasks"""
self.logger.debug("list tasks")
job_id_expanded: Optional[UUID] = None
@ -943,7 +943,7 @@ class Tasks(Endpoint):
class JobContainers(Endpoint):
""" Interact with Containers used within tasks in a Job """
"""Interact with Containers used within tasks in a Job"""
endpoint = "jobs"
@ -972,17 +972,17 @@ class JobContainers(Endpoint):
class JobTasks(Endpoint):
""" Interact with tasks within a job """
"""Interact with tasks within a job"""
endpoint = "jobs"
def list(self, job_id: UUID_EXPANSION) -> List[models.Task]:
""" List all of the tasks for a given job """
"""List all of the tasks for a given job"""
return self.onefuzz.tasks.list(job_id=job_id, state=[])
class Jobs(Endpoint):
""" Interact with Jobs """
"""Interact with Jobs"""
endpoint = "jobs"
@ -993,7 +993,7 @@ class Jobs(Endpoint):
def delete(self, job_id: UUID_EXPANSION) -> models.Job:
""" Stop a job and all tasks that make up a job """
"""Stop a job and all tasks that make up a job"""
job_id_expanded = self._disambiguate_uuid(
"job_id", job_id, lambda: [str(x.job_id) for x in self.list()]
)
@ -1004,7 +1004,7 @@ class Jobs(Endpoint):
)
def get(self, job_id: UUID_EXPANSION) -> models.Job:
""" Get information about a specific job """
"""Get information about a specific job"""
job_id_expanded = self._disambiguate_uuid(
"job_id", job_id, lambda: [str(x.job_id) for x in self.list()]
)
@ -1015,7 +1015,7 @@ class Jobs(Endpoint):
return job
def create_with_config(self, config: models.JobConfig) -> models.Job:
""" Create a job """
"""Create a job"""
self.logger.debug(
"create job: project:%s name:%s build:%s",
config.project,
@ -1031,7 +1031,7 @@ class Jobs(Endpoint):
def create(
self, project: str, name: str, build: str, duration: int = 24
) -> models.Job:
""" Create a job """
"""Create a job"""
return self.create_with_config(
models.JobConfig(project=project, name=name, build=build, duration=duration)
)
@ -1040,7 +1040,7 @@ class Jobs(Endpoint):
self,
job_state: Optional[List[enums.JobState]] = enums.JobState.available(),
) -> List[models.Job]:
""" Get information about all jobs """
"""Get information about all jobs"""
self.logger.debug("list jobs")
return self._req_model_list(
"GET", models.Job, data=requests.JobSearch(state=job_state)
@ -1048,7 +1048,7 @@ class Jobs(Endpoint):
class Pool(Endpoint):
""" Interact with worker pools """
"""Interact with worker pools"""
endpoint = "pool"
@ -1078,7 +1078,7 @@ class Pool(Endpoint):
)
def get_config(self, pool_name: primitives.PoolName) -> models.AgentConfig:
""" Get the agent configuration for the pool """
"""Get the agent configuration for the pool"""
pool = self.get(pool_name)
@ -1125,7 +1125,7 @@ class Pool(Endpoint):
class Node(Endpoint):
""" Interact with nodes """
"""Interact with nodes"""
endpoint = "node"
@ -1248,7 +1248,7 @@ class Node(Endpoint):
class Scaleset(Endpoint):
""" Interact with managed scaleset pools """
"""Interact with managed scaleset pools"""
endpoint = "scaleset"
@ -1373,7 +1373,7 @@ class Scaleset(Endpoint):
class ScalesetProxy(Endpoint):
""" Interact with Scaleset Proxies (NOTE: This API is unstable) """
"""Interact with Scaleset Proxies (NOTE: This API is unstable)"""
endpoint = "proxy"
@ -1384,7 +1384,7 @@ class ScalesetProxy(Endpoint):
*,
dst_port: Optional[int] = None,
) -> responses.BoolResult:
""" Stop a proxy node """
"""Stop a proxy node"""
(
scaleset,
@ -1408,7 +1408,7 @@ class ScalesetProxy(Endpoint):
)
def reset(self, region: primitives.Region) -> responses.BoolResult:
""" Reset the proxy for an existing region """
"""Reset the proxy for an existing region"""
return self._req_model(
"PATCH", responses.BoolResult, data=requests.ProxyReset(region=region)
@ -1417,7 +1417,7 @@ class ScalesetProxy(Endpoint):
def get(
self, scaleset_id: UUID_EXPANSION, machine_id: UUID_EXPANSION, dst_port: int
) -> responses.ProxyGetResult:
""" Get information about a specific job """
"""Get information about a specific job"""
(
scaleset,
machine_id_expanded,
@ -1445,7 +1445,7 @@ class ScalesetProxy(Endpoint):
*,
duration: Optional[int] = 1,
) -> responses.ProxyGetResult:
""" Create a proxy """
"""Create a proxy"""
(
scaleset,
machine_id_expanded,
@ -1533,27 +1533,27 @@ class Onefuzz:
self.job_templates._load_cache()
def licenses(self) -> object:
""" Return third-party licenses used by this package """
"""Return third-party licenses used by this package"""
data = pkgutil.get_data("onefuzz", "data/licenses.json")
if data is None:
raise Exception("missing licenses.json")
return json.loads(data)
def privacy_statement(self) -> bytes:
""" Return OneFuzz privacy statement """
"""Return OneFuzz privacy statement"""
data = pkgutil.get_data("onefuzz", "data/privacy.txt")
if data is None:
raise Exception("missing licenses.json")
return data
def logout(self) -> None:
""" Logout of Onefuzz """
"""Logout of Onefuzz"""
self.logger.debug("logout")
self._backend.logout()
def login(self) -> str:
""" Login to Onefuzz """
"""Login to Onefuzz"""
# Rather than interacting MSAL directly, call a simple API which
# actuates the login process
@ -1573,7 +1573,7 @@ class Onefuzz:
enable_feature: Optional[PreviewFeature] = None,
tenant_domain: Optional[str] = None,
) -> BackendConfig:
""" Configure onefuzz CLI """
"""Configure onefuzz CLI"""
self.logger.debug("set config")
if endpoint is not None:

View File

@ -4,7 +4,7 @@ import subprocess # nosec
def azcopy_sync(src: str, dst: str) -> None:
""" Expose azcopy for uploading/downloading files """
"""Expose azcopy for uploading/downloading files"""
azcopy = os.environ.get("AZCOPY") or shutil.which("azcopy")
if not azcopy:

View File

@ -204,7 +204,7 @@ class Builder:
def parse_param(
self, name: str, param: inspect.Parameter, help_doc: Optional[str] = None
) -> Tuple[List[str], Dict[str, Any]]:
""" Parse a single parameter """
"""Parse a single parameter"""
default = param.default
annotation = param.annotation
@ -416,7 +416,7 @@ class Builder:
def parse_instance(
self, inst: Callable, subparser: argparse._SubParsersAction
) -> None:
""" Expose every non-private callable in a class instance """
"""Expose every non-private callable in a class instance"""
for (name, func) in self.get_children(inst, is_callable=True):
sub = subparser.add_parser(name, help=self.get_help(func))
add_base(sub)

View File

@ -35,7 +35,7 @@ DEFAULT_TAIL_DELAY = 10.0
class DebugRepro(Command):
""" Debug repro instances """
"""Debug repro instances"""
def _disambiguate(self, vm_id: UUID_EXPANSION) -> str:
return str(
@ -75,7 +75,7 @@ class DebugRepro(Command):
class DebugNode(Command):
""" Debug a specific node on a scaleset """
"""Debug a specific node on a scaleset"""
def rdp(self, machine_id: UUID_EXPANSION, duration: Optional[int] = 1) -> None:
node = self.onefuzz.nodes.get(machine_id)
@ -95,7 +95,7 @@ class DebugNode(Command):
class DebugScaleset(Command):
""" Debug tasks """
"""Debug tasks"""
def _get_proxy_setup(
self, scaleset_id: UUID, machine_id: UUID, port: int, duration: Optional[int]
@ -172,7 +172,7 @@ class DebugScaleset(Command):
class DebugTask(Command):
""" Debug a specific job """
"""Debug a specific job"""
def list_nodes(self, task_id: UUID_EXPANSION) -> Optional[List[NodeAssignment]]:
task = self.onefuzz.tasks.get(task_id)
@ -255,7 +255,7 @@ class DebugTask(Command):
class DebugJobTask(Command):
""" Debug a task for a specific job """
"""Debug a task for a specific job"""
def _get_task(self, job_id: UUID_EXPANSION, task_type: TaskType) -> UUID:
for task in self.onefuzz.tasks.list(job_id=job_id):
@ -273,7 +273,7 @@ class DebugJobTask(Command):
*,
duration: Optional[int] = 1,
) -> None:
""" SSH into the first node running the specified task type in the job """
"""SSH into the first node running the specified task type in the job"""
return self.onefuzz.debug.task.ssh(
self._get_task(job_id, task_type), duration=duration
)
@ -285,14 +285,14 @@ class DebugJobTask(Command):
*,
duration: Optional[int] = 1,
) -> None:
""" RDP into the first node running the specified task type in the job """
"""RDP into the first node running the specified task type in the job"""
return self.onefuzz.debug.task.rdp(
self._get_task(job_id, task_type), duration=duration
)
class DebugJob(Command):
""" Debug a specific Job """
"""Debug a specific Job"""
def __init__(self, onefuzz: Any, logger: logging.Logger):
super().__init__(onefuzz, logger)
@ -335,7 +335,7 @@ class DebugJob(Command):
)
def download_files(self, job_id: UUID_EXPANSION, output: Directory) -> None:
""" Download the containers by container type for each task in the specified job """
"""Download the containers by container type for each task in the specified job"""
to_download = {}
tasks = self.onefuzz.tasks.list(job_id=job_id, state=None)
@ -570,7 +570,7 @@ class DebugLog(Command):
class DebugNotification(Command):
""" Debug notification integrations """
"""Debug notification integrations"""
def _get_container(
self, task: Task, container_type: ContainerType
@ -592,7 +592,7 @@ class DebugNotification(Command):
report_container_type: ContainerType = ContainerType.unique_reports,
crash_name: str = "fake-crash-sample",
) -> None:
""" Inject a report into the first crash reporting task in the specified job """
"""Inject a report into the first crash reporting task in the specified job"""
tasks = self.onefuzz.tasks.list(job_id=job_id, state=[])
for task in tasks:
@ -616,7 +616,7 @@ class DebugNotification(Command):
report_container_type: ContainerType = ContainerType.unique_reports,
crash_name: str = "fake-crash-sample",
) -> None:
""" Inject a report into the specified crash reporting task """
"""Inject a report into the specified crash reporting task"""
task = self.onefuzz.tasks.get(task_id)
crashes = self._get_container(task, ContainerType.crashes)
@ -664,7 +664,7 @@ class DebugNotification(Command):
class Debug(Command):
""" Debug running jobs """
"""Debug running jobs"""
def __init__(self, onefuzz: Any, logger: logging.Logger):
super().__init__(onefuzz, logger)

View File

@ -20,14 +20,14 @@ def container_type_name(container_type: ContainerType) -> str:
class TemplateSubmitHandler(Endpoint):
""" Submit Job Template """
"""Submit Job Template"""
_endpoint = "job_templates"
def _process_containers(
self, request: JobTemplateRequest, args: Dict[str, Any]
) -> None:
""" Create containers based on the argparse args """
"""Create containers based on the argparse args"""
for container in request.containers:
directory_arg = container_type_name(container.type)
@ -106,7 +106,7 @@ class TemplateSubmitHandler(Endpoint):
def _convert_container_args(
self, config: JobTemplateConfig, args: Dict[str, Any]
) -> List[TaskContainers]:
""" Convert the job template into a list of containers """
"""Convert the job template into a list of containers"""
containers = []
container_names = args["container_names"]
@ -124,7 +124,7 @@ class TemplateSubmitHandler(Endpoint):
def _convert_args(
self, config: JobTemplateConfig, args: Dict[str, Any]
) -> JobTemplateRequest:
""" convert arguments from argparse into a JobTemplateRequest """
"""convert arguments from argparse into a JobTemplateRequest"""
user_fields = {}
for field in config.user_fields:
@ -156,7 +156,7 @@ class TemplateSubmitHandler(Endpoint):
*,
wait_for_running: bool,
) -> Job:
""" Convert argparse args into a JobTemplateRequest and submit it """
"""Convert argparse args into a JobTemplateRequest and submit it"""
self.onefuzz.logger.debug("building: %s", config.name)
request = self._convert_args(config, args)
job = self._execute_request(config, request, args)

View File

@ -34,7 +34,7 @@ def load_templates(templates: List[JobTemplateConfig]) -> None:
class JobTemplates(Endpoint):
""" Job Templates """
"""Job Templates"""
endpoint = "job_templates"
@ -44,7 +44,7 @@ class JobTemplates(Endpoint):
self.submit = TemplateSubmitHandler(onefuzz)
def info(self, name: str) -> Optional[JobTemplateConfig]:
""" Display information for a Job Template """
"""Display information for a Job Template"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
endpoint = self.onefuzz._backend.config.endpoint
@ -62,7 +62,7 @@ class JobTemplates(Endpoint):
return None
def list(self) -> Optional[List[str]]:
""" List available Job Templates """
"""List available Job Templates"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
@ -90,7 +90,7 @@ class JobTemplates(Endpoint):
load_templates(entry.configs)
def refresh(self) -> None:
""" Update available templates """
"""Update available templates"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
self.onefuzz.logger.info("refreshing job template cache")

View File

@ -18,12 +18,12 @@ from ..api import Endpoint, PreviewFeature
class Manage(Endpoint):
""" Manage Job Templates """
"""Manage Job Templates"""
endpoint = "job_templates/manage"
def list(self) -> List[JobTemplateIndex]:
""" List templates """
"""List templates"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
self.onefuzz.logger.debug("listing job templates")
@ -32,14 +32,14 @@ class Manage(Endpoint):
)
def get(self, name: str) -> JobTemplate:
""" Get an existing Job Template """
"""Get an existing Job Template"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
self.onefuzz.logger.debug("get job template")
return self._req_model("GET", JobTemplate, data=JobTemplateGet(name=name))
def upload(self, name: str, template: JobTemplate) -> BoolResult:
""" Upload a Job Template """
"""Upload a Job Template"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
self.onefuzz.logger.debug("upload job template")
@ -50,7 +50,7 @@ class Manage(Endpoint):
)
def delete(self, name: str) -> BoolResult:
""" Delete a Job Template """
"""Delete a Job Template"""
self.onefuzz._warn_preview(PreviewFeature.job_templates)
self.onefuzz.logger.debug("delete job templates")

View File

@ -21,7 +21,7 @@ def short(value: UUID) -> str:
class Status(Command):
""" Monitor status of Onefuzz Instance """
"""Monitor status of Onefuzz Instance"""
def project(
self,
@ -107,7 +107,7 @@ class Status(Command):
)
def raw(self) -> None:
""" Raw status update stream """
"""Raw status update stream"""
raw(self.onefuzz, self.logger)
def top(
@ -118,7 +118,7 @@ class Status(Command):
project: Optional[List[str]] = None,
name: Optional[List[str]] = None,
) -> None:
""" Onefuzz Top """
"""Onefuzz Top"""
job_filter = JobFilter(job_id=job_id, project=project, name=name)
top = Top(self.onefuzz, self.logger, show_details, job_filter)
top.run()

View File

@ -145,7 +145,7 @@ class TopView(Frame):
return [(x, y) for (y, x) in enumerate(data)]
def auto_resize(self, name: str) -> None:
""" recompute widget width based on max length of all of the values """
"""recompute widget width based on max length of all of the values"""
widget = self.find_widget(name)
for column in range(len(widget._columns) - 1):

View File

@ -17,7 +17,7 @@ from .templates.regression import Regression
class Template(Command):
""" Pre-defined job templates """
"""Pre-defined job templates"""
def __init__(self, onefuzz: Onefuzz, logger: logging.Logger) -> None:
super().__init__(onefuzz, logger)

View File

@ -15,7 +15,7 @@ from . import JobHelper
class AFL(Command):
""" Pre-defined AFL job """
"""Pre-defined AFL job"""
def basic(
self,

View File

@ -24,10 +24,10 @@ class QemuArch(Enum):
class Libfuzzer(Command):
""" Pre-defined Libfuzzer job """
"""Pre-defined Libfuzzer job"""
def _check_is_libfuzzer(self, target_exe: File) -> None:
""" Look for a magic string """
"""Look for a magic string"""
self.logger.debug(
"checking %s for %s", repr(target_exe), repr(LIBFUZZER_MAGIC_STRING)
)

View File

@ -23,7 +23,7 @@ VM_COUNT = 1
class OssFuzz(Command):
""" OssFuzz style jobs """
"""OssFuzz style jobs"""
def _containers(self, project: str, build: str, platform: OS) -> Dict[str, str]:
guid = self.onefuzz.utils.namespaced_guid(

View File

@ -15,7 +15,7 @@ from . import JobHelper
class Radamsa(Command):
""" Pre-defined Radamsa job """
"""Pre-defined Radamsa job"""
def basic(
self,

View File

@ -16,7 +16,7 @@ from . import JobHelper
class Regression(Command):
""" Regression job """
"""Regression job"""
def _check_regression(self, container: Container, file: File) -> bool:
content = self.onefuzz.containers.files.get(Container(container), file)

View File

@ -150,7 +150,7 @@ def register_application(
def create_application_credential(application_name: str, subscription_id: str) -> str:
""" Add a new password to the application registration """
"""Add a new password to the application registration"""
logger.info("creating application credential for '%s'" % application_name)
client = get_graph_client(subscription_id)
@ -167,7 +167,7 @@ def create_application_credential(application_name: str, subscription_id: str) -
def create_application_registration(
onefuzz_instance_name: str, name: str, approle: OnefuzzAppRole, subscription_id: str
) -> Application:
""" Create an application registration """
"""Create an application registration"""
client = get_graph_client(subscription_id)
apps: List[Application] = list(

View File

@ -230,7 +230,7 @@ class TestOnefuzz:
def launch(
self, path: Directory, *, os_list: List[OS], targets: List[str], duration=int
) -> None:
""" Launch all of the fuzzing templates """
"""Launch all of the fuzzing templates"""
for target, config in TARGETS.items():
if target not in targets:
continue
@ -364,7 +364,7 @@ class TestOnefuzz:
def check_jobs(
self, poll: bool = False, stop_on_complete_check: bool = False
) -> bool:
""" Check all of the integration jobs """
"""Check all of the integration jobs"""
jobs: Dict[UUID, Job] = {x.job_id: x for x in self.get_jobs()}
job_tasks: Dict[UUID, List[Task]] = {}
check_containers: Dict[UUID, Dict[Container, Tuple[ContainerWrapper, int]]] = {}
@ -647,7 +647,7 @@ class TestOnefuzz:
return pools
def cleanup(self) -> None:
""" cleanup all of the integration pools & jobs """
"""cleanup all of the integration pools & jobs"""
self.logger.info("cleaning up")
errors: List[Exception] = []

View File

@ -30,7 +30,7 @@ class TelemetryEvent(Enum):
@classmethod
def can_share(cls) -> List["TelemetryEvent"]:
""" only these events will be shared to the central telemetry """
"""only these events will be shared to the central telemetry"""
return [cls.task, cls.state_changed]
@ -44,7 +44,7 @@ class TelemetryData(Enum):
@classmethod
def can_share(cls) -> List["TelemetryData"]:
""" only these types of data will be shared to the central telemetry """
"""only these types of data will be shared to the central telemetry"""
return [cls.current_state, cls.vm_id, cls.job_id, cls.task_id, cls.task_type]
@ -100,7 +100,7 @@ class JobState(Enum):
@classmethod
def available(cls) -> List["JobState"]:
""" set of states that indicate if tasks can be added to it """
"""set of states that indicate if tasks can be added to it"""
return [x for x in cls if x not in [cls.stopping, cls.stopped]]
@classmethod
@ -138,7 +138,7 @@ class TaskState(Enum):
@classmethod
def available(cls) -> List["TaskState"]:
""" set of states that indicate if the task isn't stopping """
"""set of states that indicate if the task isn't stopping"""
return [x for x in cls if x not in [TaskState.stopping, TaskState.stopped]]
@classmethod
@ -178,7 +178,7 @@ class VmState(Enum):
@classmethod
def available(cls) -> List["VmState"]:
""" set of states that indicate if the repro vm isn't stopping """
"""set of states that indicate if the repro vm isn't stopping"""
return [x for x in cls if x not in [cls.stopping, cls.stopped]]
@ -289,7 +289,7 @@ class PoolState(Enum):
@classmethod
def available(cls) -> List["PoolState"]:
""" set of states that indicate if it's available for work """
"""set of states that indicate if it's available for work"""
return [cls.running]
@ -311,13 +311,13 @@ class ScalesetState(Enum):
@classmethod
def available(cls) -> List["ScalesetState"]:
""" set of states that indicate if it's available for work """
"""set of states that indicate if it's available for work"""
unavailable = [cls.shutdown, cls.halt, cls.creation_failed]
return [x for x in cls if x not in unavailable]
@classmethod
def modifying(cls) -> List["ScalesetState"]:
""" set of states that indicate scaleset is resizing """
"""set of states that indicate scaleset is resizing"""
return [
cls.halt,
cls.init,