Mirror of https://github.com/microsoft/onefuzz.git, synced 2025-06-17 12:28:07 +00:00
Fix query to get the existing proxy (#2791)
@@ -41,7 +41,7 @@ public class ProxyOperations : StatefulOrm<Proxy, VmState, ProxyOperations>, IPr

     public async Async.Task<Proxy> GetOrCreate(Region region) {
         {
-            var proxyList = QueryAsync(filter: TableClient.CreateQueryFilter($"RowKey eq {region.String} and outdated eq false"));
+            var proxyList = QueryAsync(filter: TableClient.CreateQueryFilter($"PartitionKey eq {region.String} and outdated eq false"));
             await foreach (var proxy in proxyList) {
                 if (IsOutdated(proxy)) {
                     var r1 = await Replace(proxy with { Outdated = true });
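For context, the sketch below illustrates why the one-line change above fixes the lookup, under the assumption (suggested by the query itself, not stated in this diff) that the Proxy table stores the region in the PartitionKey and the proxy id in the RowKey: comparing RowKey against a region name never matches an existing row, so GetOrCreate always provisioned a new proxy instead of reusing the existing one.

    // Minimal sketch only; assumes PartitionKey = region name and RowKey = proxy id.
    using Azure.Data.Tables;

    static class ProxyFilterSketch {
        // The old filter compared RowKey against a region value and matched nothing;
        // filtering on PartitionKey finds the existing, non-outdated proxy for the region.
        public static string ForRegion(string region) =>
            TableClient.CreateQueryFilter($"PartitionKey eq {region} and outdated eq false");
    }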
@@ -123,7 +123,6 @@ class Endpoint:
         as_params: bool = False,
         alternate_endpoint: Optional[str] = None,
     ) -> A:
-
         # Retrieve Auth Parameters
         self._req_config_params()

@@ -161,7 +160,6 @@ class Endpoint:
     def _req_config_params(
         self,
     ) -> None:
-
         if self.onefuzz._backend.config.endpoint is None:
             raise Exception("Endpoint Not Configured")

@@ -529,7 +527,6 @@ class Containers(Endpoint):
     def _download_tasks(
         self, tasks: List[models.Task], output: Optional[primitives.Directory]
     ) -> None:
-
         to_download: Dict[str, str] = {}
         for task in tasks:
             for container in task.config.containers:
@@ -611,7 +608,6 @@ class Repro(Endpoint):
         with build_ssh_command(
             repro.ip, repro.auth.private_key, command="-T"
         ) as ssh_cmd:
-
             gdb_script = [
                 "target remote | %s sudo /onefuzz/bin/repro-stdout.sh"
                 % " ".join(ssh_cmd)
@@ -1006,7 +1002,7 @@ class Tasks(Endpoint):
         tags = {}

         containers_submit = []
-        for (container_type, container) in containers:
+        for container_type, container in containers:
             containers_submit.append(
                 models.TaskContainers(name=container, type=container_type)
             )
@@ -1208,7 +1204,6 @@ class Jobs(Endpoint):
         self.tasks = JobTasks(onefuzz)

     def delete(self, job_id: UUID_EXPANSION) -> models.Job:
-
         """Stop a job and all tasks that make up a job"""
         job_id_expanded = self._disambiguate_uuid(
             "job_id", job_id, lambda: [str(x.job_id) for x in self.list()]
@@ -1834,7 +1829,6 @@ class Onefuzz:
         authority: Optional[str] = None,
         tenant_domain: Optional[str] = None,
     ) -> None:
-
         if endpoint:
             self._backend.config.endpoint = endpoint
         if authority is not None:
@@ -429,7 +429,7 @@ class Builder:
         self, inst: Callable, subparser: argparse._SubParsersAction
     ) -> None:
         """Expose every non-private callable in a class instance"""
-        for (name, func) in self.get_children(inst, is_callable=True):
+        for name, func in self.get_children(inst, is_callable=True):
             sub = subparser.add_parser(name, help=self.get_help(func))
             add_base(sub)
             self.parse_function(func, sub)
@@ -442,7 +442,7 @@ class Builder:
             title="subcommands", dest="level_%d" % level
         )

-        for (name, endpoint) in self.get_children(inst, is_typed=True):
+        for name, endpoint in self.get_children(inst, is_typed=True):
             parser = subparser.add_parser(
                 name, help=self.get_help(endpoint), parents=[self.top_level]
             )
@@ -451,7 +451,7 @@ class Builder:
                 title="subcommands", dest="level_%d" % (level + 1)
             )

-            for (nested_name, nested_endpoint) in self.get_children(
+            for nested_name, nested_endpoint in self.get_children(
                 endpoint, is_typed=True
             ):
                 nested = method_subparser.add_parser(
@@ -462,7 +462,6 @@ class DebugLog(Command):
         if self._app_id is None:
             raise Exception("instance does not have an insights_appid")
         if self._client is None:
-
             creds = AzureIdentityCredentialAdapter(
                 AzureCliCredential(), resource_id="https://api.applicationinsights.io"
             )
@@ -61,7 +61,6 @@ class TemplateSubmitHandler(Endpoint):
     def _define_missing_containers(
         self, config: JobTemplateConfig, request: JobTemplateRequest
     ) -> None:
-
         for container_type in config.containers:
             seen = False
             for container in request.containers:
@@ -47,7 +47,6 @@ def column_config(fields: Optional[List[str]]) -> List[Union[int, str]]:

 class TopView(Frame):
     def __init__(self, screen: Any, cache: TopCache):
-
         super(TopView, self).__init__(
             screen, screen.height, screen.width, has_border=True, can_scroll=False
         )
@@ -144,7 +143,6 @@ class TopView(Frame):
         """recompute widget width based on max length of all of the values"""
         widget = self.find_widget(name)
         for column in range(len(widget._columns) - 1):
-
             sizes = [len(x[0][column]) + 1 for x in widget.options]
             if widget._titles:
                 sizes.append(len(widget._titles[column]) + 1)
@@ -97,7 +97,7 @@ class JobHelper:
         )

     def create_containers(self) -> None:
-        for (container_type, container_name) in self.containers.items():
+        for container_type, container_name in self.containers.items():
             self.logger.info("using container: %s", container_name)
             self.onefuzz.containers.create(
                 container_name, metadata={"container_type": container_type.name}
@@ -466,7 +466,6 @@ class Libfuzzer(Command):
         preserve_existing_outputs: bool = False,
         check_fuzzer_help: bool = True,
     ) -> Optional[Job]:
-
         """
         libfuzzer merge task
         """
@@ -585,7 +584,6 @@ class Libfuzzer(Command):
         check_fuzzer_help: bool = True,
         expect_crash_on_failure: bool = False,
     ) -> Optional[Job]:
-
         """
         libfuzzer-dotnet task
         """
@@ -910,7 +908,6 @@ class Libfuzzer(Command):
         check_retry_count: Optional[int] = 300,
         check_fuzzer_help: bool = True,
     ) -> Optional[Job]:
-
         """
         libfuzzer tasks, wrapped via qemu-user (PREVIEW FEATURE)
         """
@@ -116,7 +116,6 @@ class Regression(Command):
         delete_input_container: bool = True,
         check_regressions: bool = False,
     ) -> None:
-
         """
         libfuzzer regression task

@@ -177,7 +176,6 @@ class Regression(Command):
         delete_input_container: bool = True,
         check_regressions: bool = False,
     ) -> None:
-
         if dryrun:
             return None

@@ -62,7 +62,7 @@ class TestHelper(unittest.TestCase):
         values[(filename, Directory("c:\\unused\\"))] = expected
         values[(filename, Directory("c:\\unused\\"))] = expected

-        for (args, expected) in values.items():
+        for args, expected in values.items():
             self.assertEqual(helper.setup_relative_blob_name(*args), expected)

         with self.assertRaises(ValueError):
@@ -631,7 +631,6 @@ class Client:

         # Add --custom_domain value to Allowed token audiences setting
         if self.custom_domain:
-
             if self.multi_tenant_domain != "":
                 root_domain = self.multi_tenant_domain
             else:
@@ -776,7 +775,6 @@ class Client:
         logger.info("parsing config: %s", self.config)

         if self.config:
-
             with open(self.config, "r") as template_handle:
                 config_template = json.load(template_handle)

@@ -113,7 +113,6 @@ class Config:
         self.allowed_service_tags = proxy_config["allowed_service_tags"]

     def parse_endpoint_json(self, config: Any) -> None:
-
         if "cli_client_id" not in config:
             raise Exception(
                 "CLI client_id not provided as valid key. Please Provide Valid Config."
@@ -241,7 +240,6 @@ def update_admins(config_client: InstanceConfigClient, admins: List[UUID]) -> No


 def parse_rules(proxy_config: Config) -> List[NsgRule]:
-
     allowed_ips = proxy_config.allowed_ips
     allowed_service_tags = proxy_config.allowed_service_tags

@@ -382,7 +382,6 @@ def add_application_password(
 def add_application_password_impl(
     password_name: str, app_object_id: UUID, subscription_id: str
 ) -> Tuple[str, str]:
-
     app = query_microsoft_graph(
         method="GET",
         resource="applications/%s" % app_object_id,
@@ -305,8 +305,7 @@ EventTypeMap = {


 def get_event_type(event: Event) -> EventType:
-
-    for (event_type, event_class) in EventTypeMap.items():
+    for event_type, event_class in EventTypeMap.items():
         if isinstance(event, event_class):
             return event_type

@@ -84,7 +84,7 @@ class Deployer:
                 ),
             ),
         ]
-        for (msg, cmd) in commands:
+        for msg, cmd in commands:
             print(msg)
             subprocess.check_call(cmd, shell=True)

@@ -115,7 +115,7 @@ class Deployer:
             ),
         ]

-        for (msg, cmd) in commands:
+        for msg, cmd in commands:
             print(msg)
             output = subprocess.check_output(cmd, shell=True, encoding="UTF-8")
             if "client_id" in output:
@@ -166,7 +166,7 @@ class Deployer:
                 ),
             ),
         ]
-        for (msg, cmd) in commands:
+        for msg, cmd in commands:
             print(msg)
             print(cmd)
             subprocess.check_call(cmd, shell=True)
@@ -55,7 +55,7 @@ def delete_current_user_app_registrations(contains: str) -> None:
     ):
         my_apps.append((x["displayName"], x["id"]))

-    for (name, id) in my_apps:
+    for name, id in my_apps:
         print("Deleting: %s (%s)" % (name, id))
         result = client.delete(f"/applications/{id}")
         if not result.ok:
@@ -172,7 +172,6 @@ def download_artifacts(


 def main() -> None:
-
     parser = argparse.ArgumentParser()
     group = parser.add_mutually_exclusive_group()
     group.add_argument("--branch")