Mirror of https://github.com/microsoft/onefuzz.git, synced 2025-06-17 12:28:07 +00:00
allow tasks environment variables to be set (#3294)
* allow task environment variables to be set (see the usage sketch below)
* adjust the analysis parameters
* make the tools dir optional in the analysis task
* assorted build and bug fixes
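For illustration, a minimal sketch (not part of this commit) of how the new task_env parameter can be passed through the Python SDK's libfuzzer template; the value is forwarded to every task the template creates and applied by the agent before the task process is launched. The project, pool, target, and variable values below are hypothetical.

    from onefuzz.api import Onefuzz

    o = Onefuzz()
    o.template.libfuzzer.basic(
        project="example-project",
        name="example-target",
        build="build-1",
        pool_name="linux-pool",
        target_exe="fuzz.exe",
        # environment variables set on the worker before the task process starts
        task_env={"RUST_LOG": "debug", "ASAN_OPTIONS": "allocator_may_return_null=1"},
    )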
@@ -547,6 +547,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -2422,6 +2429,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -3136,6 +3150,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -3646,6 +3667,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -4122,6 +4150,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -4572,6 +4607,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -5049,6 +5091,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -6781,6 +6830,13 @@ If webhook is set to have Event Grid message format then the payload will look a
       "title": "Target Workers",
       "type": "integer"
     },
+    "task_env": {
+      "additionalProperties": {
+        "type": "string"
+      },
+      "title": "Task Env",
+      "type": "object"
+    },
     "type": {
       "$ref": "#/definitions/TaskType"
     },
@@ -220,6 +220,7 @@ public record TaskDetails(
     bool? PreserveExistingOutputs = null,
     List<string>? ReportList = null,
     long? MinimizedStackDepth = null,
+    Dictionary<string, string>? TaskEnv = null,

     // Deprecated. Retained for processing old table data.
     string? CoverageFilter = null,
@@ -927,6 +928,7 @@ public record WorkUnit(
     Guid JobId,
     Guid TaskId,
     TaskType TaskType,
+    Dictionary<string, string> Env,
     // JSON-serialized `TaskUnitConfig`.
     [property: JsonConverter(typeof(TaskUnitConfigConverter))] TaskUnitConfig Config
 );
@@ -246,7 +246,7 @@ public static class Defs {
         ),
         new ContainerDefinition(
             Type: ContainerType.Tools,
-            Compare: Compare.Equal,
+            Compare: Compare.AtMost,
             Value: 1,
             Permissions: ContainerPermission.Read | ContainerPermission.List
         ),
@@ -215,6 +215,7 @@ public class Scheduler : IScheduler {
             JobId: taskConfig.JobId,
             TaskId: taskConfig.TaskId,
             TaskType: taskConfig.TaskType,
+            Env: task.Config.Task.TaskEnv ?? new Dictionary<string, string>(),
             // todo: make sure that we exclude nulls when serializing
             // config = task_config.json(exclude_none = True, exclude_unset = True),
             Config: taskConfig);
@@ -164,6 +164,7 @@ public class JinjaTemplateAdapter {
                 true,
                 targetOptions,
                 1,
+                new Dictionary<string, string>(),
                 "coverage filter",
                 "module allow list",
                 "source allow list",
@@ -78,6 +78,7 @@ impl Fixture {
             job_id: self.job_id(),
             task_id: self.task_id(),
             config,
+            env: std::collections::HashMap::new(),
         }
     }
 }
@@ -163,6 +163,7 @@ fn debug_run_worker(opt: RunWorkerOpt) -> Result<()> {
         config: config.into(),
         job_id: Uuid::new_v4(),
         task_id,
+        env: std::collections::HashMap::new(),
     };
     let work_set = WorkSet {
         reboot: false,
@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.

+use std::collections::HashMap;
 use std::path::PathBuf;
 use std::{io::ErrorKind, sync::Arc};

@@ -112,6 +113,9 @@ pub struct WorkUnit {

     /// JSON-serialized task config.
     pub config: Secret<String>,
+
+    /// Environment variables to set for the task.
+    pub env: HashMap<String, String>,
 }

 impl WorkUnit {
@@ -496,6 +496,11 @@ impl IWorkerRunner for WorkerRunner {

         let mut cmd = Command::new("onefuzz-task");
         cmd.current_dir(&working_dir);
+
+        for (k, v) in &work.env {
+            cmd.env(k, v);
+        }
+
         cmd.arg("managed");
         cmd.arg(config_path);
         cmd.arg(setup_dir);
@@ -20,6 +20,7 @@ impl Fixture {
             job_id,
             task_id,
             config,
+            env: std::collections::HashMap::new(),
         }
     }

@@ -70,7 +70,7 @@ pub fn build_analysis_config(
         input_queue,
         crashes,
         analysis,
-        tools,
+        tools: Some(tools),
         reports,
         unique_reports,
         no_repro,
@@ -37,7 +37,7 @@ pub struct Config {
     pub crashes: Option<SyncedDir>,

     pub analysis: SyncedDir,
-    pub tools: SyncedDir,
+    pub tools: Option<SyncedDir>,

     pub reports: Option<SyncedDir>,
     pub unique_reports: Option<SyncedDir>,
@@ -61,7 +61,9 @@ pub async fn run(config: Config) -> Result<()> {
     tmp.reset().await?;

     config.analysis.init().await?;
-    config.tools.init_pull().await?;
+    if let Some(tools) = &config.tools {
+        tools.init_pull().await?;
+    }

     // the tempdir is always created, however, the reports_path and
     // reports_monitor_future are only created if we have one of the three
@@ -95,7 +97,9 @@ pub async fn run(config: Config) -> Result<()> {
         (None, None)
     };

-    set_executable(&config.tools.local_path).await?;
+    if let Some(tools) = &config.tools {
+        set_executable(&tools.local_path).await?;
+    }
     run_existing(&config, &reports_path).await?;
     let poller = poll_inputs(&config, tmp, &reports_path);

@@ -207,8 +211,11 @@ pub async fn run_tool(
         .analyzer_exe(&config.analyzer_exe)
         .analyzer_options(&config.analyzer_options)
         .output_dir(&config.analysis.local_path)
-        .tools_dir(&config.tools.local_path)
         .setup_dir(&config.common.setup_dir)
+        .set_optional(
+            config.tools.clone().map(|t| t.local_path),
+            Expand::tools_dir,
+        )
         .set_optional_ref(&config.common.extra_setup_dir, Expand::extra_setup_dir)
         .set_optional_ref(&config.common.extra_output, |expand, value| {
             expand.extra_output_dir(value.local_path.as_path())
@@ -1023,6 +1023,7 @@ class Tasks(Endpoint):
         minimized_stack_depth: Optional[int] = None,
         module_allowlist: Optional[str] = None,
         source_allowlist: Optional[str] = None,
+        task_env: Optional[Dict[str, str]] = None,
     ) -> models.Task:
         """
         Create a task
@@ -1100,6 +1101,7 @@ class Tasks(Endpoint):
                 minimized_stack_depth=minimized_stack_depth,
                 module_allowlist=module_allowlist,
                 source_allowlist=source_allowlist,
+                task_env=task_env,
             ),
         )

@@ -82,6 +82,7 @@ class Libfuzzer(Command):
         analyzer_options: Optional[List[str]] = None,
         analyzer_env: Optional[Dict[str, str]] = None,
         tools: Optional[Container] = None,
+        task_env: Optional[Dict[str, str]] = None,
     ) -> None:
         target_options = target_options or []
         regression_containers = [
@@ -125,6 +126,7 @@ class Libfuzzer(Command):
             debug=debug,
             colocate=colocate_all_tasks or colocate_secondary_tasks,
             minimized_stack_depth=minimized_stack_depth,
+            task_env=task_env,
         )

         fuzzer_containers = [
@@ -176,6 +178,7 @@ class Libfuzzer(Command):
             colocate=colocate_all_tasks,
             check_fuzzer_help=check_fuzzer_help,
             expect_crash_on_failure=expect_crash_on_failure,
+            task_env=task_env,
         )

         prereq_tasks = [fuzzer_task.task_id, regression_task.task_id]
@@ -238,6 +241,7 @@
             check_fuzzer_help=check_fuzzer_help,
             module_allowlist=module_allowlist,
             source_allowlist=source_allowlist,
+            task_env=task_env,
         )

         report_containers = [
@@ -274,24 +278,21 @@
             debug=debug,
             colocate=colocate_all_tasks or colocate_secondary_tasks,
             minimized_stack_depth=minimized_stack_depth,
+            task_env=task_env,
         )

         if analyzer_exe is not None:
             self.logger.info("creating custom analysis")
-
-            if tools is None:
-                self.logger.error(
-                    "tools container cannot be empty when specifying a custom analyzer"
-                )
-                return None

             analysis_containers = [
                 (ContainerType.setup, containers[ContainerType.setup]),
-                (ContainerType.tools, tools),
                 (ContainerType.analysis, containers[ContainerType.analysis]),
                 (ContainerType.crashes, containers[ContainerType.crashes]),
             ]

+            if tools is not None:
+                analysis_containers.append((ContainerType.tools, tools))
+
             self._add_optional_containers(
                 analysis_containers,
                 containers,
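As a hedged illustration of the change above (assumed usage, not part of the commit): with the tools container now optional, a custom analysis task can be requested without supplying one; previously this path logged "tools container cannot be empty when specifying a custom analyzer" and returned early. The analyzer path and options below are hypothetical.

    from onefuzz.api import Onefuzz

    o = Onefuzz()
    o.template.libfuzzer.basic(
        project="example-project",
        name="example-target",
        build="build-1",
        pool_name="linux-pool",
        target_exe="fuzz.exe",
        analyzer_exe="/usr/bin/strings",  # analyzer assumed to already exist on the node
        analyzer_options=["{input}"],
        # tools=...  # only needed if the analyzer requires files from a tools container
    )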
@@ -317,6 +318,7 @@ class Libfuzzer(Command):
                 colocate=colocate_all_tasks or colocate_secondary_tasks,
                 debug=debug,
                 target_timeout=target_timeout,
+                task_env=task_env,
             )

     def basic(
@@ -365,6 +367,7 @@ class Libfuzzer(Command):
         extra_setup_container: Optional[Container] = None,
         extra_output_container: Optional[Container] = None,
         crashes: Optional[Container] = None,
+        task_env: Optional[Dict[str, str]] = None,
     ) -> Optional[Job]:
         """
         Basic libfuzzer job
@@ -496,6 +499,7 @@ class Libfuzzer(Command):
             analyzer_options=analyzer_options,
             analyzer_env=analyzer_env,
             tools=tools,
+            task_env=task_env,
         )

         self.logger.info("done creating tasks")
@@ -531,6 +535,7 @@ class Libfuzzer(Command):
         check_fuzzer_help: bool = False,
         no_check_fuzzer_help: bool = False,
         extra_setup_container: Optional[Container] = None,
+        task_env: Optional[Dict[str, str]] = None,
     ) -> Optional[Job]:
         """
         libfuzzer merge task
@@ -628,6 +633,7 @@
             debug=debug,
             preserve_existing_outputs=preserve_existing_outputs,
             check_fuzzer_help=check_fuzzer_help,
+            task_env=task_env,
         )

         self.logger.info("done creating tasks")
@@ -668,6 +674,7 @@ class Libfuzzer(Command):
         notification_config: Optional[NotificationConfig] = None,
         extra_setup_container: Optional[Container] = None,
         crashes: Optional[Container] = None,
+        task_env: Optional[Dict[str, str]] = None,
     ) -> Optional[Job]:
         pool = self.onefuzz.pools.get(pool_name)

@@ -782,6 +789,7 @@ class Libfuzzer(Command):
             ensemble_sync_delay=ensemble_sync_delay,
             expect_crash_on_failure=expect_crash_on_failure,
             check_fuzzer_help=False,
+            task_env=task_env,
         )

         # Ensure the fuzzing task starts before we schedule the coverage and
@@ -829,6 +837,7 @@ class Libfuzzer(Command):
             prereq_tasks=prereq_tasks,
             debug=debug,
             colocate=colocate_all_tasks or colocate_secondary_tasks,
+            task_env=task_env,
         )

         report_containers = [
@@ -861,6 +870,7 @@ class Libfuzzer(Command):
             check_retry_count=check_retry_count,
             debug=debug,
             colocate=colocate_all_tasks or colocate_secondary_tasks,
+            task_env=task_env,
         )

         self.logger.info("done creating tasks")
@@ -901,6 +911,7 @@ class Libfuzzer(Command):
         extra_setup_container: Optional[Container] = None,
         crashes: Optional[Container] = None,
         readonly_inputs: Optional[Container] = None,
+        task_env: Optional[Dict[str, str]] = None,
     ) -> Optional[Job]:
         """
         libfuzzer tasks, wrapped via qemu-user (PREVIEW FEATURE)
@@ -1057,6 +1068,7 @@ class Libfuzzer(Command):
             ensemble_sync_delay=ensemble_sync_delay,
             expect_crash_on_failure=False,
             check_fuzzer_help=check_fuzzer_help,
+            task_env=task_env,
        )

         report_containers = [
@@ -1093,6 +1105,7 @@ class Libfuzzer(Command):
             colocate=colocate_all_tasks,
             expect_crash_on_failure=False,
             check_fuzzer_help=check_fuzzer_help,
+            task_env=task_env,
         )

         self.logger.info("done creating tasks")
@@ -172,6 +172,7 @@ class TaskDetails(BaseModel):
     target_assembly: Optional[str]
     target_class: Optional[str]
     target_method: Optional[str]
+    task_env: Optional[Dict[str, str]]


 class TaskPool(BaseModel):