allow task environment variables to be set (#3294)

* allow task environment variables to be set

* build fix

* build fix

* build fix

* another fix

* adjusting the analysis parameters

* make tools dir optional in the analysis task

* bug fix
Cheick Keita
2023-07-14 17:13:15 -07:00
committed by GitHub
parent 7a92dab7c2
commit 930bb3f0b7
15 changed files with 108 additions and 13 deletions

View File

@@ -547,6 +547,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -2422,6 +2429,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -3136,6 +3150,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -3646,6 +3667,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -4122,6 +4150,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -4572,6 +4607,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -5049,6 +5091,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
@@ -6781,6 +6830,13 @@ If webhook is set to have Event Grid message format then the payload will look a
"title": "Target Workers",
"type": "integer"
},
"task_env": {
"additionalProperties": {
"type": "string"
},
"title": "Task Env",
"type": "object"
},
"type": {
"$ref": "#/definitions/TaskType"
},
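Each of these definitions gains the same "task_env" property: a JSON object whose additional properties are strings, i.e. a flat string-to-string map. As a rough illustration (values are hypothetical, not from this commit), a task section that would validate against the new schema could be built like this in Python:

    # illustrative only: the new "task_env" field is a flat str -> str map
    task_section = {
        "type": "libfuzzer_fuzz",
        "target_workers": 1,
        "task_env": {
            "RUST_LOG": "debug",
            "ASAN_OPTIONS": "detect_leaks=0",
        },
    }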

View File

@@ -220,6 +220,7 @@ public record TaskDetails(
bool? PreserveExistingOutputs = null,
List<string>? ReportList = null,
long? MinimizedStackDepth = null,
Dictionary<string, string>? TaskEnv = null,
// Deprecated. Retained for processing old table data.
string? CoverageFilter = null,
@@ -927,6 +928,7 @@ public record WorkUnit(
Guid JobId,
Guid TaskId,
TaskType TaskType,
Dictionary<string, string> Env,
// JSON-serialized `TaskUnitConfig`.
[property: JsonConverter(typeof(TaskUnitConfigConverter))] TaskUnitConfig Config
);

View File

@@ -246,7 +246,7 @@ public static class Defs {
),
new ContainerDefinition(
Type: ContainerType.Tools,
Compare: Compare.Equal,
Compare: Compare.AtMost,
Value: 1,
Permissions: ContainerPermission.Read | ContainerPermission.List
),

View File

@@ -215,6 +215,7 @@ public class Scheduler : IScheduler {
JobId: taskConfig.JobId,
TaskId: taskConfig.TaskId,
TaskType: taskConfig.TaskType,
Env: task.Config.Task.TaskEnv ?? new Dictionary<string, string>(),
// todo: make sure that we exclude nulls when serializing
// config = task_config.json(exclude_none = True, exclude_unset = True),
Config: taskConfig);

View File

@@ -164,6 +164,7 @@ public class JinjaTemplateAdapter {
true,
targetOptions,
1,
new Dictionary<string, string>(),
"coverage filter",
"module allow list",
"source allow list",

View File

@@ -78,6 +78,7 @@ impl Fixture {
job_id: self.job_id(),
task_id: self.task_id(),
config,
env: std::collections::HashMap::new(),
}
}
}

View File

@@ -163,6 +163,7 @@ fn debug_run_worker(opt: RunWorkerOpt) -> Result<()> {
config: config.into(),
job_id: Uuid::new_v4(),
task_id,
env: std::collections::HashMap::new(),
};
let work_set = WorkSet {
reboot: false,

View File

@@ -1,6 +1,7 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
use std::collections::HashMap;
use std::path::PathBuf;
use std::{io::ErrorKind, sync::Arc};
@@ -112,6 +113,9 @@ pub struct WorkUnit {
/// JSON-serialized task config.
pub config: Secret<String>,
/// Environment variables to set for the task.
pub env: HashMap<String, String>,
}
impl WorkUnit {
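With the new field, the work unit the agent receives carries the environment map next to the serialized task config. Roughly, the deserialized message shape (field values are hypothetical; snake_case keys assumed to match the surrounding structs):

    # hypothetical wire shape of a WorkUnit, for orientation only
    work_unit = {
        "job_id": "00000000-0000-0000-0000-000000000000",
        "task_id": "00000000-0000-0000-0000-000000000001",
        "config": "<JSON-serialized task config>",
        "env": {"RUST_LOG": "debug"},
    }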

View File

@@ -496,6 +496,11 @@ impl IWorkerRunner for WorkerRunner {
let mut cmd = Command::new("onefuzz-task");
cmd.current_dir(&working_dir);
for (k, v) in &work.env {
cmd.env(k, v);
}
cmd.arg("managed");
cmd.arg(config_path);
cmd.arg(setup_dir);
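The runner copies each pair into the child process with Command::env before launching "onefuzz-task managed", so task variables are merged over the agent's inherited environment key by key. A minimal Python sketch of the same semantics (names are illustrative, not part of this change):

    import os
    import subprocess

    def spawn_task(argv, task_env):
        # task-specific variables override inherited ones, key by key,
        # mirroring a loop of Command::env calls
        env = {**os.environ, **task_env}
        return subprocess.Popen(argv, env=env)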

View File

@@ -20,6 +20,7 @@ impl Fixture {
job_id,
task_id,
config,
env: std::collections::HashMap::new(),
}
}

View File

@@ -70,7 +70,7 @@ pub fn build_analysis_config(
input_queue,
crashes,
analysis,
tools,
tools: Some(tools),
reports,
unique_reports,
no_repro,

View File

@@ -37,7 +37,7 @@ pub struct Config {
pub crashes: Option<SyncedDir>,
pub analysis: SyncedDir,
pub tools: SyncedDir,
pub tools: Option<SyncedDir>,
pub reports: Option<SyncedDir>,
pub unique_reports: Option<SyncedDir>,
@@ -61,7 +61,9 @@ pub async fn run(config: Config) -> Result<()> {
tmp.reset().await?;
config.analysis.init().await?;
config.tools.init_pull().await?;
if let Some(tools) = &config.tools {
tools.init_pull().await?;
}
// the tempdir is always created, however, the reports_path and
// reports_monitor_future are only created if we have one of the three
@@ -95,7 +97,9 @@ pub async fn run(config: Config) -> Result<()> {
(None, None)
};
set_executable(&config.tools.local_path).await?;
if let Some(tools) = &config.tools {
set_executable(&tools.local_path).await?;
}
run_existing(&config, &reports_path).await?;
let poller = poll_inputs(&config, tmp, &reports_path);
@@ -207,8 +211,11 @@ pub async fn run_tool(
.analyzer_exe(&config.analyzer_exe)
.analyzer_options(&config.analyzer_options)
.output_dir(&config.analysis.local_path)
.tools_dir(&config.tools.local_path)
.setup_dir(&config.common.setup_dir)
.set_optional(
config.tools.clone().map(|t| t.local_path),
Expand::tools_dir,
)
.set_optional_ref(&config.common.extra_setup_dir, Expand::extra_setup_dir)
.set_optional_ref(&config.common.extra_output, |expand, value| {
expand.extra_output_dir(value.local_path.as_path())
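Because tools is now Option<SyncedDir>, every consumer is guarded: init_pull, set_executable, and the tools_dir expansion run only when the container was supplied. The set_optional pattern, reduced to a Python sketch (hypothetical helper, assuming a builder-style API):

    def set_optional(builder, value, apply):
        # apply the setter only when the optional value is present;
        # otherwise leave the builder unchanged
        if value is not None:
            apply(builder, value)
        return builder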

View File

@@ -1023,6 +1023,7 @@ class Tasks(Endpoint):
minimized_stack_depth: Optional[int] = None,
module_allowlist: Optional[str] = None,
source_allowlist: Optional[str] = None,
task_env: Optional[Dict[str, str]] = None,
) -> models.Task:
"""
Create a task
@@ -1100,6 +1101,7 @@
minimized_stack_depth=minimized_stack_depth,
module_allowlist=module_allowlist,
source_allowlist=source_allowlist,
task_env=task_env,
),
)
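With the parameter exposed on Tasks.create, environment variables can be attached to a single task at creation time. A sketch, assuming the usual positional arguments for target, containers, and pool (values are placeholders):

    task = onefuzz.tasks.create(
        job.job_id,
        TaskType.libfuzzer_fuzz,
        "fuzz.exe",
        containers,
        pool_name="linux-pool",
        task_env={"RUST_LOG": "debug"},
    )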

View File

@@ -82,6 +82,7 @@ class Libfuzzer(Command):
analyzer_options: Optional[List[str]] = None,
analyzer_env: Optional[Dict[str, str]] = None,
tools: Optional[Container] = None,
task_env: Optional[Dict[str, str]] = None,
) -> None:
target_options = target_options or []
regression_containers = [
@@ -125,6 +126,7 @@
debug=debug,
colocate=colocate_all_tasks or colocate_secondary_tasks,
minimized_stack_depth=minimized_stack_depth,
task_env=task_env,
)
fuzzer_containers = [
@@ -176,6 +178,7 @@
colocate=colocate_all_tasks,
check_fuzzer_help=check_fuzzer_help,
expect_crash_on_failure=expect_crash_on_failure,
task_env=task_env,
)
prereq_tasks = [fuzzer_task.task_id, regression_task.task_id]
@@ -238,6 +241,7 @@
check_fuzzer_help=check_fuzzer_help,
module_allowlist=module_allowlist,
source_allowlist=source_allowlist,
task_env=task_env,
)
report_containers = [
@@ -274,24 +278,21 @@
debug=debug,
colocate=colocate_all_tasks or colocate_secondary_tasks,
minimized_stack_depth=minimized_stack_depth,
task_env=task_env,
)
if analyzer_exe is not None:
self.logger.info("creating custom analysis")
if tools is None:
self.logger.error(
"tools container cannot be empty when specifying a custom analyzer"
)
return None
analysis_containers = [
(ContainerType.setup, containers[ContainerType.setup]),
(ContainerType.tools, tools),
(ContainerType.analysis, containers[ContainerType.analysis]),
(ContainerType.crashes, containers[ContainerType.crashes]),
]
if tools is not None:
analysis_containers.append((ContainerType.tools, tools))
self._add_optional_containers(
analysis_containers,
containers,
@@ -317,6 +318,7 @@
colocate=colocate_all_tasks or colocate_secondary_tasks,
debug=debug,
target_timeout=target_timeout,
task_env=task_env,
)
def basic(
@@ -365,6 +367,7 @@
extra_setup_container: Optional[Container] = None,
extra_output_container: Optional[Container] = None,
crashes: Optional[Container] = None,
task_env: Optional[Dict[str, str]] = None,
) -> Optional[Job]:
"""
Basic libfuzzer job
@@ -496,6 +499,7 @@
analyzer_options=analyzer_options,
analyzer_env=analyzer_env,
tools=tools,
task_env=task_env,
)
self.logger.info("done creating tasks")
@@ -531,6 +535,7 @@
check_fuzzer_help: bool = False,
no_check_fuzzer_help: bool = False,
extra_setup_container: Optional[Container] = None,
task_env: Optional[Dict[str, str]] = None,
) -> Optional[Job]:
"""
libfuzzer merge task
@@ -628,6 +633,7 @@
debug=debug,
preserve_existing_outputs=preserve_existing_outputs,
check_fuzzer_help=check_fuzzer_help,
task_env=task_env,
)
self.logger.info("done creating tasks")
@@ -668,6 +674,7 @@
notification_config: Optional[NotificationConfig] = None,
extra_setup_container: Optional[Container] = None,
crashes: Optional[Container] = None,
task_env: Optional[Dict[str, str]] = None,
) -> Optional[Job]:
pool = self.onefuzz.pools.get(pool_name)
@@ -782,6 +789,7 @@
ensemble_sync_delay=ensemble_sync_delay,
expect_crash_on_failure=expect_crash_on_failure,
check_fuzzer_help=False,
task_env=task_env,
)
# Ensure the fuzzing task starts before we schedule the coverage and
@@ -829,6 +837,7 @@
prereq_tasks=prereq_tasks,
debug=debug,
colocate=colocate_all_tasks or colocate_secondary_tasks,
task_env=task_env,
)
report_containers = [
@@ -861,6 +870,7 @@
check_retry_count=check_retry_count,
debug=debug,
colocate=colocate_all_tasks or colocate_secondary_tasks,
task_env=task_env,
)
self.logger.info("done creating tasks")
@@ -901,6 +911,7 @@
extra_setup_container: Optional[Container] = None,
crashes: Optional[Container] = None,
readonly_inputs: Optional[Container] = None,
task_env: Optional[Dict[str, str]] = None,
) -> Optional[Job]:
"""
libfuzzer tasks, wrapped via qemu-user (PREVIEW FEATURE)
@@ -1057,6 +1068,7 @@
ensemble_sync_delay=ensemble_sync_delay,
expect_crash_on_failure=False,
check_fuzzer_help=check_fuzzer_help,
task_env=task_env,
)
report_containers = [
@@ -1093,6 +1105,7 @@
colocate=colocate_all_tasks,
expect_crash_on_failure=False,
check_fuzzer_help=check_fuzzer_help,
task_env=task_env,
)
self.logger.info("done creating tasks")
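At the template level the same map fans out to every task the template schedules: fuzz, regression, coverage, report, analysis, and merge all receive task_env. A hypothetical end-to-end invocation through the SDK (project, name, build, and pool values are placeholders):

    from onefuzz.api import Onefuzz

    o = Onefuzz()
    o.template.libfuzzer.basic(
        "my-project",
        "my-target",
        "build-1",
        "linux-pool",
        target_exe="fuzz.exe",
        task_env={"RUST_LOG": "info"},
    )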

View File

@@ -172,6 +172,7 @@ class TaskDetails(BaseModel):
target_assembly: Optional[str]
target_class: Optional[str]
target_method: Optional[str]
task_env: Optional[Dict[str, str]]
class TaskPool(BaseModel):
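Since TaskDetails now carries task_env, the variables also round-trip on the task model and can be read back after creation, e.g. (hypothetical lookup):

    task = onefuzz.tasks.get(task_id)
    print(task.config.task.task_env)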