Log redirection, service side (#1727)

* Setting up the service side of log management
- a log container is created or reused when we create a job
- when scheduling a task we send the log location to the agent
The expected log structure looks like (sketched below):
{fuzzContainer}/logs/{job_id}/{task_id}/{machine_id}/1.log

* regenerate docs

* including job_id in the container name

* regenerating docs
removing bad doc file
Author: Cheick Keita
Date: 2022-03-29 11:47:20 -07:00
Committed by: GitHub
parent 424ffdb4b5
commit 7add51fd3a
7 changed files with 125 additions and 23 deletions
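
The path layout in the notes above can be illustrated with a short sketch. This is not code from the commit; the helper name and treating "1.log" as a fixed file name are assumptions drawn from the template in the commit message:

from uuid import UUID

def log_blob_path(job_id: UUID, task_id: UUID, machine_id: UUID) -> str:
    # Blob path inside {fuzzContainer}, following the layout
    # logs/{job_id}/{task_id}/{machine_id}/1.log; the container itself is
    # addressed separately via its SAS URL.
    return f"logs/{job_id}/{task_id}/{machine_id}/1.log"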

View File

@@ -109,11 +109,31 @@ def get_container_metadata(
    return cast(Dict[str, str], result)


def create_container(
def add_container_sas_url(container_url: str) -> str:
    parsed = urllib.parse.urlparse(container_url)
    query = urllib.parse.parse_qs(parsed.query)
    if "sig" in query:
        return container_url
    else:
        account_name = parsed.netloc.split(".")[0]
        account_key = get_storage_account_name_key_by_name(account_name)
        sas_token = generate_container_sas(
            account_name=account_name,
            container_name=parsed.path.split("/")[1],
            account_key=account_key,
            permission=ContainerSasPermissions(
                read=True, write=True, delete=True, list=True
            ),
            expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
        )
        return f"{container_url}?{sas_token}"


def get_or_create_container_client(
    container: Container,
    storage_type: StorageType,
    metadata: Optional[Dict[str, str]],
) -> Optional[str]:
) -> Optional[ContainerClient]:
    client = find_container(container, storage_type)
    if client is None:
        account = choose_account(storage_type)
@@ -134,7 +154,17 @@ def create_container(
                err,
            )
            return None
    return client


def create_container(
    container: Container,
    storage_type: StorageType,
    metadata: Optional[Dict[str, str]],
) -> Optional[str]:
    client = get_or_create_container_client(container, storage_type, metadata)
    if client is None:
        return None
    return get_container_sas_url_service(
        client,
        read=True,
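
A rough usage sketch of the new add_container_sas_url helper shown above; the account and container names here are placeholders, not values from this commit:

# Returns the URL unchanged when it already carries a "sig" query parameter;
# otherwise appends a read/write/delete/list container SAS valid for one hour.
log_container_url = "https://myaccount.blob.core.windows.net/job-logs"
signed_url = add_container_sas_url(log_container_url)
# e.g. "https://myaccount.blob.core.windows.net/job-logs?<container sas token>"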

View File

@@ -7,13 +7,17 @@ import logging
import os
import pathlib
from typing import Dict, List, Optional
from uuid import UUID

from onefuzztypes.enums import Compare, ContainerPermission, ContainerType, TaskFeature
from onefuzztypes.models import TaskConfig, TaskDefinition, TaskUnitConfig
from onefuzztypes.models import Job, Task, TaskConfig, TaskDefinition, TaskUnitConfig
from onefuzztypes.primitives import Container

from ..azure.containers import blob_exists, container_exists, get_container_sas_url
from ..azure.containers import (
    add_container_sas_url,
    blob_exists,
    container_exists,
    get_container_sas_url,
)
from ..azure.creds import get_instance_id
from ..azure.queue import get_queue_sas
from ..azure.storage import StorageType
@@ -255,18 +259,25 @@ def check_config(config: TaskConfig) -> None:
        raise TaskConfigError(err)


def build_task_config(
    job_id: UUID, task_id: UUID, task_config: TaskConfig
) -> TaskUnitConfig:
def build_task_config(job: Job, task: Task) -> TaskUnitConfig:
    job_id = job.job_id
    task_id = task.task_id
    task_config = task.config

    if task_config.task.type not in TASK_DEFINITIONS:
        raise TaskConfigError("unsupported task type: %s" % task_config.task.type.name)

    if job.config.logs is None:
        raise TaskConfigError(
            "Missing log container: job_id %s, task_id %s", job_id, task_id
        )

    definition = TASK_DEFINITIONS[task_config.task.type]

    config = TaskUnitConfig(
        job_id=job_id,
        task_id=task_id,
        logs=add_container_sas_url(job.config.logs),
        task_type=task_config.task.type,
        instance_telemetry_key=os.environ.get("APPINSIGHTS_INSTRUMENTATIONKEY"),
        microsoft_telemetry_key=os.environ.get("ONEFUZZ_TELEMETRY"),
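
For job.config.logs to be populated by the time build_task_config runs, the log container has to be created (or reused) when the job itself is created, as the commit notes describe. Below is a minimal sketch of that wiring, using the create_container helper from the first file; the container name, the storage type, and the helper function itself are assumptions, not code from this commit:

def setup_job_log_container(job: Job) -> None:
    # Job, Container, StorageType and create_container are as imported/defined
    # in the hunks above. The naming scheme is assumed; the commit notes only
    # say that the job_id is part of the container name.
    container = Container(f"logs-{job.job_id}")
    sas_url = create_container(container, StorageType.corpus, metadata=None)
    if sas_url is not None:
        # build_task_config later re-signs this URL via add_container_sas_url
        # if the "sig" parameter is missing. Persisting the job record is
        # omitted in this sketch.
        job.config.logs = sas_url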

View File

@@ -14,6 +14,7 @@ from pydantic import BaseModel
from ..azure.containers import blob_exists, get_container_sas_url
from ..azure.storage import StorageType
from ..jobs import Job
from ..workers.pools import Pool
from .config import build_task_config, get_setup_container
from .main import Task
@@ -116,7 +117,11 @@ def build_work_unit(task: Task) -> Optional[Tuple[BucketConfig, WorkUnit]]:
    logging.info("scheduling task: %s", task.task_id)
    task_config = build_task_config(task.job_id, task.task_id, task.config)
    job = Job.get(task.job_id)
    if not job:
        raise Exception(f"invalid job_id {task.job_id} for task {task.task_id}")

    task_config = build_task_config(job, task)

    setup_container = get_setup_container(task.config)
    setup_script = None
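
With this change the scheduler refuses to build a work unit without the job record, so the log location is always available when the task config is serialized. A small illustration of the call site; the surrounding handling here is hypothetical:

# build_work_unit may return None (its return type is Optional, per the hunk
# header above); otherwise the work unit wraps the serialized TaskUnitConfig,
# which now carries the "logs" container SAS URL alongside job_id and task_id.
result = build_work_unit(task)
if result is not None:
    bucket_config, work_unit = result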