Mirror of https://github.com/microsoft/onefuzz.git
add log checking to refactored integration check (#700)
In practice, Application Insights can take up to 3 minutes before data sent to it becomes available via KQL. This PR logs a start and a stop marker so that the integration tests only search the logs emitted during the test run itself. This reduces complexity when using the integration tests during development. Note: this migrates the functionality from #356 into the latest integration test tools.
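A minimal sketch of the marker approach in Python; the marker strings, helper name, and query shape are illustrative assumptions, not the PR's actual implementation:

import logging
from datetime import datetime, timezone

# Illustrative marker strings; the real markers used by the tests may differ.
START_MARKER = "integration-test-start"
STOP_MARKER = "integration-test-stop"

def run_tests_with_markers(run_tests) -> str:
    """Wrap a test run with start/stop markers and return a KQL query
    limited to that window."""
    start = datetime.now(timezone.utc)
    logging.info(START_MARKER)

    run_tests()  # the actual integration checks happen here

    logging.info(STOP_MARKER)
    stop = datetime.now(timezone.utc)

    # Application Insights ingestion can lag by minutes, so bound the KQL
    # query by the recorded wall-clock window rather than "last N minutes".
    fmt = "%Y-%m-%dT%H:%M:%SZ"
    return (
        "traces | where timestamp between "
        f"(datetime({start.strftime(fmt)}) .. datetime({stop.strftime(fmt)}))"
    )

Bounding the search to the recorded window keeps logs from earlier runs of the same instance out of the check.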
@@ -214,9 +214,6 @@ def on_worker_event_done(machine_id: UUID, event: WorkerDoneEvent) -> Result[Non
         node.debug_keep_node = True
         node.save()
     else:
-        logging.error(
-            "task failed. %s:%s status:%s", task.job_id, task.task_id, event.exit_status
-        )
         task.mark_failed(
             Error(
                 code=ErrorCode.TASK_FAILED,
@@ -136,7 +136,7 @@ def get_extension(vm_name: str, extension_name: str) -> Optional[Any]:
             resource_group, vm_name, extension_name
         )
     except (ResourceNotFoundError, CloudError) as err:
-        logging.error("extension does not exist %s", err)
+        logging.info("extension does not exist %s", err)
         return None

@@ -108,9 +108,12 @@ def get_queue_tasks() -> Sequence[Tuple[Task, Sequence[str]]]:

 def new_files(container: Container, filename: str) -> None:
-    report = get_report_or_regression(container, filename)

     notifications = get_notifications(container)

+    report = get_report_or_regression(
+        container, filename, expect_reports=bool(notifications)
+    )
+
     if notifications:
         done = []
         for notification in notifications:
@@ -17,24 +17,28 @@ from .azure.storage import StorageType


 def parse_report_or_regression(
-    content: Union[str, bytes], file_path: Optional[str] = None
+    content: Union[str, bytes],
+    file_path: Optional[str] = None,
+    expect_reports: bool = False,
 ) -> Optional[Union[Report, RegressionReport]]:
     if isinstance(content, bytes):
         try:
             content = content.decode()
         except UnicodeDecodeError as err:
-            logging.error(
-                f"unable to parse report ({file_path}): "
-                f"unicode decode of report failed - {err}"
-            )
+            if expect_reports:
+                logging.error(
+                    f"unable to parse report ({file_path}): "
+                    f"unicode decode of report failed - {err}"
+                )
             return None

     try:
         data = json.loads(content)
     except json.decoder.JSONDecodeError as err:
-        logging.error(
-            f"unable to parse report ({file_path}): json decoding failed - {err}"
-        )
+        if expect_reports:
+            logging.error(
+                f"unable to parse report ({file_path}): json decoding failed - {err}"
+            )
         return None

     regression_err = None
@@ -46,29 +50,34 @@ def parse_report_or_regression(
     try:
         return Report.parse_obj(data)
     except ValidationError as err:
-        logging.error(
-            f"unable to parse report ({file_path}) as a report or regression. "
-            f"regression error: {regression_err} report error: {err}"
-        )
+        if expect_reports:
+            logging.error(
+                f"unable to parse report ({file_path}) as a report or regression. "
+                f"regression error: {regression_err} report error: {err}"
+            )
         return None


 # cache the last 1000 reports
 @cached(max_size=1000)
 def get_report_or_regression(
-    container: Container, filename: str
+    container: Container, filename: str, *, expect_reports: bool = False
 ) -> Optional[Union[Report, RegressionReport]]:
     file_path = "/".join([container, filename])
     if not filename.endswith(".json"):
-        logging.error("get_report invalid extension: %s", file_path)
+        if expect_reports:
+            logging.error("get_report invalid extension: %s", file_path)
         return None

     blob = get_blob(container, filename, StorageType.corpus)
     if blob is None:
-        logging.error("get_report invalid blob: %s", file_path)
+        if expect_reports:
+            logging.error("get_report invalid blob: %s", file_path)
         return None

-    return parse_report_or_regression(blob, file_path=file_path)
+    return parse_report_or_regression(
+        blob, file_path=file_path, expect_reports=expect_reports
+    )


 def get_report(container: Container, filename: str) -> Optional[Report]:
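The expect_reports flag threaded through the hunks above means parse failures are only logged as errors when a notification is actually configured for the container, so stray non-report blobs no longer generate error-level noise for the log check to trip on. A short call-site sketch, assuming the onefuzzlib imports from the diff above; the container and filename values are hypothetical:

# values below are illustrative, not from the PR
container = Container("oft-reports-0123")
filename = "crash-deadbeef.json"

notifications = get_notifications(container)

# log parse failures as errors only when someone is expecting reports here
report = get_report_or_regression(
    container, filename, expect_reports=bool(notifications)
)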
@@ -204,6 +204,8 @@ class Task(BASE_TASK, ORMMixin):
             )
             return

+        logging.error("task failed %s:%s - %s", self.job_id, self.task_id, error)
+
         self.error = error
         self.set_state(TaskState.stopping)